content
stringlengths
10
4.9M
package moze_intel.projecte.api.tile;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import moze_intel.projecte.api.ProjectEAPI;
import moze_intel.projecte.api.capabilities.tile.IEmcStorage;
import net.minecraft.block.BlockState;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityType;
import net.minecraft.util.Direction;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.LazyOptional;

/**
 * Base class for the reference implementations IEmcStorage
 *
 * Extend this if you want fine-grained control over all aspects of how your tile provides or accepts EMC
 *
 * @author williewillus
 */
public class TileEmcBase extends TileEntity implements IEmcStorage {

	// Lazily-created capability wrapper handed out via getCapability; recreated
	// on demand after invalidation (see invalidateCaps).
	private LazyOptional<IEmcStorage> emcStorageCapability;
	private long maximumEMC;
	private long currentEMC;

	protected TileEmcBase(TileEntityType<?> type) {
		super(type);
		// Default to effectively unlimited storage; subclasses may lower this.
		setMaximumEMC(Long.MAX_VALUE);
	}

	/**
	 * Sets the maximum amount of EMC this tile can store, clamping the currently
	 * stored amount (and notifying via storedEmcChanged) if it now exceeds the
	 * new maximum.
	 *
	 * @param max the new maximum EMC capacity
	 */
	public final void setMaximumEMC(long max) {
		maximumEMC = max;
		if (getStoredEmc() > getMaximumEmc()) {
			currentEMC = getMaximumEmc();
			storedEmcChanged();
		}
	}

	@Override
	public long getStoredEmc() {
		return currentEMC;
	}

	@Override
	public long getMaximumEmc() {
		return maximumEMC;
	}

	/**
	 * @return The maximum amount of Emc that can be inserted at once into this {@link IEmcStorage}
	 */
	protected long getEmcInsertLimit() {
		return getNeededEmc();
	}

	/**
	 * @return The maximum amount of Emc that can be extracted at once from this {@link IEmcStorage}
	 */
	protected long getEmcExtractLimit() {
		return getStoredEmc();
	}

	/**
	 * Set this to false to stop this Emc tile from accepting Emc.
	 */
	protected boolean canAcceptEmc() {
		return true;
	}

	/**
	 * Set this to false to stop this Emc tile from being able to provide Emc.
	 */
	protected boolean canProvideEmc() {
		return true;
	}

	@Override
	public long extractEmc(long toExtract, EmcAction action) {
		if (toExtract < 0) {
			// Extracting a negative amount is treated as inserting the absolute value.
			return insertEmc(-toExtract, action);
		}
		if (canProvideEmc()) {
			return forceExtractEmc(Math.min(getEmcExtractLimit(), toExtract), action);
		}
		return 0;
	}

	@Override
	public long insertEmc(long toAccept, EmcAction action) {
		if (toAccept < 0) {
			// Inserting a negative amount is treated as extracting the absolute value.
			return extractEmc(-toAccept, action);
		}
		if (canAcceptEmc()) {
			return forceInsertEmc(Math.min(getEmcInsertLimit(), toAccept), action);
		}
		return 0;
	}

	/**
	 * Similar to {@link IEmcStorage#extractEmc(long, EmcAction)} except, it is an internal method for use of removing EMC except it ignores if we can provide EMC
	 * externally or not.
	 *
	 * @param toExtract The maximum amount to extract
	 * @param action    The action to perform, either {@link EmcAction#EXECUTE} or {@link EmcAction#SIMULATE}
	 *
	 * @return The amount actually accepted
	 *
	 * @apiNote For internal use this rather than {@link IEmcStorage#extractEmc(long, EmcAction)}, as it will probably behave more as expected.
	 */
	protected long forceExtractEmc(long toExtract, EmcAction action) {
		if (toExtract < 0) {
			return forceInsertEmc(-toExtract, action);
		}
		// Never extract more than is actually stored.
		long toRemove = Math.min(getStoredEmc(), toExtract);
		if (action.execute()) {
			currentEMC -= toRemove;
			storedEmcChanged();
		}
		return toRemove;
	}

	/**
	 * Similar to {@link IEmcStorage#insertEmc(long, EmcAction)} except, it is an internal method for use of adding EMC except it ignores if we can accept EMC externally
	 * or not, and instead of handling negative values it just acts as if zero was passed.
	 *
	 * @param toAccept The maximum amount to accept
	 * @param action   The action to perform, either {@link EmcAction#EXECUTE} or {@link EmcAction#SIMULATE}
	 *
	 * @return The amount actually accepted
	 *
	 * @apiNote For internal use this rather than {@link IEmcStorage#insertEmc(long, EmcAction)}, as it will probably behave more as expected.
	 */
	protected long forceInsertEmc(long toAccept, EmcAction action) {
		if (toAccept < 0) {
			return forceExtractEmc(-toAccept, action);
		}
		// Never accept more than the remaining free capacity.
		long toAdd = Math.min(getNeededEmc(), toAccept);
		if (action.execute()) {
			currentEMC += toAdd;
			storedEmcChanged();
		}
		return toAdd;
	}

	/**
	 * Called when the amount of EMC stored changes.
	 */
	protected void storedEmcChanged() {
		setChanged();
	}

	@Nonnull
	@Override
	public CompoundNBT save(@Nonnull CompoundNBT tag) {
		tag = super.save(tag);
		if (getStoredEmc() > getMaximumEmc()) {
			// Clamp before persisting so an over-full tile never writes an invalid value.
			currentEMC = getMaximumEmc();
		}
		tag.putLong("EMC", getStoredEmc());
		return tag;
	}

	@Override
	public void load(@Nonnull BlockState state, @Nonnull CompoundNBT tag) {
		super.load(state, tag);
		long set = tag.getLong("EMC");
		if (set > getMaximumEmc()) {
			// Clamp values persisted before a capacity reduction.
			set = getMaximumEmc();
		}
		currentEMC = set;
	}

	@Nonnull
	@Override
	public <T> LazyOptional<T> getCapability(@Nonnull Capability<T> cap, @Nullable Direction side) {
		if (cap == ProjectEAPI.EMC_STORAGE_CAPABILITY) {
			if (emcStorageCapability == null || !emcStorageCapability.isPresent()) {
				//If the capability has not been retrieved yet, or it is not valid then recreate it
				emcStorageCapability = LazyOptional.of(() -> this);
			}
			return emcStorageCapability.cast();
		}
		return super.getCapability(cap, side);
	}

	@Override
	protected void invalidateCaps() {
		super.invalidateCaps();
		if (emcStorageCapability != null && emcStorageCapability.isPresent()) {
			// Invalidate so holders of the LazyOptional know it is gone; null the
			// field so getCapability recreates it if asked again.
			emcStorageCapability.invalidate();
			emcStorageCapability = null;
		}
	}
}
Human body contour data based activity recognition This research work is aimed at developing autonomous bio-monitoring mobile robots, which are capable of tracking and measuring patients' motions, recognizing the patients' behavior based on observation data, and calling for medical personnel in emergency situations in home environments. The robots to be developed will bring cost-effective, safe, and easier at-home rehabilitation to most motor-function impaired patients (MIPs). In our previous research, a full framework was established towards this research goal. In this research, we aimed at improving human activity recognition by using contour data of the tracked human subject, extracted from the depth images, as the signal source, instead of the lower-limb joint angle data used in the previous research, which are more likely to be affected by the motion of the robot and human subjects. Several geometric parameters, such as the ratio of height to width of the tracked human subject and the distance (in pixels) between the centroid points of the upper and lower parts of the human body, were calculated from the contour data and used as the features for activity recognition. A Hidden Markov Model (HMM) is employed to classify different human activities from the features. Experimental results showed that human activity recognition could be achieved with a high correct rate.
San Francisco 49ers coach Jim Harbaugh would be the fans' top choice to replace Brady Hoke, should Hoke be fired as Michigan football coach. (Photo: Tony Avelar / Associated Press) ESPN's Adam Schefter believes Jim Harbaugh is "more interested in staying in the pros" than coaching at Michigan. Schefter, who went to Michigan, made his comments on ESPN's "Mike and Mike" radio show Monday morning. The comments were reported by MLive.com. Speculation is heavy that Michigan will fire coach Brady Hoke. Harbaugh, the San Francisco 49ers' head coach and a former Michigan quarterback, would be the fans' top choice to replace Hoke. "I've had a lot of people ask me, 'Could Jim Harbaugh wind up at Michigan?' I'm not going to tell you he can't, but based on everything I've been told and heard, I do not see that happening," Schefter said during the radio interview, according to MLive.com. On Sunday, Schefter reported that if the 49ers make the playoffs this season, Harbaugh could be rewarded with a contract extension in San Francisco. Schefter attributed his information to sources. One NFL source told Schefter the Raiders would be the favorite among NFL teams to land Harbaugh. "To me, the Oakland Raiders are the obvious landing spot," Schefter said Monday during the radio interview. "It would mean he could stay in the Bay Area; I'm sure he and his family are happy about that idea."
/**
 * Generates a self-signed certificate and saves it in the keystore and truststore.
 * Should only be used to init the root CA. It is expected that info about the root keystore and the truststore
 * is available in PKIConfiguration. If they already exist they will be overwritten!
 *
 * @param rootCertX500Name The DN of the new root CA Certificate
 * @param crlUrl           CRL endpoint
 * @param rootCAAlias      The alias of the root CA
 * @param validityPeriod   Validity period passed through to the certificate builder
 */
public void initRootCA(String rootCertX500Name, String crlUrl, String rootCAAlias, int validityPeriod) {
    KeyPair cakp = CertificateBuilder.generateKeyPair(null);
    KeyStore rootks;
    KeyStore ts;
    try (FileOutputStream rootfos = new FileOutputStream(pkiConfiguration.getRootCaKeystorePath());
         FileOutputStream tsfos = new FileOutputStream(pkiConfiguration.getTruststorePath())
    ) {
        // Root CA keystore: holds the CA private key plus its self-signed certificate.
        rootks = KeyStore.getInstance(PKIConstants.KEYSTORE_TYPE);
        rootks.load(null, pkiConfiguration.getRootCaKeystorePassword().toCharArray());
        // Self-signed: subject == issuer and the certificate is signed with its own key pair.
        X509Certificate cacert = certificateBuilder.buildAndSignCert(certificateBuilder.generateSerialNumber(null),
                cakp.getPrivate(), cakp.getPublic(), cakp.getPublic(), new X500Name(rootCertX500Name),
                new X500Name(rootCertX500Name), null, "ROOTCA", null, crlUrl, null, validityPeriod);
        Certificate[] certChain = new Certificate[1];
        certChain[0] = cacert;
        rootks.setKeyEntry(rootCAAlias, cakp.getPrivate(), pkiConfiguration.getRootCaKeyPassword().toCharArray(), certChain);
        rootks.store(rootfos, pkiConfiguration.getRootCaKeystorePassword().toCharArray());
        // NOTE: a dead reassignment of rootks to a freshly-loaded default-type keystore
        // (never stored, never read) was removed here.
        // Truststore: holds only the public root certificate.
        ts = KeyStore.getInstance(KeyStore.getDefaultType());
        ts.load(null, pkiConfiguration.getTruststorePassword().toCharArray());
        ts.setCertificateEntry(rootCAAlias, cacert);
        ts.store(tsfos, pkiConfiguration.getTruststorePassword().toCharArray());
    } catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException | OperatorCreationException e) {
        // Preserve the original exception as the cause.
        throw new PKIRuntimeException(e.getMessage(), e);
    }
}
def elasticsearch_builder():
    """Build an Elasticsearch client from the integration's global settings.

    Credentials (USERNAME/PASSWORD) and proxy settings (PROXY/handle_proxy)
    are added only when configured; all other connection options are shared,
    so the four duplicated constructor calls collapse into one.

    NOTE(review): INSECURE is forwarded directly as verify_certs -- this is
    only correct if the flag already carries "verify" semantics (i.e. it was
    negated upstream); confirm against the parameter-parsing code.
    """
    kwargs = {
        'hosts': [SERVER],
        'connection_class': RequestsHttpConnection,
        'verify_certs': INSECURE,
    }
    if USERNAME:
        kwargs['http_auth'] = (USERNAME, PASSWORD)
    if PROXY:
        kwargs['proxies'] = handle_proxy()
    return Elasticsearch(**kwargs)
/* * Copyright (c) 2021. Dell Inc., or its subsidiaries. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * */ package csiapi import ( "context" csi "github.com/container-storage-interface/spec/lib/go/csi" csiext "github.com/dell/dell-csi-extensions/podmon" log "github.com/sirupsen/logrus" "google.golang.org/grpc" "time" ) //Client holds clients related to CSI access type Client struct { DriverConn *grpc.ClientConn // A grpc client connection to the driver PodmonClient csiext.PodmonClient // A grpc CSIPodmonClient ControllerClient csi.ControllerClient // A grpc CSI ControllerClient NodeClient csi.NodeClient // A grpc CSI NodeClient } //CSIClient is reference to CSI Client var CSIClient Client //CSIClientDialRetry is timeout after failure to connect to the CSI Driver var CSIClientDialRetry = 30 * time.Second //NewCSIClient returns a new CSIApi interface func NewCSIClient(csiSock string, clientOpts ...grpc.DialOption) (CSIApi, error) { var err error for { // Wait on the driver. It will not open its unix socket until it has become leader. CSIClient.DriverConn, err = grpc.DialContext(context.Background(), csiSock, clientOpts...) 
log.Debugf("grpc.Dial returned %v %v", CSIClient.DriverConn, err) if err != nil || CSIClient.DriverConn == nil { var errMsg string if err == nil { errMsg = "No error returned, but CSIClient.DriverConn is nil" } else { errMsg = err.Error() } log.Errorf("Waiting on connection to driver csi.sock: %s", errMsg) time.Sleep(CSIClientDialRetry) } else { break } } log.Infof("Connected to driver: %s", csiSock) CSIClient.PodmonClient = csiext.NewPodmonClient(CSIClient.DriverConn) CSIClient.ControllerClient = csi.NewControllerClient(CSIClient.DriverConn) CSIClient.NodeClient = csi.NewNodeClient(CSIClient.DriverConn) return &CSIClient, nil } //Connected returns true if there is non-nil driver connection func (csi *Client) Connected() bool { return csi.DriverConn != nil } //Close will close connections on the driver connection, if it exists func (csi *Client) Close() error { if csi.Connected() { return csi.DriverConn.Close() } return nil } //ControllerUnpublishVolume calls the UnpublishVolume in the controller func (csi *Client) ControllerUnpublishVolume(ctx context.Context, req *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) { return CSIClient.ControllerClient.ControllerUnpublishVolume(ctx, req) } //NodeUnpublishVolume calls the UnpublishVolume in the node func (csi *Client) NodeUnpublishVolume(ctx context.Context, req *csi.NodeUnpublishVolumeRequest) (*csi.NodeUnpublishVolumeResponse, error) { return CSIClient.NodeClient.NodeUnpublishVolume(ctx, req) } //NodeUnstageVolume calls UnstageVolume in the node func (csi *Client) NodeUnstageVolume(ctx context.Context, req *csi.NodeUnstageVolumeRequest) (*csi.NodeUnstageVolumeResponse, error) { return CSIClient.NodeClient.NodeUnstageVolume(ctx, req) } //ValidateVolumeHostConnectivity calls the ValidateVolumeHostConnectivity in the podmon client func (csi *Client) ValidateVolumeHostConnectivity(ctx context.Context, req *csiext.ValidateVolumeHostConnectivityRequest) 
(*csiext.ValidateVolumeHostConnectivityResponse, error) { return CSIClient.PodmonClient.ValidateVolumeHostConnectivity(ctx, req) }
/**
 * Class for creating various kinds of terms. Instances of this class maintain
 * an internal cache that allows them to re-use the generated objects, which is
 * useful to safe memory since the same term is often needed in multiple places.
 *
 * @author Markus Kroetzsch
 *
 */
public class TermFactory {

	/**
	 * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used
	 * here for mapping VLog ids to terms.
	 *
	 * @author Markus Kroetzsch
	 *
	 * @param <K>
	 * @param <V>
	 */
	static class SimpleLruMap<K, V> extends LinkedHashMap<K, V> {

		private static final long serialVersionUID = 7151535464938775359L;
		private final int maxCapacity;

		public SimpleLruMap(int initialCapacity, int maxCapacity) {
			// accessOrder=true makes iteration order least-recently-accessed
			// first, which is what removeEldestEntry relies on for LRU eviction.
			super(initialCapacity, 0.75f, true);
			this.maxCapacity = maxCapacity;
		}

		@Override
		protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
			// Evict only once the map exceeds maxCapacity. The original used
			// ">=", which capped the cache at maxCapacity - 1 entries, one
			// fewer than the constructor parameter suggests.
			return size() > this.maxCapacity;
		}
	}

	private final SimpleLruMap<String, AbstractConstant> abstractConstants;
	private final SimpleLruMap<String, ExistentialVariable> existentialVariables;
	private final SimpleLruMap<String, UniversalVariable> universalVariables;
	private final SimpleLruMap<String, Predicate> predicates;

	public TermFactory() {
		this(65536);
	}

	/**
	 * Creates a term factory whose abstract-constant cache holds up to
	 * cacheSize entries; variable and predicate caches use fixed sizes.
	 *
	 * @param cacheSize maximum number of abstract constants to cache
	 */
	public TermFactory(int cacheSize) {
		abstractConstants = new SimpleLruMap<>(256, cacheSize);
		existentialVariables = new SimpleLruMap<>(64, 1024);
		universalVariables = new SimpleLruMap<>(64, 1024);
		predicates = new SimpleLruMap<>(256, 4096);
	}

	/**
	 * Creates a {@link UniversalVariable}.
	 *
	 * @param name name of the variable
	 * @return a {@link UniversalVariable} corresponding to the input.
	 */
	public UniversalVariable makeUniversalVariable(String name) {
		// computeIfAbsent replaces the containsKey/get/put triple with a single lookup.
		return universalVariables.computeIfAbsent(name, UniversalVariableImpl::new);
	}

	/**
	 * Creates an {@link ExistentialVariable}.
	 *
	 * @param name name of the variable
	 * @return a {@link ExistentialVariable} corresponding to the input.
	 */
	public ExistentialVariable makeExistentialVariable(String name) {
		return existentialVariables.computeIfAbsent(name, ExistentialVariableImpl::new);
	}

	/**
	 * Creates an {@link AbstractConstant}.
	 *
	 * @param name name of the constant
	 * @return an {@link AbstractConstant} corresponding to the input.
	 */
	public AbstractConstant makeAbstractConstant(String name) {
		return abstractConstants.computeIfAbsent(name, AbstractConstantImpl::new);
	}

	/**
	 * Creates a {@link DatatypeConstant} from the given input.
	 *
	 * @param lexicalValue the lexical representation of the data value
	 * @param datatypeIri  the full absolute IRI of the datatype of this literal
	 * @return a {@link DatatypeConstant} corresponding to the input.
	 */
	public DatatypeConstant makeDatatypeConstant(String lexicalValue, String datatypeIri) {
		return new DatatypeConstantImpl(lexicalValue, datatypeIri);
	}

	/**
	 * Creates a {@link LanguageStringConstant} from the given input.
	 *
	 * @param string      the string value of the constant
	 * @param languageTag the BCP 47 language tag of the constant; should be in
	 *                    lower case
	 * @return a {@link LanguageStringConstant} corresponding to the input.
	 */
	public LanguageStringConstant makeLanguageStringConstant(String string, String languageTag) {
		return new LanguageStringConstantImpl(string, languageTag);
	}

	/**
	 * Creates a {@link Predicate}.
	 *
	 * @param name  non-blank predicate name
	 * @param arity predicate arity, strictly greater than 0
	 * @return a {@link Predicate} corresponding to the input.
	 */
	public Predicate makePredicate(String name, int arity) {
		// Arity is part of the cache key: predicates with the same name but
		// different arity are distinct.
		String key = name + "#" + arity;
		return predicates.computeIfAbsent(key, k -> new PredicateImpl(name, arity));
	}
}
package org.pega.metrics.prpc.meter;

import com.pega.pegarules.pub.clipboard.ClipboardProperty;
import io.micrometer.core.instrument.Meter;
import io.micrometer.core.instrument.Tags;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.pega.metrics.prpc.cache.PrpcTags;
import org.pega.metrics.prpc.source.AbstractPrpcSource;
import org.pega.metrics.prpc.source.PrpcCallback;
import org.pega.metrics.prpc.source.PrpcSource;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.function.ToDoubleFunction;

/**
 * Multi-meter that registers one Micrometer gauge per row of a grouped
 * clipboard property, reading each row's value through a PrpcCallback.
 */
public class PrpcMultiGauge extends AbstractPrpcMultiMeter {

    private final String valuePropName;

    PrpcMultiGauge(PrpcMultiGaugeBuilder builder) {
        super(builder);
        this.valuePropName = builder.valuePropName;
    }

    @Override
    protected void registerMeter(Meter.Id rowId, AbstractRow row) {
        GaugeRow gaugeRow = (GaugeRow) row;
        getRegistry().gauge(rowId.getName(), row.getTags(), getSource(), gaugeRow.valueFunction);
    }

    @Override
    @SuppressWarnings("unchecked")
    protected Iterable<AbstractRow> rows(PrpcSource source) {
        List<AbstractRow> rows = Collections.emptyList();
        ClipboardProperty obtained = source.get().orElse(null);
        if (obtained != null && obtained.isGroup() && !obtained.isEmpty()) {
            rows = new LinkedList<>();
            // One gauge row per group entry; tags come from the configured tags property.
            for (ClipboardProperty item : (Iterable<ClipboardProperty>) obtained) {
                Tags tags = PrpcTags.of(item.getProperty(((AbstractPrpcSource) source).tagsPropName()));
                rows.add(new GaugeRow(tags, PrpcCallback.strong(source, tags, valuePropName)));
            }
        }
        return rows;
    }

    public static PrpcMultiGaugeBuilder builder(String name) {
        return new PrpcMultiGaugeBuilder().name(name);
    }

    @Override
    public int hashCode() {
        // toHashCode() is the documented terminal operation of HashCodeBuilder
        // (the original called hashCode(), which only works by delegation).
        return new HashCodeBuilder()
            .appendSuper(super.hashCode())
            .append(valuePropName)
            .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == null) return false;
        if (other == this) return true;
        if (other.getClass() != getClass()) return false;
        // Renamed from the misleading "builder" -- this is the other gauge, not a builder.
        PrpcMultiGauge that = (PrpcMultiGauge) other;
        return new EqualsBuilder()
            .appendSuper(super.equals(other))
            .append(valuePropName, that.valuePropName)
            .isEquals();
    }

    /** Row holding the tags plus the value-producing function for one gauge. */
    public static class GaugeRow extends AbstractRow {
        private final ToDoubleFunction<PrpcSource> valueFunction;

        GaugeRow(Tags tags, ToDoubleFunction<PrpcSource> valueFunction) {
            super(tags);
            this.valueFunction = valueFunction;
        }
    }

    /** Builder for {@link PrpcMultiGauge}; fixes the meter type to GAUGE. */
    public static class PrpcMultiGaugeBuilder extends AbstractPrpcMultiMeterBuilder<PrpcMultiGaugeBuilder> {
        private String valuePropName;

        public PrpcMultiGaugeBuilder valuePropName(String valuePropName) {
            this.valuePropName = valuePropName;
            return self();
        }

        @Override
        protected PrpcMultiGaugeBuilder self() {
            return this;
        }

        @Override
        public PrpcMultiGauge build() {
            type(Meter.Type.GAUGE);
            return new PrpcMultiGauge(this);
        }
    }
}
// Libraries import React, {PureComponent, ChangeEvent} from 'react' import _ from 'lodash' // Components import TabbedPageHeader from 'src/shared/components/tabbed_page/TabbedPageHeader' import {Input, IconFont, ComponentSize, EmptyState} from 'src/clockface' import TaskList from 'src/organizations/components/TaskList' import FilterList from 'src/shared/components/Filter' // Types import {Task} from 'src/api' interface Props { tasks: Task[] orgName: string } interface State { searchTerm: string } export default class Tasks extends PureComponent<Props, State> { constructor(props) { super(props) this.state = { searchTerm: '', } } public render() { const {searchTerm} = this.state const {tasks} = this.props return ( <> <TabbedPageHeader> <Input icon={IconFont.Search} placeholder="Filter tasks..." widthPixels={290} value={searchTerm} onChange={this.handleFilterChange} onBlur={this.handleFilterBlur} /> </TabbedPageHeader> <FilterList<Task> searchTerm={searchTerm} searchKeys={['name', 'owner.name']} list={tasks} > {ts => <TaskList tasks={ts} emptyState={this.emptyState} />} </FilterList> </> ) } private handleFilterBlur = (e: ChangeEvent<HTMLInputElement>): void => { this.setState({searchTerm: e.target.value}) } private handleFilterChange = (e: ChangeEvent<HTMLInputElement>): void => { this.setState({searchTerm: e.target.value}) } private get emptyState(): JSX.Element { const {orgName} = this.props const {searchTerm} = this.state if (_.isEmpty(searchTerm)) { return ( <EmptyState size={ComponentSize.Medium}> <EmptyState.Text text={`${orgName} does not own any Tasks , why not create one?`} highlightWords={'Tasks'} /> </EmptyState> ) } return ( <EmptyState size={ComponentSize.Medium}> <EmptyState.Text text="No Tasks match your query" /> </EmptyState> ) } }
def stringsRearrangement(inputArray):
    """Return True if the strings in inputArray can be reordered so that every
    pair of consecutive strings differs in exactly one position.

    The original implementation used a distance-histogram heuristic
    (metadata_check) that accepts impossible inputs such as
    ["ac", "bd", "ab"]. This version brute-forces all orderings with the
    file's own sequence() helper instead; puzzle inputs are small, so n!
    enumeration is fine.
    """
    from itertools import permutations
    return any(sequence(list(perm)) for perm in permutations(inputArray))


def metadata_check(distances):
    # Legacy heuristic over the sorted multiset of distances; kept for
    # backward compatibility but no longer used by stringsRearrangement
    # (it can report True for unsolvable inputs).
    distances = sorted(distances)
    prev = distances[0]
    count = 0
    frequencies = []
    for i in distances:
        if i == prev:
            count += 1
        elif i == prev + 1:
            frequencies.append(count)
            count = 1
            prev = i
        else:
            return False
    frequencies.append(count)
    if len(frequencies) == 1:
        return False
    return True


def distance(a, b):
    """Number of positions at which same-length strings a and b differ
    (Hamming distance); only mutations are counted, no swaps."""
    count = 0
    for i, j in zip(a, b):
        if i != j:
            count += 1
    return count


def distance_bool(a, b):
    """Return True iff the Hamming distance between a and b is exactly 1.

    Bug fix: the original returned True for identical strings (distance 0),
    even though its own comment stated that zeros aren't allowed.
    """
    count = 0
    for i, j in zip(a, b):
        if i != j:
            if count == 1:
                return False
            count = 1
    return count == 1


def sequence(inputArray):
    """True iff every adjacent pair in inputArray differs in exactly one position."""
    for i in range(1, len(inputArray)):
        if not distance_bool(inputArray[i], inputArray[i - 1]):
            return False
    return True
/**
 * Finds an AWS VPC with the specified CIDR.
 *
 * @param ec2  {@link AmazonEC2} client used to perform the lookup
 * @param cidr CIDR block to match
 * @return {@link Vpc} describing the VPC with the specified CIDR, otherwise
 *         <code>null</code>.
 */
private static Vpc findVpcByCidr(AmazonEC2 ec2, String cidr) {
    // Filter server-side on the "cidr" attribute and take the first match, if any.
    DescribeVpcsRequest request = new DescribeVpcsRequest()
            .withFilters(new Filter().withName("cidr").withValues(cidr));
    return ec2.describeVpcs(request)
            .getVpcs()
            .stream()
            .findFirst()
            .orElse(null);
}
/**
 * The abstract parent of all clinical Entry subtypes. A CareEntry defines
 * protocol and guideline attributes for all clinical Entry subtypes.
 *
 * @author Yin Su Lim
 * @version 1.0
 */
public abstract class CareEntry extends Entry {

	/**
	 * Construct a CareEntry
	 *
	 * @param uid                 null if unspecified
	 * @param archetypeNodeId     not null
	 * @param name                not null
	 * @param archetypeDetails    null if unspecified
	 * @param feederAudit         null if unspecified
	 * @param links               null if unspecified
	 * @param parent              null if this is a root node
	 * @param language            language of this entry
	 * @param encoding            character encoding of this entry
	 * @param subject             not null, the subject of care
	 * @param provider            not null, the information provider
	 * @param workflowId          null if unspecified
	 * @param otherParticipations null if unspecified
	 * @param protocol            null if unspecified
	 * @param guidelineId         null if unspecified
	 * @param terminologyService  passed through to the Entry superclass
	 * @throws IllegalArgumentException if archetypeNodeId or name null,
	 *                                  or subject or provider null or invalid
	 */
	protected CareEntry(UIDBasedID uid, String archetypeNodeId, DvText name,
			Archetyped archetypeDetails, FeederAudit feederAudit,
			Set<Link> links, Pathable parent, CodePhrase language,
			CodePhrase encoding, PartyProxy subject, PartyProxy provider,
			ObjectRef workflowId, List<Participation> otherParticipations,
			ItemStructure protocol, ObjectRef guidelineId,
			TerminologyService terminologyService) {
		super(uid, archetypeNodeId, name, archetypeDetails, feederAudit,
				links, parent, language, encoding, subject, provider,
				workflowId, otherParticipations, terminologyService);
		this.protocol = protocol;
		this.guidelineId = guidelineId;
	}

	/**
	 * Optional external identifier of guideline creating this action
	 * if relevant
	 *
	 * @return guidelineId
	 */
	public ObjectRef getGuidelineId() {
		return guidelineId;
	}

	/**
	 * Description of the method the information in this entry was arrived at.
	 *
	 * @return protocol
	 */
	public ItemStructure getProtocol() {
		return protocol;
	}

	//POJO start
	// No-arg constructor and setters exist only for persistence frameworks.
	CareEntry() {
	}

	public void setGuidelineId(ObjectRef guidelineId) {
		this.guidelineId = guidelineId;
	}

	void setProtocol(ItemStructure protocol) {
		this.protocol = protocol;
	}
	//POJO end

	/* fields */
	private ItemStructure protocol;
	private ObjectRef guidelineId;

	/* static fields */
	public static final String PROTOCOL = "protocol";
}
package main import ( "fmt" "os" "time" "github.com/natefinch/lumberjack" "go.uber.org/zap" "go.uber.org/zap/zapcore" ) func syslogTimeEncoder(t time.Time, enc zapcore.PrimitiveArrayEncoder) { enc.AppendString(fmt.Sprintf("|%s|", t.Format("2006-01-02T15:04:05"))) } func filelogTimeEncoder(t time.Time, enc zapcore.PrimitiveArrayEncoder) { enc.AppendString(fmt.Sprintf("%s", t.Format("2006-01-02T15:04:05"))) } func customLevelEncoder(level zapcore.Level, enc zapcore.PrimitiveArrayEncoder) { enc.AppendString(fmt.Sprintf("[%v]", level.CapitalString())) } func customEncodeCaller(caller zapcore.EntryCaller, enc zapcore.PrimitiveArrayEncoder) { enc.AppendString(fmt.Sprintf("%d", caller.Line)) } func main() { terminalEncoder := zapcore.NewConsoleEncoder(zapcore.EncoderConfig{ MessageKey: "M", LevelKey: "L", TimeKey: "T", NameKey: "N", CallerKey: "C", StacktraceKey: "S", LineEnding: zapcore.DefaultLineEnding, EncodeLevel: customLevelEncoder, EncodeTime: syslogTimeEncoder, EncodeDuration: zapcore.StringDurationEncoder, EncodeCaller: zapcore.ShortCallerEncoder, }) terminalOutput := zapcore.AddSync(os.Stderr) fileEncoder := zapcore.NewJSONEncoder(zapcore.EncoderConfig{ MessageKey: "M", LevelKey: "L", TimeKey: "T", NameKey: "N", CallerKey: "C", StacktraceKey: "S", LineEnding: zapcore.DefaultLineEnding, EncodeLevel: zapcore.CapitalLevelEncoder, EncodeTime: filelogTimeEncoder, EncodeDuration: zapcore.StringDurationEncoder, EncodeCaller: zapcore.ShortCallerEncoder, }) fileOutput := zapcore.AddSync(&lumberjack.Logger{ Filename: "logs.json", MaxSize: 100, // megabytes MaxBackups: 10, MaxAge: 28, // days Compress: true, }) InfoLevel := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { return lvl >= zapcore.InfoLevel }) ErrorLevel := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { return lvl >= zapcore.ErrorLevel }) core2 := zapcore.NewTee( zapcore.NewCore(terminalEncoder, terminalOutput, InfoLevel), zapcore.NewCore(fileEncoder, fileOutput, ErrorLevel), ) logger := zap.New(core2, 
zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel)) defer logger.Sync() logger.Info("logger.info") logger.Error("logger.Error ") i := 0 for { i++ logger.Error(fmt.Sprintf("logger.Error %d", i)) } }
package br.game.castleduel;

import br.game.castleduel.exception.PlayerException;
import br.game.castleduel.gui.GuiInterface;
import br.game.castleduel.gui.ServerGui;
import br.game.castleduel.gui.WindowGui;
import br.game.castleduel.player.PlayerFacade;
import br.game.castleduel.player.PlayerInfo;
import br.game.castleduel.time.FixedTimeRunnable;
import br.game.castleduel.time.GameTime;
import br.game.castleduel.unit.Unit;

/**
 * Core game orchestrator: loads the two players, drives the game loop in
 * either server (headless, unpaced) or windowed (frame-paced) mode, and
 * decides the winner.
 */
public class Game implements FixedTimeRunnable {

	protected GameTime time;
	protected Battleground battleground;
	protected PlayerFacade players;
	protected GuiInterface gui;
	protected Bank bank;
	// -1 while undecided, 0 for a draw, otherwise the winning player (1 or 2).
	protected int playerWonNumber = -1;

	/**
	 * Plays a full match.
	 *
	 * @param isServer true for headless server mode, false for windowed mode
	 * @param fps      target frames per second for the game clock
	 * @return the winning player's filename, or null on a draw
	 * @throws PlayerException only if the players could not be loaded at all
	 */
	public String play(boolean isServer, int fps) throws PlayerException {
		try {
			loadPlayers();
			loadGameLogic(isServer, fps);
			runGameLoop(isServer);
			finish();
		} catch (PlayerException e) {
			if (players == null) {
				// Loading itself failed; nothing to judge, so propagate.
				throw e;
			}
			// A faulting player forfeits: the other player wins.
			playerWonNumber = e.player != 1 ? 1 : 2;
		}
		if (playerWonNumber == 0) {
			return null;
		}
		return players.getFilename(playerWonNumber);
	}

	protected void loadPlayers() throws PlayerException {
		players = new PlayerFacade();
	}

	protected void loadGameLogic(boolean isServer, int fps) throws PlayerException {
		time = new GameTime(fps);
		gui = isServer ?
				new ServerGui() :
				new WindowGui(players.callGetName(0), players.callGetName(1));
		battleground = new Battleground(gui);
		bank = new Bank();
	}

	protected void runGameLoop(boolean isServer) {
		// Skip the loop entirely if a winner was already decided during loading.
		if (playerWonNumber == -1) {
			if (isServer) {
				runGameLoopServer();
			} else {
				runGameLoopWindow();
			}
		}
	}

	protected void runGameLoopServer() {
		// Server mode runs frames back-to-back without pacing.
		while (!battleground.isFinished() && time.canContinue()) {
			runBattle();
			time.nextFrame();
		}
	}

	protected void runGameLoopWindow() {
		// Windowed mode lets GameTime pace frames via runWithFixedTime callbacks.
		while (!battleground.isFinished() && time.canContinue()) {
			time.runWithSleep(this);
		}
	}

	@Override
	public void runWithFixedTime() {
		runBattle();
		gui.updateGame(time.getFramesLeft());
	}

	/** Runs one frame: player turns, gold income, battle step, GUI update. */
	protected void runBattle() {
		if (time.canPlayersPlay()) {
			runPlayers();
		}
		if (time.canReceiveGold()) {
			bank.increaseGold();
		}
		battleground.executeBattle();
		gui.setGold(bank.get(0), bank.get(1));
	}

	protected void runPlayers() {
		for (int playerIndex = 0; playerIndex < 2; playerIndex++) {
			final PlayerInfo info = battleground.getPlayerInfo(playerIndex);
			info.gold = bank.get(playerIndex);
			final int unitIndex = players.callPlay(info);
			// buyUnit appears to return null when no unit is bought (e.g. the
			// chosen unit is unaffordable) -- TODO confirm against Bank.buyUnit.
			final Unit unit = bank.buyUnit(playerIndex, unitIndex);
			if (unit != null) {
				battleground.addUnit(unit);
				gui.addSprite(unit.getSprite());
			}
		}
	}

	protected void finish() {
		setPlayerWonNumber();
		gui.setPlayerWon(playerWonNumber);
		gui.updateGame(0);
	}

	protected void setPlayerWonNumber() {
		if (playerWonNumber == -1) {
			// Higher remaining castle health wins; equal health is a draw (0).
			PlayerInfo player0 = battleground.getPlayerInfo(0);
			if (player0.castle > player0.castleEnemy) {
				playerWonNumber = 1;
			} else if (player0.castle < player0.castleEnemy) {
				playerWonNumber = 2;
			} else {
				playerWonNumber = 0;
			}
		}
	}
}
/**
 * Test using data taken from sample data file.
 * Check that the sampleCount, mu and sigma match data in the sample data file.
 */
@Test
public void testDoubleLoad() {
    // JUnit's assertEquals takes the EXPECTED value first; the original had
    // the arguments reversed throughout, which produces misleading failure
    // messages ("expected <actual> but was <expected>").
    Assert.assertEquals(1000, empiricalDistribution.getSampleStats().getN());
    Assert.assertEquals(5.069831575018909, empiricalDistribution.getSampleStats().getMean(), 1e-7);
    Assert.assertEquals(1.0173699343977738, empiricalDistribution.getSampleStats().getStandardDeviation(), 1e-7);

    double[] bounds = empiricalDistribution.getGeneratorUpperBounds();
    Assert.assertEquals(100, bounds.length);
    // The last generator upper bound must be exactly 1.0 (cumulative probability).
    Assert.assertEquals(1.0, bounds[99], 10e-12);
}
def note_name(note: int) -> str:
    """Return the pitch name (e.g. ``"Cs4"``) for a MIDI-style note number.

    Octaves follow the convention where note 60 is "C4" and note 0 is
    "C-1"; sharps are spelled with a trailing "s" (Cs, Ds, ...).
    """
    pitch_classes = ("C", "Cs", "D", "Ds", "E", "F", "Fs", "G", "Gs", "A", "As", "B")
    octave, pitch = divmod(note, 12)
    return "{}{}".format(pitch_classes[pitch], octave - 1)
<filename>packages/analyzer/src/test/core/async-work-cache_test.ts<gh_stars>100-1000
/**
 * @license
 * Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at
 * http://polymer.github.io/LICENSE.txt
 * The complete set of authors may be found at
 * http://polymer.github.io/AUTHORS.txt
 * The complete set of contributors may be found at
 * http://polymer.github.io/CONTRIBUTORS.txt
 * Code distributed by Google as part of the polymer project is also
 * subject to an additional IP rights grant found at
 * http://polymer.github.io/PATENTS.txt
 */

import {CancelToken} from 'cancel-token';
import {assert} from 'chai';

import {AsyncWorkCache} from '../../core/async-work-cache';
import {assertIsCancelled, invertPromise} from '../test-utils';

suite('AsyncWorkCache', () => {
  let cache: AsyncWorkCache<string, string>;

  // Each test starts with a fresh, empty cache.
  setup(() => {
    cache = new AsyncWorkCache<string, string>();
  });

  test('it works for the simple happy case', async () => {
    assert.equal(await cache.getOrCompute('key', async () => 'cool'), 'cool');
    // 'cool' was already cached.
    assert.equal(await cache.getOrCompute('key', async () => 'neat'), 'cool');
  });

  test('it handles parallel calls', async () => {
    // Only the first one actually runs
    const promises = [
      cache.getOrCompute('key', async () => 'good'),
      cache.getOrCompute(
          'key',
          async () => {
            throw new Error('Should not be called');
          }),
      cache.getOrCompute(
          'key',
          async () => {
            throw new Error('Should not be called');
          }),
    ];
    assert.deepEqual(await Promise.all(promises), ['good', 'good', 'good']);

    // Errors are cached too
    const failurePromises = [
      cache.getOrCompute(
          'badkey',
          async () => {
            throw new Error('failed');
          }),
      cache.getOrCompute('badkey', async () => 'good'),
      cache.getOrCompute('badkey', async () => 'good'),
    ].map(invertPromise);
    assert.deepEqual(
        (await Promise.all(failurePromises)).map((e) => e!.message!),
        ['failed', 'failed', 'failed']);
  });

  test('it handles a cancellation followed by a new request', async () => {
    const source = CancelToken.source();
    // The compute callback spins (yielding each iteration) until its token
    // reports cancellation, so promise1 only settles after source.cancel().
    const promise1 = cache.getOrCompute('key', async () => {
      while (true) {
        await Promise.resolve();
        source.token.throwIfRequested();
      }
    }, source.token);
    source.cancel();
    await assertIsCancelled(promise1);

    // A cancelled computation must not poison the key: a later request with a
    // fresh token computes and caches normally.
    const source2 = CancelToken.source();
    const promise2 = cache.getOrCompute('key', async () => {
      await Promise.resolve();
      return 'finished!';
    }, source2.token);
    assert.equal(await promise2, 'finished!');
  });

  const testName = `many parallel calls to getOrCompute, some that cancel,` +
      ` some that don't`;
  test(testName, async () => {
    // Pile up ten computations for the same key that are each cancelled
    // immediately; a non-cancelling caller must still get a result.
    const cancelledPromises: Promise<string>[] = [];
    for (let i = 0; i < 10; i++) {
      const source = CancelToken.source();
      cancelledPromises.push(cache.getOrCompute('key', async () => {
        while (true) {
          await Promise.resolve();
          source.token.throwIfRequested();
        }
      }, source.token));
      source.cancel();
    }
    assert.equal(await cache.getOrCompute('key', async () => {
      return 'cool';
    }), 'cool');
    // Every cancelled caller observes its own cancellation.
    for (const cancelled of cancelledPromises) {
      await assertIsCancelled(cancelled);
    }
  });
});
/**
 * Get the Streett acceptance condition that is the dual of this Rabin
 * acceptance condition: any word accepted by this condition is rejected by
 * the returned Streett condition.
 * <p>
 * Each Rabin pair contributes one Streett pair built from the same two
 * underlying sets (the Rabin K set becomes the Streett R set, the Rabin L
 * set becomes the Streett G set).
 *
 * @return the complement Streett acceptance condition
 */
public AcceptanceStreettDD complementToStreett()
{
	AcceptanceStreettDD result = new AcceptanceStreettDD();
	for (RabinPairDD rabinPair : this) {
		JDDNode streettR = rabinPair.getK();
		JDDNode streettG = rabinPair.getL();
		result.add(new AcceptanceStreettDD.StreettPairDD(streettR, streettG));
	}
	return result;
}
// GetMappingByInstanceId returns all things mapped to the instance with the given id. func (m *DBClient) GetMappingByInstanceId(ctx context.Context, instanceId string) ([]connector.ThingMapping, error) { var thingMappings []connector.ThingMapping err := m.DB.Select(&thingMappings, statementGetThingsByInstanceID, instanceId) if err != nil && err != sql.ErrNoRows { return nil, fmt.Errorf("failed to retrieve thing ids %v", err) } return thingMappings, nil }
Overall, 54.6% of students reported using SNSs for 2 h or less per day, 28.0% reported using them for more than 2 h d −1 and 17.4% reported infrequent or no use of SNSs (reference category). After adjustment for covariates, results showed that adolescent women who use SNSs for more than 2 h d −1 had greater odds of dissatisfaction with body weight (odds ratio = 2.02; 95% confidence interval [CI]: 1.30–3.16). More specifically, they were more likely to perceive themselves as overweight (relative risk ratio [RRR] = 2.20; 95% CI: 1.34−3.60) compared with those who reported infrequent or no use of SNSs. Conversely, men who use SNSs for 2 h or less per day presented a lower risk for perceiving themselves as overweight (RRR = 0.68; 95% CI: 0.47−0.98) but not those who use SNSs for more than 2 h d −1 . Women who use SNSs for more than 2 h d −1 reported a greater likelihood of trying to lose weight (RRR = 2.52; 95% CI: 1.62−3.90). Social networking sites (SNSs) not only offer users an opportunity to link with others but also allow individuals to compare themselves with other users. However, the link between the use of SNSs and the dissatisfaction with body weight is largely unknown. We investigated the associations between the use of SNSs and the perception of body weight and related behaviours among adolescent men and women. Introduction Social networking sites (SNSs) such as Facebook and MySpace are an opportunity to link with others and share aspects of their lives. SNSs, however, also allow users to post photos, which provide users with an opportunity to compare their appearance with others, thus placing users more at risk of body dissatisfaction and eating disorders 1-3. With an estimated 350 million photos uploaded on Facebook each day, research studies are needed to examine the possible link between the use of SNSs and the dissatisfaction with body weight among youth. 
Unlike traditional media such as TV and magazines, everybody can post pictures of themselves on social media. This offers more opportunity to compete against each other for a more attractive appearance, as users of SNSs are exposed to unrealistic and idealized body types that are based on the stereotype of a lean body for women and a muscular body for men. While perceived weight, or body image, does not always reflect reality 4, 5, having a negative body image is usually related to low self‐esteem and unhealthy eating and weight control behaviours 6, 7. Research studies on the link between media and body image have largely focused on traditional forms of media 8-10. Exposure to the thin ideal in media has been linked to body dissatisfaction and internalization of the thin ideal. More specifically, exposure to images of attractive women has been indicated to have a negative effect on women's appreciation of their body 10. Posavac et al. 11 demonstrated that the extent of the discrepancy women perceive between their own attractiveness and the body image representative of ideal feminine attractiveness presented in advertising and the broader media predicts how concerned they are with their weight. Similarly, body dissatisfaction has been reported among men when they view images of attractive male bodies 12, 13. However, although SNSs have become part of most adolescents' lives, very few studies have explored the associations between the use of these web‐based platforms and body image, and findings indicate that SNSs, particularly Facebook, have been related to body dissatisfaction and drive for thinness among adolescent women 14-16. For example, Meier and Gray 16 documented significant correlations of the amount of time allocated to photo activity on Facebook with weight dissatisfaction, drive for thinness, thin ideal internalization and self‐objectification in a sample of adolescent women aged 12 to 18 years.
Similarly, Tiggemann and Salter 14, 15 indicated that preadolescent and adolescent female Facebook users report more appearance concerns and dieting behaviour than nonusers and that this link significantly increases with the amount of time spent on SNSs. However, these studies used relatively small sample sizes and were conducted on samples of women only 14-16. No study has examined the link between the use of SNSs and the dissatisfaction with body weight in adolescent men. This is of particular importance because previous studies have shown that exposure to traditional forms of media is related to a desire to build muscle among adolescent men 17, 18. It is possible that SNSs would have a similar influence. It is also possible that the use of SNSs of 2 h or more per day, which is just above the screen time recommendation for adolescents 19, would be associated with body‐weight dissatisfaction in adolescents. The main objective of this observational, cross‐sectional study was to investigate the associations between the use of SNSs and the perception of body weight and related behaviours among Canadian adolescent men and women using province‐wide representative data from the Ontario Student Drug Use and Health Survey (OSDUHS). It was hypothesized that both male and female heavy users of SNSs (i.e. more than 2 h a day) would be more likely to report negative body image and a desire to do something about their body weight, differentially depending on sex. Methods Study design Carried out biennially in Ontario since 1977, the OSDUHS is the longest ongoing school survey in Canada and one of the longest in the world. The survey assesses the prevalence of self‐reported health‐risk behaviours among students in grades 7 to 12 (aged 11 to 20 years) 20. The OSDUHS uses a two‐stage cluster sample design involving a random selection of classes from within a random selection of schools (probability proportional to size) stratified by region and school type. 
The study was approved by the Research Ethics Board of the Centre for Addiction and Mental Health and York University, as well as existing research review committees of participating school boards. Written informed consent was obtained from parents/guardians, and consent/assent was obtained from students prior to participating in the survey. Further methodological details are available online 20. Sample A total of 10,272 students from 42 school boards, 198 schools and 671 classrooms completed the 2013 cycle of the OSDUHS. The student response rate was 63%, which is above average for a survey of students that requires active parental consent 21. Student nonresponse was due to absenteeism (11%) and unreturned consent forms or parental refusal (26%) 20. Only the random half sample of students who were selected to complete the Form B questionnaire (N = 4,794) that included questions on body‐weight perception and weight‐related attitude was included in the current study. The analytic sample comprised the 4,468 students (93.2%) for whom there were no missing data on measures including outcomes, explanatory variables and covariates. Measures Outcome variables Perception and intentions regarding body weight were measured using items that were adapted from the Centres for Disease Control and Prevention's Youth Risk Behaviour Survey. Both questions have demonstrated good reliability and validity among students. Perception of body weight Students were asked about how they perceived their body weight by identifying whether they felt that they were too thin, about the right weight or too fat. A second measure was constructed to represent dissatisfaction with body weight such that responses of ‘too thin’ or ‘too fat’ reflected the presence of dissatisfaction with body weight, whereas responses of ‘about the right weight’ reflected the absence of such dissatisfaction. 
Intentions regarding body weight Students were asked what they were doing about their body weight and identified whether they were (i) not doing anything; (ii) trying to lose weight; (iii) trying to keep from gaining weight or (iv) trying to gain weight. All four response options were used for analyses with ‘not doing anything’ treated as the reference category. Explanatory variable Use of social networking sites Students were asked how many hours a day they usually spend on social media websites such as Facebook, Twitter, MySpace and Instagram, either posting or browsing. The answer options were as follows: ‘less than 1 h a day’, ‘about 1 h a day’, ‘2 h a day’, ‘3 to 4 h a day’, ‘5 to 6 h a day’, ‘7 or more hours a day’, ‘visit these websites, but not daily’, ‘use the internet, but never visit these’ and ‘do not use the internet’. A three‐category measure was constructed wherein the three latter response options were combined to reflect infrequent or no use of SNSs (reference category); the first three categories were combined to reflect daily use of 2 h or less (regular use), and the remaining categories reflected daily use of more than 2 h (heavy use). The category of 2 h or less for daily recreational screen time among youth is the recommended cutoff from the current Canadian Sedentary Behaviour Guidelines 19 and a previous study 22. Covariates Sociodemographic characteristics Sociodemographics included sex, age (measured in years), ethnicity, subjective socioeconomic status (SES) and parental education. Ethnicity was measured using responses to a question asking students to select one or more categories that best described their ethnic background. The categories listed were similar to those used in the 2006 Canadian Census 23. Students who selected only one ethnic background were grouped into five categories including Caucasian, African–American, East or Southeast Asian, South Asian and other. 
Students who selected multiple ethnic backgrounds were coded as other. Subjective SES was measured using the youth version of the MacArthur Scale of Subjective Social Status 24. Minor modifications were made to the youth scale to assess the family's place within society. The question was presented with a drawing of a ladder with 10 rungs that was described as follows: Imagine this ladder below shows how Canadian society is set up. At the top of the ladder are people who are the ‘best off’ – they have the most money, the most education, and the jobs that bring the most respect. At the bottom are the people who are ‘worst off’ – they have the least money, little education, no job or jobs that no one wants. Now think about your family. Please check off the numbered box that best shows where you think your family would be on this ladder. For analysis purposes, a dichotomous measure was constructed to represent low (<7) and high (≥7) subjective SES, wherein low scores represent those below the mean 25. Parental education was measured using the following items: ‘How far did your father go in school?’ and ‘How far did your mother go in school?’ Response options referred to attended or graduated high school, college or university and did not attend high school. For analysis purposes, response options were grouped to reflect the education of the parent with the highest level of education: university degree, some college/university degree, high school or less or do not know. Statistical analysis Taylor series linearization methods within Stata (version 13.0, Stata Corp, College Station, Texas, USA) were used in all analyses to adjust for the stratified and clustered complex survey design. Sampling weights were included in analyses to adjust for unequal probability of selection 20.
Descriptive characteristics of participants by time spent using SNSs and bivariate associations between time spent using SNSs and perceived body weight and weight‐related attitudes were tested by Pearson chi‐square adjusted for the survey design and transformed into an F‐statistic for categorical data and by an adjusted Wald test for continuous data. Given that sex by the use of SNSs interaction was significant for the outcome variable of perceived body weight, all analyses were stratified by sex. Multivariate associations between time spent using SNSs (independent variable where infrequent or no use of SNSs was treated as the reference category) and dissatisfaction with body weight, perceived body weight and intentions regarding body weight (dependent variables) were tested with both binary and multinomial logistic regressions. Covariates included age, ethnicity, subjective SES and parental education. Statistical analyses were conducted at a threshold of α = 0.05. Results Descriptive characteristics of the study sample according to the amount of time spent on SNSs are provided in Table 1. Of the 4,468 students who were included in our analysis, 48.5% were women, 58.8% identified themselves as Caucasian, and 68.7% were from higher SES families. Overall, 54.6% of students reported using SNSs for 2 h or less per day, 28.0% reported using them for more than 2 h day−1, and 17.4% reported infrequent or no use of SNSs. Women were more likely than men to report using SNSs for more than 2 h day−1 (36.6% vs. 19.9%, p < 0.001). Students of African–American ethnic background were more likely than Caucasian to report using SNSs for more than 2 h day−1 (39.8% vs. 26.8%, p = 0.04). Table 1. 
Time spent using social networking sites by demographic characteristics Total sample (N = 4,468) % Infrequent or no use (N = 927) % Daily use of 2 h or less (N = 2,359) % Daily use of more than 2 h (N = 1,182) % Total 17.4 54.6 28.0 Sex***† Men 51.5 21.7 58.4 19.9 Women 48.5 12.8 50.6 36.6 Ethnicity**† Caucasian 58.8 14.9 58.4 26.8 African–American 7.1 19.9 40.4 39.8 East/Southeast Asian 10.4 20.9 50.0 29.1 South Asian 11.4 22.1 53.9 24.0 Other 12.3 20.9 49.4 29.7 Subjective SES† Low 31.3 17.7 50.6 31.6 High 68.7 17.3 56.4 26.3 Parental education***† University degree 22.7 18.8 58.4 22.7 Some college/university 27.3 16.5 57.5 26.0 High school or less 27.4 12.7 52.7 34.6 Do not know 22.5 22.7 49.6 27.7 Age***‡ mean (years) ± SD 15.3 ± 1.9 14.5 ± 1.7 15.4 ± 1.9 15.6 ± 1.7 Table 2 presents the prevalence of dissatisfaction with body weight and related attitudes by sex. Overall, 35.5% of students were dissatisfied with their body weight, 23.7% perceived themselves as overweight, and 23.8% were trying to lose weight. Women were more likely than men to be dissatisfied with their body weight (40.0% vs. 31.2%, p < 0.001). They were more likely to perceive themselves as overweight than men (32.6 vs. 15.3%, p < 0.001), while men were more likely than women to perceive themselves as underweight (15.9% vs. 7.4%, p < 0.001). Women were more likely to try to lose weight than men (39.0% vs. 21.2%, p < 0.001), while men were more likely to try to gain weight than their female counterparts (21.9% vs. 5.2%, p < 0.001). Table 2. 
Perception and intention regarding body weight by sex Total sample (N = 4,468) % Men (N = 2,014) % Women (N = 2,454) % Dissatisfaction with body weight***† No 64.5 68.8 60.0 Yes 35.5 31.2 40.0 Perceived body weight***† About right weight 64.5 68.8 60.0 Underweight 11.8 15.9 7.4 Overweight 23.7 15.3 32.6 Intention regarding body weight***† Not doing anything 33.7 38.0 29.3 Trying to lose weight 29.8 21.2 39.0 Trying to keep from gaining weight 22.6 18.9 26.6 Trying to gain weight 13.8 21.9 5.2 Table 3 presents the results of multivariate logistic regression analyses testing the associations between the use of SNSs and the perceptions and intentions regarding body weight among adolescent women. After adjustment for age, ethnicity, subjective SES and parental education, results from logistic regression analysis showed that adolescent women who use SNSs for more than 2 h day−1 had greater odds of dissatisfaction with body weight (odds ratio = 2.02; 95% confidence interval [CI]: 1.30–3.16). More specifically, results from multinomial logistic regression indicated that women who use SNSs for more than 2 h day−1 were more likely to perceive themselves as overweight (relative risk ratio [RRR] = 2.20; 95% CI = 1.34–3.60) compared with those who reported infrequent or no use of SNSs. Women who use SNSs for more than 2 h day−1 had a greater likelihood of trying to lose weight (RRR = 2.51; 95% CI = 1.62–3.90). Table 3. 
Associations between the use of social networking sites and the perception and intentions regarding body weight among adolescent women Unadjusted† Adjusted†, ‡ Daily use of 2 h or less OR/RRR (95% CI)§ Daily use of more than 2 h OR/RRR (95% CI) Daily use of 2 h or less OR/RRR (95% CI) Daily use of more than 2 h OR/RRR (95% CI) Dissatisfaction with body weight¶ No 1 1 1 1 Yes 1.05 (0.70–1.58) 2.18 (1.45–3.29)*** 1.05 (0.68–1.62) 2.02 (1.30–3.16)** Perceived body weight∥ About right weight 1 1 1 1 Underweight 0.70 (0.35–1.40) 1.55 (0.81–2.99) 0.81 (0.39–1.67) 1.67 (0.84–3.32) Overweight 1.18 (0.76–1.83) 2.40 (1.51–3.83)*** 1.13 (0.70–1.84) 2.20 (1.34–3.60)** Attitude about body weight∥ Not doing anything 1 1 1 1 Trying to lose weight 1.58 (0.97–2.57) 2.95 (1.95–4.49)*** 1.45 (0.86–2.45) 2.51 (1.62–3.90)*** Trying to keep from gaining weight 1.48 (0.94–2.35) 1.87 (1.17–2.99)** 1.31 (0.81–2.11) 1.61 (0.99–2.61) Trying to gain weight 1.33 (0.60–2.92) 2.25 (0.98–5.17) 1.56 (0.71–3.41) 2.31 (0.99–5.37) Results of multivariate logistic regression analyses on the associations between the use of SNSs and the perception and intentions regarding body weight among adolescent men are provided in Table 4. Unadjusted multinomial logistic regression analyses indicated that men who use SNSs for more than 2 h day−1 had a greater likelihood of perceiving themselves as overweight (RRR = 1.86; 95% CI = 1.13–3.04) compared with those who reported infrequent or no SNSs use; however, this association became nonsignificant after adjustment for age, ethnicity, subjective SES and parental education. Conversely, adjusted analyses revealed that men who use SNSs for 2 h or less per day presented a lower likelihood of perceiving themselves as overweight (RRR = 0.68; 95% CI = 0.47–0.98) but not those who use SNSs for more than 2 h day−1 (Table 4). Table 4. 
Associations between the use of social networking sites and the perception and intentions regarding body weight among adolescent men Unadjusted† Adjusted†, ‡ Daily use of 2 h or less OR/RRR (95% CI)§ Daily use of more than 2 h OR/RRR (95% CI) Daily use of 2 h or less OR/RRR (95% CI) Daily use of more than 2 h OR/RRR (95% CI) Dissatisfaction with body weight¶ No 1 1 1 1 Yes 1.04 (0.75–1.43) 1.26 (0.78–2.02) 1.01 (0.73–1.39) 1.14 (0.69–1.88) Perceived body weight∥ About right weight 1 1 1 1 Underweight 1.64 (0.99–2.70) 1.86 (1.13–3.04)* 1.55 (0.94–2.54) 1.65 (0.99–2.76) Overweight 0.71 (0.49–1.01) 0.93 (0.46–1.85) 0.68 (0.47–0.98)* 0.87 (0.43–1.73) Attitude about body weight ∥ Not doing anything 1 1 1 1 Trying to lose weight 1.04 (0.65–1.66) 1.57 (0.78–3.16) 1.11 (0.70–1.76) 1.75 (0.87–3.51) Trying to keep from gaining weight 0.75 (0.48–1.17) 0.91 (0.52–1.59) 0.77 (0.47–1.25) 0.95 (0.51–1.75) Trying to gain weight 1.30 (0.87–1.93) 1.67 (0.97–2.86) 1.05 (0.70–1.59) 1.33 (0.74–2.41) Discussion Our study provides evidence of associations between the use of SNSs and the negative perception of body weight and weight control behaviours. However, the relationship between SNSs and dissatisfaction with body weight and related attitudes greatly varies by gender and by the amount of time spent on SNSs. Our results showed that adolescent women who use SNSs for more than 2 h day−1 had greater odds of dissatisfaction with body weight, were more likely to perceive themselves as overweight and were more at risk of trying to lose weight. Men who use SNSs for 2 h or less per day presented a lower risk for perceiving themselves as overweight, and this was the only significant association found. Collectively, these findings suggest that the connection between the use of SNSs and the weight‐related attitude is stronger in women than men and future studies should try to better explore this discrepancy in order to better inform health strategies. 
These results are consistent with previous studies showing that the use of SNSs, particularly Facebook, is related to body dissatisfaction and drive for thinness among adolescent women 14-16. Appearance comparison has been indicated to be a possible mechanism explaining the association between the use of Facebook and the body image concerns 3, 14, 15. Adolescent women may become concerned that their own weight is not acceptable when they perceive a discrepancy between their friends' bodies representing an ideal body or accepted standard of female attractiveness and their own bodies 26. Such a comparison occurring on SNSs may be more devastating than that of traditional media because most of the pictures posted on SNSs are of friends or friends of friends. This likely offers adolescents more opportunity to compete against each other for a more attractive appearance. It is worth mentioning that SNSs are not necessarily bad for adolescents, but it depends on how they are using them. For example, Meier and Gray 16 recently reported that elevated appearance exposure, but not overall Facebook usage, was significantly correlated with weight dissatisfaction, drive for thinness, thin ideal internalization and self‐objectification. Given that parental influences via verbal messages and active encouragement have been shown to have greater impact on offspring's body concerns and eating behaviours among adolescents 27, future prevention efforts targeting parents are necessary to encourage them to engage in an open conversation with adolescents about the idealized nature of pictures posted on SNSs and the negative impact that comparing such pictures can have on their health. Our results showed that adolescent men who use SNSs for 2 h or less per day have lower risk of perceiving themselves as overweight. To the best of our knowledge, this is the first study to report an association between the use of SNSs and the perceived body weight in a subpopulation of adolescent men. 
These results are somewhat concordant with previous research showing a link between traditional media exposure and perception of body weight among adolescent men 17, 28, 29. Men are generally concerned about muscularity and engage in behaviours to increase weight and musculature 17, 28, 29. These findings provide further support for the public health guidelines recommending that children and youth limit their screen time to no more than 2 h day−1 19, 30. The use of SNSs and dissatisfaction with body weight are both known to be more prevalent among women than men 31-33. Women are typically more likely to use SNSs and use them for more hours than their male counterparts 22. More work is therefore needed to explore the differences between adolescent men and women on the relationships between the use of SNSs and the perception of body weight and weight control behaviours. Contrary to our findings and those of other correlational studies among female high school and university students 3, 14-16, Fardouly et al. 34 recently reported in their experimental study of 112 university women that exposure to Facebook was not related to dissatisfaction with body weight or to the desire to change weight and shape. They found that women who tend to make more appearance comparisons on Facebook have a greater desire to change their face, hair and skin‐related appearance 34. The discrepant findings are likely to be due to differences in studies' methodology and population. Although observational, the present study used a large and representative sample of middle and high school students in a Canadian province and used a more general measure of use of SNSs, rather than focusing on Facebook only. Further research is needed to disentangle these mixed results on the relationship between the use of SNSs and the body dissatisfaction. Our results need to be interpreted in light of the following. 
First, the cross‐sectional design limits the ability to make causal inferences about the relationships observed. Second, the data used were based on self‐reports, and thus, bias related to such reports may be an issue, particularly for more sensitive measures such as dissatisfaction with body weight. Third, the use of single questions to measure perception and intentions regarding body weight may raise potential issues related to reliability. Additionally, these measures have not been previously validated. Fourth, the SNS measure provides information on the amount of time spent on SNSs but not on the way participants are using those web‐based platforms. For example, it does not differentiate passive engagement (simple browsing) from active engagement such as posting, liking and commenting. Another limitation is that the study population does not include approximately 8% of students within the regular school system in Ontario, mostly from private and alternate schools. It is possible that this excluded group of adolescents differs with respect to the use of SNSs and dissatisfaction with body weight. Thus, the external generalizability of our findings may be restricted to the study population. Finally, although our analyses adjusted for important covariates such as SES, ethnicity and parental education 35-37, we did not include parental weight – a variable not measured in this survey. Tienboon et al. 38 reported that adolescent girls who tried to lose weight had significantly heavier mothers, but not fathers, than those who had not attempted to lose weight. Future studies may investigate the role of parental weight within the associations between use of SNSs and adolescent body‐weight perception and attitude. 
In conclusion, the present study is the first to provide evidence that daily use of SNSs of more than 2 h among adolescent women is related to greater odds of dissatisfaction with body weight and the desire to lose weight and that daily use of SNSs of 2 h or less was related to lower risk of self‐perception of being overweight among men. Although replication studies are needed, our findings provide support for education around self‐esteem and body image and reduction of time spent on SNSs as a possible means to prevent dissatisfaction with body appearance among adolescents. Prevention and intervention programmes fostering self‐esteem and positive body image among adolescents, particularly women, are needed in order to protect them from idealized nature of images posted on SNSs and risk related to comparisons. Conflict of Interest Statement No conflict of interest was declared. Acknowledgements H. S. K. and J. P. C conceived the study. H. S. K. performed the statistical analyses and drafted the manuscript. J. P. C. and H. H. contributed to the interpretation of results and provided critical reviews of the manuscript. H. H. is a co‐investigator of the survey on which the analyses are based. All authors read and approved the final manuscript.
import unittest

from src.spider.QQZoneFriendMoodSpider import QQZoneFriendMoodSpider


class FriendMoodSpiderTest(unittest.TestCase):
    """Smoke tests for QQZoneFriendMoodSpider construction and basic operations."""

    def test_init(self):
        """The spider must be constructible with default arguments."""
        QQZoneFriendMoodSpider()

    def test_change_name(self):
        """change_username() must change the effective username away from the raw one."""
        fms = QQZoneFriendMoodSpider(mood_num=20)
        fms.change_username("120000", "test")
        # Use unittest's assertion instead of a bare `assert`: it produces a
        # diagnostic message on failure and is not stripped under `python -O`.
        self.assertNotEqual(fms.raw_username, fms.username)

    def test_get_friend_mood(self):
        """get_friend_mood() must run without raising (network smoke test)."""
        fms = QQZoneFriendMoodSpider(mood_num=20)
        fms.get_friend_mood()


if __name__ == '__main__':
    unittest.main()
<filename>cmd/wrgl/hub/repo/delete_cmd.go // SPDX-License-Identifier: Apache-2.0 // Copyright © 2021 Wrangle Ltd package repo import ( "fmt" "github.com/spf13/cobra" "github.com/wrgl/wrgl/cmd/wrgl/hub/api" "github.com/wrgl/wrgl/cmd/wrgl/utils" ) func deleteCmd() *cobra.Command { cmd := &cobra.Command{ Use: "delete NAME", Short: "Remove a repository.", Long: "Remote a repository. This also wipes the data completely.", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { cs, uri, tok, err := getWrglHubCreds(cmd) if err != nil { return err } user, err := api.GetMe(tok) if err != nil { return utils.HandleHTTPError(cmd, cs, api.APIRoot, uri, err) } val, err := utils.Prompt(cmd, fmt.Sprintf("Type repo name (%s) for confirmation", args[0])) if err != nil { return err } if val != args[0] { return fmt.Errorf("input mismatch %q != %q", val, args[0]) } if err = api.DeleteRepo(tok, user.Username, args[0]); err != nil { return err } cmd.Printf("Deleted repository %q\n", args[0]) return nil }, } return cmd }
Smart Response Concept for Fire Emergency Response in Surabaya City The city of Surabaya is the second largest city in Indonesia. At present the growth and development of the city of Surabaya is very rapid in various fields. Increased population density, the number of office building developments, residential areas, industries that are increasingly developing, causing vulnerability to fires. Fire events in the city of Surabaya tend to fluctuate. But the number of events is quite large, especially in 2017. So it is necessary to have innovative efforts to reduce losses due to fires in the city of Surabaya. This fire incident certainly gives a big loss not only materially but also casualties. In this case the Government of Surabaya has begun to implement smart city as one of the steps to deal with various problems in the city of Surabaya, one of which is fire. Through the concept of smart city, it is expected to help reduce losses caused by fires. In line with this, the aim of this research is to formulate a concept of smart fire response to reduce fire losses that occur and as an innovation in fire management. To support the implementation of the research, a triangulation analysis of stakeholders will be carried out (confirmation process from the formulation of researchers related to intelligent response concepts based on existing conditions and results of literature review of expert theories) which then results in stakeholder triangulation as the final result of this intelligent response concept formulation. Keywords—Fire; management fire, smart response, smart fire response INTRODUCTION The city of Surabaya has an area of ± 326.81 km² which is divided into 31 sub-districts and 131 sub-districts. The city of Surabaya is one of the big cities in Indonesia. Surabaya City ranks second among Indonesia's major cities, after the City of Jakarta.
As a big city, Surabaya has a high population density, as seen from the population in the city of Surabaya based on the 2010 population census of 2,765,487 inhabitants. At present the growth and development of the city of Surabaya is very rapid in various fields. Increased population density, the number of office building developments, residential areas, industries that are increasingly developing, causing vulnerability to fires. In line with the increasing population, it will increase the need for shelter, resulting in a tendency to increase buildings which then increases the potential for fire threats in the city of Surabaya. Fire events in the city of Surabaya occur repeatedly every year. In 2012 there were 573 fire incidents, in 2014 there were 596 fire incidents, in 2015 there were 600 fire incidents. But there was a decrease in fire events in 2016, where only 300 fire incidents occurred in the city of Surabaya. However, the occurrence of fires increased throughout 2017, when there were 321 fire incidents in the city of Surabaya. Based on the number of fire incidents that occurred throughout 2017, the losses were quite large. Losses due to fire incidents that occurred in the city of Surabaya throughout 2017 from 321 total fire incidents (January-November period) amounted to 18.2 billion rupiah. The fires burned 82 houses and 14 factories and consumed as many as 225 plots of grassland. As for losses in terms of casualties, 21 people were injured and 1 person died. In an effort to reduce fire losses, the Surabaya City Government implemented e-government that helped the process of managing regional development and community services. E-government also has a system such as a disaster alert system that can help the government in monitoring the occurrence of traffic accidents, fires, fallen trees. This system makes it easier for the government to find the location of the disaster.
For example, if a fire occurs, the PMK officer can search for the nearest water source from the location of the fire and the dashboard of his car. This E-Gov is part of an effort to implement the smart city concept. At present the city of Surabaya is being aggressively developing smart cities. Nijkamp et al. (2009) state that smart cities are cities that are able to use human resources (HR), human capital, and modern telecommunications infrastructure to realize sustainable economic growth and high quality of life. According to Cohen (2010), there are six main dimensions of the smart city: smart governance, smart mobility, smart environment, smart people, smart economy, and smart living. Of the six dimensions, in its application each city can focus on one dimension that exists. For now the city of Surabaya has tried to apply the concept of smart city by providing public services known as command centers that can be accessed by the public through the complaint number 112. There are also known e-dishub applications related to transportation in the city of Surabaya, which is also an effort to realize smart mobility. In line with this, there is currently no research related to fire management based on smart mobility. The principle of smart mobility can be applied to optimize the response to disaster reports / news. Based on the principle of smart mobility a "smart response" will be formulated for a fire disaster in the city of Surabaya. Therefore this study focuses on formulating the concept of quick response to fire in the city of Surabaya. Based on this, this study wants to find out how the right quick response concept is based on the principle of smart mobility for fire disaster emergency response as an effort to reduce losses due to fire disasters which are still a threat due to constraints of fire fighting facilities in reaching fire locations. II.
RESEARCH METHODS There are several stages in this study: the first is to compare the results of interviews with stakeholders generated through an in-depth interview process, which are then analyzed using content analysis. The results of content analysis then become input in the next stage to formulate the concept of intelligent response to fire in the city of Surabaya. The analysis results in the form of a list of potential and problems faced in the effort to handle fires faced by the Surabaya City government. At the next stage the potential results and problems are compared with the intelligent response criteria that have been prepared based on a collection of expert theories and existing conditions in the city of Surabaya. This stage is done by making a comparative and triangulation table (where the results of the concept of intelligent fire response will be confirmed again with the relevant stakeholders), and then the final results are obtained in the form of the "intelligent fire response concept in Surabaya City". III. SMART RESPONSE VARIABLE Based on the results of library synthesis from several theories related to this study, it was found that several variables could support this study, including the following: And then, to formulate the concept, the potency and problems, smart response criteria and existing conditions will be compared and analyzed, and then confirmed with stakeholders. The illustration of this process can be seen below: The picture above shows the process of drafting the concept of fire rapid response that will be applied in the city of Surabaya.
The formulation process will be explained as follows: 1) The first column explains the indicator of this research, namely rapid response; 2) the second column explains each research variable; 3) the next column compares the potential and problems of fire handling in the city of Surabaya, the rapid response criteria, and the results of interviews with stakeholders, from which the appropriate response concept can be formulated and applied in the city of Surabaya. Based on the above process from the comparative table, the formulation of intelligent response concepts can be derived and applied in emergency response to fire in the city of Surabaya. The formulation of this concept is based on the results of the potential and problems faced and intelligent response criteria from the results of the study of several theories and interviews with relevant stakeholders. There are 12 main concepts that have been formulated in the study, which can be seen in the pictures below: ACKNOWLEDGMENT Thank you to all the respondents, stakeholders, and my mentors for all their help while compiling this journal. I am also very grateful to all agencies involved in assisting in the preparation of this journal including the Command Center 112 Surabaya, Surabaya City Police, Surabaya City Fire Department, Surabaya City Transportation Department and also Surabaya City Disaster Management And Community Protection Department which have been very helpful and contributed data in this study. I also thank my parents and all my friends who supported me to finish this journal to the end.
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK hide #-}

-----------------------------------------------------------------------------
-- |
-- Module      :  GHC.IOPort
-- Copyright   :  (c) <NAME> 2019
-- License     :  see libraries/base/LICENSE
--
-- Maintainer  :  <EMAIL>
-- Stability   :  internal
-- Portability :  non-portable (GHC Extensions)
--
-- The IOPort type. This is a facility used by the Windows IO subsystem.
-- We have strict rules with an I/O Port:
-- * writing more than once is an error
-- * reading more than once is an error
--
-- It gives us the ability to have one thread to block, wait for a result from
-- another thread and then being woken up. *Nothing* more.
--
-- This type is very much GHC internal. It might be changed or removed without
-- notice in future releases.
--
-----------------------------------------------------------------------------

module GHC.IOPort (
        -- * IOPorts
          IOPort(..)
        , newIOPort
        , newEmptyIOPort
        , readIOPort
        , writeIOPort
        , doubleReadException
    ) where

import GHC.Base
import GHC.Exception
import Text.Show

-- | Thrown when an 'IOPort' is read (or written) a second time.
data IOPortException = IOPortException deriving Show

instance Exception IOPortException where
    displayException IOPortException = "IOPortException"

-- | Pre-built exception value used by the RTS for double-read failures.
doubleReadException :: SomeException
doubleReadException = toException IOPortException

data IOPort a = IOPort (IOPort# RealWorld a)
{- ^
An 'IOPort' is a synchronising variable, used
for communication between concurrent threads, where one of the threads is
controlled by an external state. e.g. by an I/O action that is serviced by the
runtime. It can be thought of as a box, which may be empty or full.

It is mostly similar to the behavior of 'Control.Concurrent.MVar.MVar'
except 'writeIOPort' doesn't block if the variable is full and the GC
won't forcibly release the lock if it thinks there's a deadlock.

The properties of IOPorts are:
* Writing to an empty IOPort will not block.
* Writing to a full IOPort will not block. It might throw an exception.
* Reading from an IOPort for the second time might throw an exception.
* Reading from a full IOPort will not block, return the value and empty the port.
* Reading from an empty IOPort will block until a write.
* Reusing an IOPort (that is, reading or writing twice) is not supported
  and might throw an exception. Even if reads and writes are interleaved.

This type is very much GHC internal. It might be changed or removed without
notice in future releases.
-}

-- | @since 4.1.0.0
instance Eq (IOPort a) where
        (IOPort ioport1#) == (IOPort ioport2#) =
            isTrue# (sameIOPort# ioport1# ioport2#)

-- |Create an 'IOPort' which is initially empty.
newEmptyIOPort :: IO (IOPort a)
newEmptyIOPort = IO $ \ s# ->
    case newIOPort# s# of
         (# s2#, svar# #) -> (# s2#, IOPort svar# #)

-- |Create an 'IOPort' which contains the supplied value.
-- Implemented as new-empty followed by a write, so the single-write
-- budget of the port is already spent after this call.
newIOPort :: a -> IO (IOPort a)
newIOPort value =
    newEmptyIOPort >>= \ ioport ->
    writeIOPort ioport value >>
    return ioport

-- |Atomically read the contents of the 'IOPort'. If the 'IOPort' is
-- currently empty, 'readIOPort' will wait until it is full. After a
-- 'readIOPort', the 'IOPort' is left empty.
--
-- There is one important property of 'readIOPort':
--
-- * Only a single thread can be blocked on an 'IOPort'.
--
readIOPort :: IOPort a -> IO a
readIOPort (IOPort ioport#) = IO $ \ s# -> readIOPort# ioport# s#

-- |Put a value into an 'IOPort'. If the 'IOPort' is currently full,
-- 'writeIOPort' will throw an exception.
--
-- There is one important property of 'writeIOPort':
--
-- * Only a single thread can be blocked on an 'IOPort'.
--
-- Returns 'False' when the primop reports failure (port already full),
-- 'True' otherwise.
writeIOPort :: IOPort a -> a -> IO Bool
writeIOPort (IOPort ioport#) x = IO $ \ s# ->
    case writeIOPort# ioport# x s# of
        (# s, 0# #) -> (# s, False #)
        (# s, _  #) -> (# s, True #)
/**
 * Verifies that the given condition holds.
 *
 * @param condition     the predicate that must be true
 * @param messageFormat a {@link String#format} pattern for the failure message
 * @param args          arguments substituted into {@code messageFormat}
 * @throws SerializationException if {@code condition} is false
 */
void ensureCondition(boolean condition, String messageFormat, Object... args) throws SerializationException {
	if (condition) {
		return;
	}
	final String message = String.format(messageFormat, args);
	throw new SerializationException(message);
}
async def _get_alpha_numbers(category):
    """Return a mapping of first-letter bucket -> item count for *category*.

    Buckets are 'a'..'z' plus '#' (names starting with a digit); buckets with
    no items are present with a count of 0.
    """
    sql = 'SELECT IF(SUBSTR(name, 1, 1) REGEXP \'^[0-9]\', \'#\', LOWER(SUBSTR(name, 1, 1))) as alpha, COUNT(*) as number ' \
          'FROM _items ' \
          'WHERE category = %s ' \
          'GROUP BY alpha'
    result = await objects.execute(Item.raw(sql, category))
    ret = {chr(i): 0 for i in range(ord('a'), ord('z') + 1)}
    ret['#'] = 0
    for row in result:
        # Names starting with punctuation or non-ASCII characters produce
        # buckets outside 'a'..'z'/'#'; assigning them blindly raised
        # KeyError before — skip anything we do not expose.
        if row.alpha is None or row.alpha not in ret:
            continue
        ret[row.alpha] = row.number
    return ret
#include <bits/stdc++.h> using namespace std; class ClinderellaGirls { public: //O(nlogn) int count(vector<int> t, vector<int> s) { vector<pair<int, int>> ts(t.size(), make_pair(0, 0)); for(int i = 0; i < t.size(); i++) { ts[i].first = t[i]; ts[i].second = s[i]; } sort(ts.begin(), ts.end(), [](auto ts1, auto ts2){return ts1.first < ts2.first || (ts1.first == ts2.first && ts1.second < ts2.second);}); int ans = 0; vector<int> left(ts.size(), INT_MIN), right(ts.size(), INT_MIN); int maxx = INT_MIN, maxx1 = INT_MIN; for(int i = t.size()-2; i >= 0; i--) { maxx = max(maxx, ts[i+1].second); maxx1 = max(maxx1, ts[i+1].first); right[i] = maxx; left[i] = maxx1; } left[t.size()-1] = ts[t.size()-1].first; right[t.size()-1] = ts[t.size()-1].second; for(int i = 0; i < t.size(); i++) { if(left[i] == ts[i].first || right[i] <= ts[i].second) ans++; } return ans; } //o(n^2) int count(vector<int> t, vector<int> s, int temp) { int ans = 0; for(int i = 0; i < t.size(); i++) { for(int j = 0; j < t.size(); j++) { if(i == j) continue; if(t[j] > t[i] && s[j] > s[i]) { ans++; break; } } } return ans; } }; int main() { ClinderellaGirls* app = new ClinderellaGirls(); vector<int> t{46}, s{81}; cout<<app->count(t, s, 1)<<endl; delete app; return 0; }
// BuildContinue takes 1) one argument, an identifer node // 2) empty TreeNode* ASTBuilder::BuildContinue() { if (mTrace) std::cout << "In BuildContinue " << std::endl; ContinueNode *continue_node = (ContinueNode*)gTreePool.NewTreeNode(sizeof(ContinueNode)); new (continue_node) ContinueNode(); TreeNode *target = NULL; if (mParams.size() == 1) { Param p_target = mParams[0]; if (!p_target.mIsEmpty) { MASSERT(p_target.mIsTreeNode && "Target in BuildContinue is not a tree."); target = p_target.mData.mTreeNode; MASSERT(target->IsIdentifier() && "Target in BuildContinue is not an identifier."); continue_node->SetTarget(target); } } mLastTreeNode = continue_node; return continue_node; }
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package packagestest_test

import (
	"go/token"
	"testing"

	"golang.org/x/tools/go/expect"
	"golang.org/x/tools/go/packages/packagestest"
	"golang.org/x/tools/internal/span"
)

// TestExpect exercises Exported.Expect against the annotations in testdata,
// covering every supported handler-argument conversion: positions, bools,
// ints, strings, identifiers, ranges and EOF positions.
func TestExpect(t *testing.T) {
	exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{
		Name:  "golang.org/fake",
		Files: packagestest.MustCopyFileTree("testdata"),
	}})
	defer exported.Cleanup()
	checkCount := 0
	if err := exported.Expect(map[string]interface{}{
		// Counts every @check note encountered across the test data.
		"check": func(src, target token.Position) {
			checkCount++
		},
		"boolArg": func(n *expect.Note, yes, no bool) {
			if !yes {
				t.Errorf("Expected boolArg first param to be true")
			}
			if no {
				t.Errorf("Expected boolArg second param to be false")
			}
		},
		"intArg": func(n *expect.Note, i int64) {
			if i != 42 {
				t.Errorf("Expected intarg to be 42")
			}
		},
		"stringArg": func(n *expect.Note, name expect.Identifier, value string) {
			if string(name) != value {
				t.Errorf("Got string arg %v expected %v", value, name)
			}
		},
		"directNote": func(n *expect.Note) {},
		"range": func(r span.Range) {
			if r.Start == token.NoPos || r.Start == 0 {
				t.Errorf("Range had no valid starting position")
			}
			if r.End == token.NoPos || r.End == 0 {
				t.Errorf("Range had no valid ending position")
			} else if r.End <= r.Start {
				t.Errorf("Range ending was not greater than start")
			}
		},
		"checkEOF": func(n *expect.Note, p token.Pos) {
			if p <= n.Pos {
				t.Errorf("EOF was before the checkEOF note")
			}
		},
	}); err != nil {
		t.Fatal(err)
	}
	// We expect to have walked the @check annotations in all .go files,
	// including _test.go files (XTest or otherwise). But to have walked the
	// non-_test.go files only once.
	// NOTE(review): the itemized breakdown "3 (testdata/test.go) + 1
	// (testdata/test_test.go) + 1 (testdata/x_test.go)" sums to 5, which does
	// not match wantCheck = 7 below — confirm the per-file @check counts in
	// testdata and correct whichever side is wrong.
	wantCheck := 7
	if wantCheck != checkCount {
		t.Fatalf("Expected @check count of %v; got %v", wantCheck, checkCount)
	}
}
import React, { useState } from 'react'; import { TouchableWithoutFeedback, Keyboard } from 'react-native'; import { useNavigation, useRoute } from '@react-navigation/native'; import { Button } from '../../components/Button'; import { InputIcon } from '../../components/InputIcon'; import { HeaderSignIn } from '../../components/HeaderSignIn'; import { useAuth } from '../../hooks/auth'; import { Container, Field, Footer } from './styles'; interface Params { username: string; } export function SignInPassword() { const [loading, setLoading] = useState(false); const [password, setPassword] = useState(''); const { signIn } = useAuth(); const navigation = useNavigation(); const route = useRoute(); const { username } = route.params as Params; async function handleSignIn() { try { setLoading(true); const credentials = { username, password }; return await signIn(credentials); } catch (error) { setLoading(false); console.log(error); } } function handleBack() { navigation.goBack(); } return ( <TouchableWithoutFeedback onPress={Keyboard.dismiss}> <Container> <HeaderSignIn onPress={handleBack} title="Entre com sua senha" back /> <Field> <InputIcon icon="md-lock-closed" placeholder="Senha" mask="SSSSSSSS" secureTextEntry onChangeText={setPassword} returnKeyType="send" onSubmitEditing={handleSignIn} /> </Field> <Footer> <Button title="Entrar" enabled={!loading} loading={loading} onPress={handleSignIn}/> </Footer> </Container> </TouchableWithoutFeedback> ); }
def list(self, **kwargs):
    """Return flattened dicts for every managed disk in the resource group.

    Each disk is serialized with ``as_dict`` and normalized through
    ``self.update_dict`` before being collected. ``kwargs`` is accepted for
    interface compatibility but unused here.
    """
    collected = []
    disks = self.compute_client.disks.list_by_resource_group(self.group_name)
    for disk in disks:
        normalized = self.update_dict([disk.as_dict()])
        collected.extend(normalized)
    return collected
/**
 * Paints this component: fills the gutter background, then renders one line
 * number per visible text line, right-aligned for LTR components and
 * left-aligned for RTL, skipping lines hidden inside collapsed folds.
 *
 * @param g The graphics context.
 */
@Override
protected void paintComponent(Graphics g) {

	if (textArea==null) {
		return;
	}

	// Restrict painting to the clip region; fall back to the whole
	// visible rect if no clip is set.
	visibleRect = g.getClipBounds(visibleRect);
	if (visibleRect==null) { // ???
		visibleRect = getVisibleRect();
	}
	if (visibleRect==null) {
		return;
	}

	// Fill the background, preferring the enclosing Gutter's color.
	Color bg = getBackground();
	if (getGutter()!=null) { // Should always be true
		bg = getGutter().getBackground();
	}
	g.setColor(bg);
	g.fillRect(0,visibleRect.y, cellWidth,visibleRect.height);
	g.setFont(getFont());
	if (aaHints!=null) {
		((Graphics2D)g).addRenderingHints(aaHints);
	}

	// Wrapped lines need a per-line height computation; delegate.
	if (textArea.getLineWrap()) {
		paintWrappedLineNumbers(g, visibleRect);
		return;
	}

	// Clamp the paint region to start below the text area's top inset.
	textAreaInsets = textArea.getInsets(textAreaInsets);
	if (visibleRect.y<textAreaInsets.top) {
		visibleRect.height -= (textAreaInsets.top - visibleRect.y);
		visibleRect.y = textAreaInsets.top;
	}
	// First visible cell (fixed cell height — no wrapping here) and the
	// baseline y of the first number to draw.
	int topLine = (visibleRect.y-textAreaInsets.top)/cellHeight;
	int actualTopY = topLine*cellHeight + textAreaInsets.top;
	int y = actualTopY + ascent;

	// With code folding, the first visible cell corresponds to a later
	// logical line; shift by the number of lines hidden above it.
	FoldManager fm = null;
	if (textArea instanceof RSyntaxTextArea) {
		fm = ((RSyntaxTextArea)textArea).getFoldManager();
		topLine += fm.getHiddenLineCountAbove(topLine, true);
	}
	final int rhsBorderWidth = getRhsBorderWidth();

	/*
	// Highlight the current line's line number, if desired.
	if (textArea.getHighlightCurrentLine() && currentLine>=topLine &&
			currentLine<=bottomLine) {
		g.setColor(textArea.getCurrentLineHighlightColor());
		g.fillRect(0,actualTopY+(currentLine-topLine)*cellHeight,
					cellWidth,cellHeight);
	}
	*/

	g.setColor(getForeground());
	boolean ltr = getComponentOrientation().isLeftToRight();
	if (ltr) {
		// LTR: right-align numbers against the right-hand border.
		FontMetrics metrics = g.getFontMetrics();
		int rhs = getWidth() - rhsBorderWidth;
		int line = topLine + 1;
		while (y<visibleRect.y+visibleRect.height+ascent && line<=textArea.getLineCount()) {
			String number = Integer.toString(line + getLineNumberingStartIndex() - 1);
			int width = metrics.stringWidth(number);
			g.drawString(number, rhs-width,y);
			y += cellHeight;
			// Skip over lines swallowed by collapsed folds starting here.
			if (fm!=null) {
				Fold fold = fm.getFoldForLine(line-1);
				// Skip to next line to paint, taking extra care for lines past
				// end of document (line.count of modified documents is not
				// updated yet)
				while (fold!=null && fold.isCollapsed()) {
					int hiddenLineCount = fold.getLineCount();
					if (hiddenLineCount==0) {
						// Fold parser identified a 0-line fold region... This
						// is really a bug, but we'll handle it gracefully.
						break;
					}
					line += hiddenLineCount;
					fold = fm.getFoldForLine(line-1);
				}
			}
			line++;
		}
	}
	else { // rtl
		// RTL: left-align numbers against the border width.
		int line = topLine + 1;
		while (y<visibleRect.y+visibleRect.height && line<textArea.getLineCount()) {
			String number = Integer.toString(line + getLineNumberingStartIndex() - 1);
			g.drawString(number, rhsBorderWidth, y);
			y += cellHeight;
			// NOTE(review): this branch queries fm.getFoldForLine(line) inside
			// the loop (vs. line-1 in the LTR branch) and uses slightly
			// different loop bounds — confirm whether the asymmetry with the
			// LTR branch is intentional.
			if (fm!=null) {
				Fold fold = fm.getFoldForLine(line-1);
				while (fold!=null && fold.isCollapsed()) {
					line += fold.getLineCount();
					fold = fm.getFoldForLine(line);
				}
			}
			line++;
		}
	}

}
package org.andengine.opengl.vbo; import android.opengl.GLES20; /** * (c) Zynga 2012 * * @author <NAME> <<EMAIL>> * @since 15:41:14 - 27.03.2012 */ public enum DrawType { // =========================================================== // Elements // =========================================================== STATIC(GLES20.GL_STATIC_DRAW), DYNAMIC(GLES20.GL_DYNAMIC_DRAW), STREAM(GLES20.GL_STREAM_DRAW); private final int mUsage; private DrawType(final int pUsage) { this.mUsage = pUsage; } public int getUsage() { return this.mUsage; } }
<filename>shop-web/user-api/tests/viper_test.go package main import ( "fmt" "github.com/spf13/viper" "testing" ) type MysqlConfig struct { Host string `mapstructure:"host"` port int `mapstructure:"port"` } type ServerConfig struct { ServerName string `mapstructure:"name"` port int `mapstructure:"port"` MysqlInfo MysqlConfig `mapstructure:"mysql"` } func TestConfig(t *testing.T) { v := viper.New() v.SetConfigFile("config.yaml") if err := v.ReadInConfig(); err != nil { panic(err) } serverConfig := ServerConfig{} if err := v.Unmarshal(&serverConfig); err != nil { panic(err) } fmt.Println(serverConfig) }
/**
 * Draws the outline of, or fills, the polygon/polyline given by
 * (xArr[i], yArr[i]) for i in [0, pointCount), clipped to {@code clip}.
 * Dispatches between a direct edge-drawing fast path and a pixel-flag-based
 * path needed for filling and for non-opaque colors.
 *
 * @param areHorVerFlipped Only used when filling.
 * @param isPolyline Only used if isFillElseDraw is false.
 */
private static void drawOrFillPoly(
		GRect clip,
		int[] xArr,
		int[] yArr,
		int pointCount,
		boolean areHorVerFlipped,
		boolean isFillElseDraw,
		boolean isPolyline,
		InterfaceColorDrawer colorDrawer,
		InterfaceClippedPointDrawer clippedPointDrawer,
		InterfaceClippedLineDrawer clippedLineDrawer,
		InterfaceLineDrawer lineDrawer,
		InterfaceRectDrawer rectDrawer) {

	if (DEBUG) {
		Dbg.log("drawOrFillPoly(" + clip + ",,," + pointCount + ", " + areHorVerFlipped + ", " + isFillElseDraw + ", " + isPolyline + ",,,,,)");
		Dbg.log("xArr = " + Arrays.toString(xArr));
		Dbg.log("yArr = " + Arrays.toString(yArr));
	}

	/*
	 * Clip checks.
	 * Usually not doing these early clip checks for other primitives,
	 * since they just draw one thing.
	 * Here, it can avoid trying to draw a lot of edges.
	 */

	if (clip.isEmpty()) {
		// Nothing to draw.
		return;
	}

	// Clipped bounding box of the polygon; empty means fully off-clip.
	final GRect bbox = GprimUtils.computePolyBoundingBox(
			xArr,
			yArr,
			pointCount);
	final GRect cbbox = bbox.intersected(clip);
	if (cbbox.isEmpty()) {
		// Nothing to draw.
		return;
	}

	/*
	 * Here, pointCount is >= 1 (since cbbox is not empty).
	 *
	 * Ruling out pathological cases now
	 * to make further code simpler and safer.
	 */

	if (pointCount <= 2) {
		if (pointCount <= 0) {
			/*
			 * Not complaining.
			 */
		} else {
			// Degenerate polygon: a point or a single segment.
			if (pointCount == 1) {
				clippedPointDrawer.drawPointInClip(
						xArr[0], yArr[0]);
			} else {
				lineDrawer.drawLine(
						clip,
						xArr[0], yArr[0],
						xArr[1], yArr[1]);
			}
		}
		return;
	}

	/*
	 *
	 */

	final boolean isOpaque = colorDrawer.isColorOpaque();

	if (isOpaque && (!isFillElseDraw)) {
		// Quick path: just drawing poly's edges.
		// For a polyline the closing edge (last -> first) is skipped.
		final int i0 = (isPolyline ? 1 : 0);
		for (int i = i0; i < pointCount; i++) {
			final int ii = ((i == 0) ? pointCount - 1 : i - 1);
			lineDrawer.drawLine(
					clip,
					xArr[ii], yArr[ii],
					xArr[i], yArr[i]);
		}
		return;
	}

	/*
	 * Special casing for when the clip is in or out of the polygon,
	 * so that in these cases we don't bother with using pixels flags,
	 * which overhead is proportional to cbbox due to initial zeroization.
	 * NB: Doesn't return for polyline when only the "missing" segment
	 * overlaps clip, but that's fine.
	 */

	{
		final boolean isClipClearlyInOrOut =
				isClipClearlyInOrOutOfPolygon(
						clip,
						xArr,
						yArr,
						pointCount);
		if (isClipClearlyInOrOut) {
			if (isFillElseDraw) {
				// Classify via the clip's center point: either the whole
				// clip is inside (fill it entirely) or outside (nothing).
				final boolean isClipInPolyElseOut =
						GprimUtils.isInPolygon(
								xArr,
								yArr,
								pointCount,
								clip.xMid(),
								clip.yMid());
				if (DEBUG) {
					Dbg.log("isClipInPolyElseOut = " + isClipInPolyElseOut);
				}
				if (isClipInPolyElseOut) {
					rectDrawer.fillRect(
							clip,
							clip.x(), clip.y(), clip.xSpan(), clip.ySpan(),
							areHorVerFlipped);
				} else {
					// Nothing to fill.
				}
			} else {
				// Nothing to draw.
			}
			return;
		}
	}

	/*
	 * Here we need to use pixels flags.
	 */

	// Thread-local scratch structures, reused across calls.
	final MyTemps temps = TL_TEMPS.get();

	final MyClippedPointDrawerWithFlag clippedPointDrawerWithFlag =
			temps.clippedPointDrawerWithFlag;
	final DefaultClippedLineDrawer clippedLineDrawerWithFlag =
			temps.clippedLineDrawer;

	// cbbox is possibly smaller than clip, doesn't hurt even if we still
	// give clip as drawXxx() arg.
	clippedPointDrawerWithFlag.reset(
			temps,
			isFillElseDraw,
			clippedPointDrawer,
			cbbox);
	clippedLineDrawerWithFlag.configure(
			clippedPointDrawerWithFlag);

	/*
	 * Drawing edges, and while doing it setting flags
	 * for edges pixels.
	 */

	final int i0 = (isPolyline ? 1 : 0);
	for (int i = i0; i < pointCount; i++) {
		final int ii = ((i == 0) ? pointCount - 1 : i - 1);
		DefaultLineDrawer.drawLine(
				clip,
				xArr[ii], yArr[ii],
				xArr[i], yArr[i],
				clippedLineDrawerWithFlag);
	}

	if (!isFillElseDraw) {
		if (isOpaque) {
			/*
			 * Case already handled above.
			 */
			throw new AssertionError();
		} else {
			/*
			 * Nothing more to do.
			 */
			return;
		}
	}

	/*
	 * Here, must fill.
	 */

	fillPolygon_edgesFlagsComputed(
			xArr,
			yArr,
			pointCount,
			areHorVerFlipped,
			clippedLineDrawer,
			rectDrawer,
			cbbox,
			clippedPointDrawerWithFlag.flagManager,
			clippedPointDrawerWithFlag.xMinArr,
			clippedPointDrawerWithFlag.xMaxArr);
}
/**
 * Tests for Day12LeonardoMonorail (Advent of Code 2016, day 12): runs assembunny
 * programs and checks the final value of register 'a'.
 *
 * @author Michael Heymel
 * @since 12/12/16
 */
public class Day12LeonardoMonorailTest {

    @Test
    public void test1() {
        // Example program from the puzzle statement: 41 + 2 - 1 = 42.
        String[] input = new String[]{"cpy 41 a", "inc a", "inc a", "dec a", "jnz a 2", "dec a"};
        Day12LeonardoMonorail leonardosMonorail = new Day12LeonardoMonorail();
        int value = leonardosMonorail.determineRegisterValue(input, createRegisters(0), 'a');
        Assert.assertEquals(42, value);
    }

    @Test
    public void testQuestion1() throws Exception {
        File inputFile = FileHelper.forUnitTests("adventofcode/y2016/Day12.txt");
        String[] instructions = FileHelper.readLines(inputFile);
        Day12LeonardoMonorail leonardosMonorail = new Day12LeonardoMonorail();
        int value = leonardosMonorail.determineRegisterValue(instructions, createRegisters(0), 'a');
        Assert.assertEquals(318077, value);
    }

    @Test
    public void testQuestion2() throws Exception {
        // Part 2 of the puzzle: identical program, but register 'c' starts at 1.
        File inputFile = FileHelper.forUnitTests("adventofcode/y2016/Day12.txt");
        String[] instructions = FileHelper.readLines(inputFile);
        Day12LeonardoMonorail leonardosMonorail = new Day12LeonardoMonorail();
        int value = leonardosMonorail.determineRegisterValue(instructions, createRegisters(1), 'a');
        Assert.assertEquals(9227731, value);
    }

    /**
     * Builds the initial register map: registers 'a', 'b' and 'd' start at 0,
     * register 'c' starts at {@code cValue}. Replaces the two near-duplicate
     * getQ1StartMap/getQ2StartMap helpers, which differed only in this value.
     */
    private Map<Character, Integer> createRegisters(int cValue) {
        Map<Character, Integer> registerValues = new HashMap<>();
        registerValues.put('a', 0);
        registerValues.put('b', 0);
        registerValues.put('c', cValue);
        registerValues.put('d', 0);
        return registerValues;
    }
}
// Copyright (c) 2010-2022, Lawrence Livermore National Security, LLC. Produced
// at the Lawrence Livermore National Laboratory. All Rights reserved. See files
// LICENSE and NOTICE for details. LLNL-CODE-806117.
//
// This file is part of the MFEM library. For more information and source code
// availability visit https://mfem.org.
//
// MFEM is free software; you can redistribute it and/or modify it under the
// terms of the BSD-3 license. We welcome feedback and contributions, see file
// CONTRIBUTING.md for details.

#ifndef MFEM_VTK
#define MFEM_VTK

#include "../fem/geom.hpp"
#include "../general/binaryio.hpp"

namespace mfem
{

// Helpers for reading and writing VTK format

/// @brief Helper class for converting between MFEM and VTK geometry types.
///
/// Note: The VTK element types defined are at: https://git.io/JvZLm
struct VTKGeometry
{
   /// @name VTK geometry types
   ///@{
   static const int POINT = 1;

   /// @name Low-order (linear, straight-sided) VTK geometric types
   ///@{
   static const int SEGMENT = 3;
   static const int TRIANGLE = 5;
   static const int SQUARE = 9;
   static const int TETRAHEDRON = 10;
   static const int CUBE = 12;
   static const int PRISM = 13;
   ///@}

   /// @name Legacy quadratic VTK geometric types
   ///@{
   static const int QUADRATIC_SEGMENT = 21;
   static const int QUADRATIC_TRIANGLE = 22;
   static const int BIQUADRATIC_SQUARE = 28;
   static const int QUADRATIC_TETRAHEDRON = 24;
   static const int TRIQUADRATIC_CUBE = 29;
   static const int QUADRATIC_PRISM = 26;
   static const int BIQUADRATIC_QUADRATIC_PRISM = 32;
   ///@}

   /// @name Arbitrary-order VTK geometric types
   ///@{
   static const int LAGRANGE_SEGMENT = 68;
   static const int LAGRANGE_TRIANGLE = 69;
   static const int LAGRANGE_SQUARE = 70;
   static const int LAGRANGE_TETRAHEDRON = 71;
   static const int LAGRANGE_CUBE = 72;
   static const int LAGRANGE_PRISM = 73;
   ///@}
   ///@}

   /// Permutation from MFEM's prism ordering to VTK's prism ordering.
   static const int PrismMap[6];

   /// @brief Permutation from MFEM's vertex ordering to VTK's vertex ordering.
   /// @note If the MFEM and VTK orderings are the same, the vertex permutation
   /// will be NULL.
   static const int *VertexPermutation[Geometry::NUM_GEOMETRIES];

   /// Map from MFEM's Geometry::Type to linear VTK geometries.
   static const int Map[Geometry::NUM_GEOMETRIES];
   /// Map from MFEM's Geometry::Type to legacy quadratic VTK geometries.
   static const int QuadraticMap[Geometry::NUM_GEOMETRIES];
   /// Map from MFEM's Geometry::Type to arbitrary-order Lagrange VTK geometries
   static const int HighOrderMap[Geometry::NUM_GEOMETRIES];

   /// Given a VTK geometry type, return the corresponding MFEM Geometry::Type.
   static Geometry::Type GetMFEMGeometry(int vtk_geom);
   /// @brief Does the given VTK geometry type describe an arbitrary-order
   /// Lagrange element?
   static bool IsLagrange(int vtk_geom);
   /// @brief Does the given VTK geometry type describe a legacy quadratic
   /// element?
   static bool IsQuadratic(int vtk_geom);
   /// @brief For the given VTK geometry type and number of points, return the
   /// order of the element.
   static int GetOrder(int vtk_geom, int npoints);
};

/// Data array format for VTK and VTU files.
enum class VTKFormat
{
   /// Data arrays will be written in ASCII format.
   ASCII,
   /// Data arrays will be written in binary format. Floating point numbers
   /// will be output with 64 bits of precision.
   BINARY,
   /// Data arrays will be written in binary format. Floating point numbers
   /// will be output with 32 bits of precision.
   BINARY32
};

/// @brief Create the VTK element connectivity array for a given element
/// geometry and refinement level.
///
/// The output array @a con will be such that, for the @a ith VTK node index,
/// con[i] will contain the index of the corresponding node in MFEM ordering.
void CreateVTKElementConnectivity(Array<int> &con, Geometry::Type geom,
                                  int ref);

/// @brief Outputs encoded binary data in the base 64 format needed by VTK.
/// /// The binary data will be base 64 encoded, and compressed if @a /// compression_level is not zero. The proper header will be prepended to the /// data. void WriteVTKEncodedCompressed(std::ostream &os, const void *bytes, uint32_t nbytes, int compression_level); /// @brief Return the VTK node index of the barycentric point @a b in a /// triangle with refinement level @a ref. /// /// The barycentric index @a b has three components, satisfying b[0] + b[1] + /// b[2] == ref. int BarycentricToVTKTriangle(int *b, int ref); /// Determine the byte order and return either "BigEndian" or "LittleEndian" const char *VTKByteOrder(); /// @brief Write either ASCII data to the stream or binary data to the buffer /// depending on the given format. /// /// If @a format is VTK::ASCII, write the canonical ASCII representation of @a /// val to the output stream. Subnormal floating point numbers are rounded to /// zero. Otherwise, append its raw binary data to the byte buffer @a buf. /// /// Note that there are specializations for @a uint8_t (to write as a numeric /// value rather than a character), and for @a float and @a double values to use /// the precision specified by @a format. template <typename T> void WriteBinaryOrASCII(std::ostream &os, std::vector<char> &buf, const T &val, const char *suffix, VTKFormat format) { if (format == VTKFormat::ASCII) { out << val << suffix; } else { bin_io::AppendBytes(buf, val); } } /// @brief Specialization of @ref WriteBinaryOrASCII for @a uint8_t to ensure /// ASCII output is numeric (rather than interpreting @a val as a character.) template <> void WriteBinaryOrASCII<uint8_t>(std::ostream &os, std::vector<char> &buf, const uint8_t &val, const char *suffix, VTKFormat format); /// @brief Specialization of @ref WriteBinaryOrASCII for @a double. /// /// If @a format is equal to VTKFormat::BINARY32, @a val is converted to a @a /// float and written as 32 bits. Subnormals are rounded to zero in ASCII /// output. 
template <>
void WriteBinaryOrASCII<double>(std::ostream &os, std::vector<char> &buf,
                                const double &val, const char *suffix,
                                VTKFormat format);

/// @brief Specialization of @ref WriteBinaryOrASCII<T> for @a float.
///
/// If @a format is equal to VTKFormat::BINARY, @a val is converted to a @a
/// double and written as 64 bits. Subnormals are rounded to zero in ASCII
/// output.
template <>
void WriteBinaryOrASCII<float>(std::ostream &os, std::vector<char> &buf,
                               const float &val, const char *suffix,
                               VTKFormat format);

/// @brief Encode in base 64 (and potentially compress) the given data, write it
/// to the output stream (with a header) and clear the buffer.
///
/// @sa WriteVTKEncodedCompressed.
void WriteBase64WithSizeAndClear(std::ostream &os, std::vector<char> &buf,
                                 int compression_level);

} // namespace mfem

#endif
/** * An activity which tests the user's knowledge of our solar system. */ public class MultipleSelectionActivity extends AbstractQuestionActivity { private static final int MULTIPLE_SELECTION_LIMIT = 4; /** * The question to display. */ private static final String QUESTION = "Which of the following are moons of Jupiter? " + "You may select up to four answers."; /** * The answers and the associated identifiers to display. */ private static final LinkedHashMap<CharSequence, Answer> answerMap = new LinkedHashMap<>(); static { answerMap.put("1.", new ImmutableAnswer("Pheobe", false)); answerMap.put("2.", new ImmutableAnswer("Ganymede", true)); answerMap.put("3.", new ImmutableAnswer("Triton", false)); answerMap.put("4.", new ImmutableAnswer("Lunar", false)); answerMap.put("5.", new ImmutableAnswer("Kore", true)); answerMap.put("6.", new ImmutableAnswer("Callisto", true)); answerMap.put("7.", new ImmutableAnswer("Titan", false)); } @Override protected void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); displayQuestionAndAnswers(); getAnswerGroup().setMultipleSelectionLimit(MULTIPLE_SELECTION_LIMIT); } /** * Adds all answer and identifiers to the view. 
*/ @SuppressWarnings("unchecked") private void displayQuestionAndAnswers() { getQuestionContainer().setText(QUESTION); for (final CharSequence identifier : answerMap.keySet()) { final DecoratedAnswerCard decoratedAnswerCard = new DecoratedAnswerCard(this); decoratedAnswerCard.setLayoutParams(new LayoutParams(MATCH_PARENT, WRAP_CONTENT)); decoratedAnswerCard.setIdentifier(identifier, false); decoratedAnswerCard.setAnswer(answerMap.get(identifier), false); decoratedAnswerCard.addDecorator(createColorFadeDecorator(), false); decoratedAnswerCard.addDecorator(createAlphaDecorator(), false); getAnswerGroup().addAnswer(decoratedAnswerCard); } } /** * @return a new ColorFadeDecorator */ private ColorFadeDecorator createColorFadeDecorator() { final ColorSupplier colorSupplier = new ColorSupplier() { @Override public int getColor(final boolean marked, final boolean selected, final boolean answerIsCorrect) { if (marked) { if (selected) { return answerIsCorrect ? 0xFF2E7D32 : 0xFFb71c1c; // Green, red } else { return answerIsCorrect ? 0xFF673AB7 : 0xFFFFFFFF; // Purple, white } } else { return selected ? 0xFFFF9800 : 0xFFFFFFFF; // Orange, white } } }; return new ColorFadeDecorator(colorSupplier); } /** * @return a new AlphaDecorator */ private AlphaDecorator createAlphaDecorator() { final AlphaSupplier alphaSupplier = new AlphaSupplier() { @Override public float getAlpha(final boolean marked, final boolean selected, final boolean answerIsCorrect) { if (marked && !selected && !answerIsCorrect) { return 0.3f; } else { return 1f; } } }; return new AlphaDecorator(alphaSupplier); } }
//------------------------------------------------------------- // Return Y component of projection matrix //------------------------------------------------------------- double CGLView::getYProjection(double x, double y, double z){ double wy=P[1]*x+P[5]*y+P[9]*z+P[13]; double winy=0.5*(wy+1); return winy; }
Mathematical knowledge: internal, social and cultural aspects I discuss some general aspects of the creation, interpretation, and reception of mathematics as a part of civilization and culture. First, one can simply identify mathematics with the contents of mathematical manuscripts, books, papers and lectures, with the increasingly growing net of theorems, definitions, proofs, constructions, conjectures (should I include software as well ?...) -with what contemporary mathematicians present at the conferences, keep in the libraries and electronic archives, take pride in, award each other for, and occasionally bitterly dispute the origin of. In short, mathematics is simply what mathematicians are doing, exactly in the same way as music is what musicians are doing. Second, one can argue that mathematics is a human activity deeply rooted in reality, and permanently returning to reality. From counting on one's fingers to moon-landing to Google, we are doing mathematics in order to understand, create, and handle things, and perhaps this understanding is mathematics rather than intangible murmur of accompanying abstractions. Mathematicians are thus more or less responsible actors of human history, like Archimedes helping to defend Syracuse (and to save a local tyrant), Alan Turing cryptanalyzing Marshal Rommel's intercepted military dispatches to Berlin, or John von Neumann suggesting high altitude detonation as an efficient tactics of bombing. Accepting this viewpoint, mathematicians can defend their trade by stressing its social utility. In this role, a mathematician can be as morally confused as the next person, and if I were to put on display some trade-specific particularities of such a confusion, I could not find anything better than the bitter irony of (p. 11): " mathematics can also be an indispensable tool. Thus, when the effect of fragmentation bombs on human bodies was to be tested but humanitarian concerns prohibited testing on pigs (italics mine. Yu. 
M.), mathematical simulation was put into play." Or, third, there is a noble vision of the great Castle of Mathematics, towering somewhere in the Platonic World of Ideas, which we humbly and devotedly discover (rather than invent). The greatest mathematicians manage to grasp outlines of the Grand Design, but even those to whom only a pattern on a small kitchen tile is revealed, can be blissfully happy. Alternatively, if one is inclined to use a semiotic metaphor, Mathematics is a proto-text whose existence is only postulated but which nevertheless underlies all corrupted and fragmentary copies we are bound to deal with. The identity of the writer of this proto-text (or of the builder of the Castle) is anybody's guess, but Georg Cantor with his vision of infinity of infinities directly inspired by God, and Kurt Gödel with his "ontological proof", seemingly had no doubts on this matter. Various shades and mixes of these three attitudes, social positions, and implicated choices of the individual behavior, color the whole discussion that follows. The only goal of this concise Preface is to make the reader conscious of the intrinsic tensions in our presentation, rather than imitate clear vision and offer definite judgements where there are none. One last warning about historical references in this exposition. There are two different modes of reading old texts: one, to understand the times and ethnos they were written in, another -to throw some light on the values and prejudices of our times. In the history of mathematics, the polar attitudes are represented by "ethnomathematics" vs Bourbaki style history. For the sake of this presentation, I explicitly and consciously adopt the "modernizing" viewpoint. Acknowledgement. Silke Wimmer-Zagier provided some sources on the history of Chinese and Japanese mathematics and discussed their relevance to this project. Dmitri Manin explained me Google's strategy of page ranking. I gratefully acknowledge their generous help. I. 
Mathematical knowledge I.1. Bird's eye view. Sir Michael Atiyah starts his report with the following broad outline: "The three great branches of mathematics are, in historical order, Geometry, Algebra and Analysis. Geometry we owe essentially to Greek civilization, Algebra is of Indo-Arab origin and Analysis (or the Calculus) was the creation of Newton and Leibniz, ushering in the modern era." He then explains that in the realm of physics, these branches correspond respectively to the (study of) Space/Time/Continuum: "There is little argument about Geometry being the study of space, but it is perhaps less obvious that Algebra is the study of time. But, any algebraic system involves the performance of sequential operations (addition, multiplication, etc.) and these are conceived as being performed one after another. In other words Algebra requires time for its meaning (even if we usually only need discrete instants of time)." One can argue for an alternative viewpoint on Algebra according to which it has most intimate relations not with Physics but with Language. In fact, observing the gradual emergence of place-value notation for numbers, and later algebraic notation for variables and operations, one can recognize two historical stages. At the first stage, notation serves principally to shorten and unify the symbolic representation of a certain pool of meanings. At this stage, a natural language could (and did) serve the same goal, only less efficiently. Therefore one can reasonably compare this process with the development of a specialized sub-dialect of the natural language. The so-called Roman numerals still in use for ornamental purposes are fossilized remnants of this stage. As another helpful comparison, perhaps more streamlined and better documented, one can invoke the emergence and evolution of chemical notation. At the second stage, algorithms for addition/multiplication and later division of numbers in a place-value notation are devised.
In a parallel development, variables and algebraic operations start to be combined into identities and equations, and then to strings of equations obeying universal rules of identical transformations/deductions. At this stage, expressions in the new (mathematical) dialect become not so much carriers of certain meanings as a grist for the mill of computations. It is this shift of the meaning, from the more or less explicit semantics of notation to the hidden semantics of algorithms transforming strings of symbols, that was the crucial chain of events marking the birth of Algebra. Nothing similar to this second stage happened to the natural languages. To the contrary, when in the 60s of the twentieth century large computers made possible first experiments with algorithmic processing of texts in English, Russian, French (e.g. for implementing automatic translation), it became clear how unsuitable for computer processing natural languages were. Huge data bases for vocabularies were indispensable. Intricate and illogical nets of rules governed morphology, word order, and compatibility of grammatical constructions; worse, in different languages these rules were capriciously contradictory. And after all efforts, automatic translation without subsequent editing by a human being never produced satisfactory results. This property of human languages -their resistance to algorithmic processing -is perhaps the ultimate reason why only mathematics can furnish an adequate language for physics. It is not that we lack words for expressing all this E = mc 2 and e iS(φ) Dφ stuff -words can be and are easily invented -the point is that we still would not be able to do anything with these great discoveries if we had only words for them. But we cannot just skip words and deal only with formulas either. Words in mathematical and scientific texts play three basic roles. First, they furnish multiple bridges between the physical reality and the world of mathematical abstractions. 
Second, they carry value judgements, sometimes explicit, sometimes implicit, governing our choices of particular chains of mathematical reasonings, in the vast tree of "all" feasible but mostly empty formal deductions. And last but not least, they allow us to communicate, teach and learn. I will conclude with a penetrating comment of Paul Samuelson regarding use of words vs mathematical symbols in economic models (cited from ): "When we tackle them by words, we are solving the same equations as when we write out those equations. Where the really big mistakes are is in the formulation of premises. One of the advantages of the mathematical medium -or, strictly speaking, of the mathematician's customary canons of exposition of proof, whether in words or in symbols -is that we are forced to lay our cards on the table so that all can see our premises." Returning to the large scale map of mathematical provinces, Geometry/Algebra/Analysis, one should find a place on it for (mathematical) Logic, with its modern impersonation into the Theory of Algorithms and Computer Science. There are compelling arguments to consider it as a part of broadly conceived Algebra (pace Frege.) And if one agrees on that, Atiyah's insight about association of Algebra with Time, becomes corroborated. In fact, the great shift in the development of Logic in the 30s of the twentieth century occurred when Alan Turing used a physics metaphor, "Turing machine", for the description of an algorithmic computation. Before his work, Logic was considered almost exclusively in para-linguistic terms, as we did above. Turing's vision of a finite automaton moving in discrete steps along one-dimensional tape and writing/erasing bits on it, and theorem about existence of a universal machine of this type, stress exactly this temporal aspect of all computations. 
Even more important, the idea of computation as a physical process not only helped create modern computers, but also opened way to thinking in physical terms, both in classical and quantum mode, about general laws of storing and processing information. I.2. Objects of mathematical knowledge. When we study biology, we study living organisms. When we study astronomy, we study celestial bodies. When we study chemistry, we study varieties of matter and of ways it can transform itself. We make observations and measurements of raw reality, we devise narrowly targeted experiments in a controlled environment (not in astronomy however), and finally we produce an explanatory paradigm, which becomes a current milestone of science. But what are we studying when we are doing mathematics? A possible answer is this: we are studying ideas which can be handled as if they were real things. (P. Davis and R. Hersh call them "mental objects with reproducible properties"). Each such idea must be rigid enough in order to keep its shape in any context it might be used. At the same time, each such idea must have a rich potential of making connections with other mathematical ideas. When an initial complex of ideas is formed (historically, or pedagogically), connections between them may acquire the status of mathematical objects as well, thus forming the first level of a great hierarchy of abstractions. At the very base of this hierarchy are mental images of things themselves and ways of manipulating them. Miraculously, it turns out that even very high level abstractions can somehow reflect reality: knowledge of the world discovered by physicists can be expressed only in the language of mathematics. Here are several basic examples. I.2.1. Natural numbers. This is arguably the oldest proto-mathematical idea. "Rigidity" of 1, 2, 3, ... is such that first natural numbers acquire symbolic and religious meanings in many cultures. 
Christian Trinity, or Buddhist Nirvana come to mind: the latter evolved from Sanskrit nir-dva-n-dva where dva means "two", and the whole expression implies that the state of absolute blessedness is attained through the extinction of individual existence and becoming "one" with Universe. (These negative connotations of the idea of "two" survive even in modern European languages where it carries association with the idea of "doubt" : cf. Latin dubius, German Zweifeln, and Goethe's description of Mephistopheles). Natural number is also a proto-physical idea: counting material objects (and later immaterial objects as well, such as days and nights) is the first instance of measurement, cf. below. Natural number becomes a mathematical idea when: a) Ways of handling natural numbers as if they were things are devised: adding, multiplying. b) The first abstract features of the internal structure of the totality of all natural numbers is discovered: prime numbers, their infinity, existence and uniqueness of prime decomposition. These two discoveries were widely separated historically and geographically; arguably, culturally and philosophically as well. Place-value system marks the origin of what we nowadays call applied mathematics, primes mark the origin of what used to be called pure mathematics. Here are a few details. At first, both numbers and ways of handling them are encoded by specific material objects: fingers and other body parts, counting sticks, notches. Notch is already a sign, not a proper thing, and it may start signifying not 1, but 10 or 60, depending on where in the row of other symbols it is situated. A way to the early great mathematical discovery, that of place-value numeration system is open. However, a consistent place-value system also requires a sign for "zero", which came late and marked a new level of mathematical abstraction. An expressive summary in sketches the following picture: "In c. 
2074 BCE, king Shulgi organized a military reform in the Sumerian Empire, and the next year an administrative reform (seemingly introduced under the pretext of a state of emergency but soon made permanent) enrolled the larger part of the working population in quasi-servile labour crews and made overseer scribes accountable for the performance of their crews, calculated in abstract units worth 1/60 of a working day (12 minutes) and according to fixed norms. In the ensuing bookkeeping, all work and output therefore had to be calculated precisely and converted into these abstract units, which asked for multiplications and divisions en masse. Therefore, a place value system with base 60 was introduced for intermediate calculations. Its functioning presupposed the use of tables of multiplication, reciprocals and technical constants and the training for their use in schools; the implementation of a system whose basic idea was "in the air" for some centuries therefore asked for decisions made at the level of the state and implemented with great force. Then as in many later situations, only war provided the opportunity for such social willpower." Primes, on the other hand, seem to spring off from pure contemplation, as well as the idea of a very concrete infinity, that of natural numbers themselves, and that of prime numbers. The proof of infinity of primes codified in Euclid's Elements is a jewel of an early mathematical reasoning. Let us recall it briefly in modern notation: having a finite list of primes p 1 , . . . , p n , we can add one more prime to it by taking any prime divisor of p 1 . . . p n + 1. This is a perfect example of handling mathematical ideas as if they were rigid material objects. And at this stage, they are already pure ideas bafflingly unrelated to any vestiges of Sumerian or whatever material notation. Looking at the modern decimal notation of a number, one can easily tell whether it is even or divisible by 5, but not whether it is prime. 
Generations of mathematicians after Euclid marveled at an apparent randomness with which primes pop up in the natural series. Observation, controlled experimentation, and recently even engineering of primes (producing and recognizing large primes by computationally feasible algorithms, for security applications) became a trademark of much of modern number theory. I.2.2. Real numbers and "geometric algebra". Integers resulted from counting, but other real numbers came from geometry, as lengths and surfaces, volumes. The discovery by Pythagoras of the incommensurability of the diagonal of a square with its side was at the same time the demonstration that there were more "magnitudes" than "numbers". Magnitudes were later to become real numbers. Arithmetical operations on integers evolved from putting together sticks and notches to systematic handling normalized notations in an ordered way. Algebraic operations on reals evolved from drawing and contemplating sketches which could intermittently be plans of building sites or results of surveying, and renderings of Euclidean circles, squares and angles. Historians of mathematics in the twentieth century argued pro and contra interpretation of a considerable part of Greek mathematics as "geometric algebra". One example of it is a sketch of a large square subdivided into four parts by two lines parallel to the orthogonal sides so that two of the parts are again squares. This sketch can be read as an expression and a proof of the algebraic identity $(a + b)^2 = a^2 + b^2 + 2ab$. Our modernizing perspective suggests a more general consideration of several modes of mental processes, in particular those related to mathematics.
The following two are the basic ones: a) Conscious handling of a finite and discrete symbolic system, with explicitly prescribed laws of formation of meaningful strings of symbols, constructing new strings, and less explicit rules of deciding which strings are "interesting" (left brain, linguistic, algebraic activity). b) Largely subconscious handling of visual images, with implicit reliance upon statistics of past experience, estimating probabilities of future outcomes, but also judging balance, harmony, symmetry (right brain, visual arts and music, geometry). Mental processes of mathematicians doing research must combine these two modes in many sophisticated ways. This is not an easy task, in particular because information processing rates are so astonishingly different, of the order 10 bit/sec for conscious symbolic processing, and $10^7$ bit/sec for subconscious visual processing (cf. ). Probably because of inner tension created by this (and other) discrepancies, they tend to be viewed emotionally, as an embodiment of values -cold intellect against warm feeling, bare logic vs. penetrating intuition. See the beautiful articles by David Mumford, who eloquently defends statistics against logic, but invokes mathematical statistics, which is built, as any mathematical discipline, in an extremely logical way. Returning to real numbers and the "geometric algebra" of the Greeks, we recognize in it a sample of right brain treatment of a subject which later historically evolved into something dominated by the left brain. Or, as Mumford puts it, modern algebra is a grammar of actions with objects which are inherently geometric, and Greek algebra is an early compendium of such actions. Perhaps the continuity of Greek geometric thinking as a cognitive phenomenon can be traced not only in modern geometry but also in theoretical physics.
The last decades have seen such a vigorous input of insights, conjectures, and sophisticated constructions, from physics to mathematics, that an expression "physical mathematics" was coined. The theoretical thinking underlying the creative use of Feynman's path integral, strikes us by the richness of results constructed on a foundation which is mathematically shaky by any standards. This can be considered as an additional justification of the notion that "geometric algebra" was a reality, and not only our reconstruction of it. I.2.3. $e^{\pi i} = -1$: a tale of three numbers. Arguably, Euler's formula $e^{\pi i} = -1$ is the most beautiful single formula in all mathematics. It combines in a highly unexpected way three (or four, if one counts −1 separately) constants that were discovered in various epochs, and emanate an aura of very different motivations. Very briefly, π = 3,1415926... is a legacy of the Greeks (again). Even its existence as a real number, that is (the length of) a line segment, or surface of a square, is not something that can be grasped without an additional mental effort. The problem of "squaring the circle" is not just the next geometric problem, but a legitimacy test, with an uncertain outcome. By contrast, e = 2,7182818... emerged in the already mature, if not fully developed Western mathematics (mid-seventeenth century). It is a combined theoretical by-product of the invention of logarithm tables as a tool of optimization of numerical algorithms (addition replacing multiplication) and the problem of "squaring the hyperbola". None of the classical geometric constructions led to e and none suggested any relation between e and π. Finally, the introduction of $i = \sqrt{-1}$, an "imaginary" number, a monstrosity for many contemporaries, was literally imposed on Cardano by the formulas for roots of a cubic equation expressed in radicals. When all three roots are real, formulas required complex numbers in intermediate calculations.
Euler's formula is a remarkable example of "infinite" identities of which he (and later Srinivasa Ramanujan) was a great practitioner. In fact, $e^{\pi i} = -1$ is a particular case of the series $e^{ix} = \sum_{n=0}^{\infty} (ix)^n/n!$ which gives a more general expression $e^{ix} = \cos x + i \sin x$. Further progress in our understanding of real numbers and theory of limits relegated the Euler and Ramanujan great skills of dealing with "infinite identities" to backstage. G. Hardy, describing Ramanujan's mathematical psyche, was at a loss trying to interiorize it. This story does tell something about the logic vs statistics dichotomy, but I cannot pinpoint even a tentative statement. As a totally unrelated development, $e^{ix} = \cos x + i \sin x$ turned out to be at the base of an adequate description of one of the most important and unexpected discoveries of the physics of twentieth century: quantum probability amplitudes, their wave-like behaviour, and quantum interference. "By a 'set' we mean any collection M into a whole of definite, distinct objects m (called the 'elements' of M ) of our perception or our thought." German syntax allows Cantor to mirror the meaning of the sentence in its structure: Objekten m unserer Anschauung etc are packed between the opening bracket Zusammenfassung and the closing bracket zu einem Ganzen. Contemplating this definition for the first time, it is difficult to imagine what kind of mathematics or, for that matter, what kind of mental activity at all, can be performed with such meager means. In fact, it is precisely this parsimony which allowed Cantor to invent his "diagonal process", to compare infinities as if they were physical objects, and to discover that the infinity of real numbers is strictly larger than that of integers.
Simultaneously, Cantor's intuition underlies most of foundational work in the mathematics of the twentieth century: it is either vigorously refuted by logicists of various vintages, or works as a great unification project, in both guises of Set Theory and its successor, Category Theory. I.2.5. "All men are mortal, Kai is a man ...": from syllogisms to software. Aristotle codified elementary forms of statements and basic rules of logical deductions. The analogies between them and elementary arithmetics were perceived early, but made precise late; we recognize Boole's role in this development. Philosophers of science disagreed about hierarchical relationships between the two. Frege, for example, insisted that arithmetic was a part of logic. The 20th century has seen a sophisticated fusion of both realms when in the thirties Gödel, Tarski and Church produced mathematical models of mathematical reasoning going far beyond the combinatorics of finite texts. One of the important tools was the idea, going back to Leibniz, that one can use a computable enumeration of all texts by integers allowing to replace logical deductions by arithmetical operations. Tarski modeled truth as "truth in all interpretations", and found out that the set of (numbers of) arithmetical truths cannot be expressed by an arithmetical formula. Infinitarity of Tarski's notion of truth is connected with the fact that logical formulas are allowed to contain quantifiers "for all" and "there exists", so interpretation of a finite formula involves potentially infinite sequence of verifications. Gödel, using a similar trick, demonstrated that the set of arithmetical truths deducible from any finite system of axioms and deduction rules cannot coincide with the set of all true formulas. Self-referentiality was an essential common feature of both proofs. Among other things, Gödel and Tarski showed that the basic hierarchical relation is that between a language and a metalanguage. 
Moreover, only their interrelation and not absolute status is objective. One can use logic to describe arithmetics, and one can use arithmetics to discuss logic. A skillful mixture of both levels unambiguously shows inherent restrictions of pure logic as a cognitive tool, even when it is applied "only" to pure logic itself. Turing and Church during the same decade analyzed the idea of "computability", which had a more arithmetic flavor from the start. Alan Turing made a decisive step by substituting a physical image (Turing machine) in place of the traditional linguistic embodiments for logic and computation dominating both Tarski's and Gödel's discourses. This was a great mental step preparing the subsequent technological evolution: the emergence of programmable electronic calculators. Theoretically, both Church and Turing discovered that there existed a "final" notion of computability embodied in the universal recursive function, or universal Turing machine. This was not a mathematical theorem, but rather a "physical discovery in a metaphysical realm", justified not by a proof but by the fact that all subsequent attempts to conceive an alternative version led to an equivalent notion. A "hidden" (at least in popular accounts) part of this discovery was the realization that the correct definition of computability includes elements of un-computability that cannot be avoided at any cost: a recursive function is generally not everywhere defined, and we cannot decide at which points it is defined and at which not. Computers which are functioning now, embody a technologically alienated form of these great insights. I.3. Definitions/Theorems/Proofs. I will briefly describe now tangible traces of "pure" mathematics as a collective activity of the contemporary professional community. I will stress not so much organizational forms of this activity as external reflection of the inner structure of the world of mathematical ideas. 
Look at any contemporary paper in one of the leading research journals like Annals of Mathematics or Inventiones mathematicae. Typically, it is subdivided into reasonably short patches called Definitions, Theorems (with Lemmas and Propositions as subspecies), and Proofs, that can be considerably longer. These are the basic structure blocks of a modern mathematical exposition; frills like motivation, examples and counterexamples, discussion of special cases, etc., make it livelier. This tradition of organizing mathematical knowledge is inherited from the Greeks, especially Euclid's Elements. The goal of a definition is to introduce a mathematical object. The goal of a theorem is to state some of its properties, or interrelations between various objects. The goal of a proof is to make such a statement convincing by presenting a reasoning subdivided into small steps each of which is justified as an "elementary" convincing argument. To put it simply, we first explain what we are talking about, and then explain why what we are saying is true (pace Bertrand Russell). Definitions. The first point is epistemologically subtle and controversial, because what we are talking about are extremely specific mental images not present normally in an untrained mind (what is a real number? a random variable? a group?). Presenting some basic objects above, I used narrative devices to make them look more graphic or vivid, but gave no real definitions in the technical sense of the word. Euclid's definitions usually consist of a mixture of explanations involving visual images, and "axioms" involving some idealized properties that we want to impose on them. In contemporary mathematics, one can more or less explicitly restrict oneself to the basic mental image of a Cantorian "set", and a limited inventory of properties of sets and constructions of new sets from given ones. 
Each of our Definitions then can be conceived as a standardized description of a certain structure, consisting of sets, their subsets etc. This is a viewpoint that was developed by the Bourbaki group and which proved to be an extremely influential, convenient and widely accepted way of organizing mathematical knowledge. Inevitably, a backlash ensued, aimed mostly at the value system supporting this neo-Euclidean tradition, but its pragmatic merits are indisputable. At the very least, it enabled a much more efficient communication between mathematicians coming from different fields. If one adopts a form of Set Theory as a basis for further constructions, only set-theoretic axioms remain "axioms" in Euclid's sense, something like intuitively obvious properties accepted without further discussion (but see below), whereas the axioms of real numbers or of plane geometry become provable properties of explicitly constructed set-theoretic objects. Bourbaki in their multivolume treatment of contemporary mathematics developed this picture and added to it a beautiful notion of "structures-mères" (the issue is dedicated to the history of the Bourbaki group). In a broader framework, one can argue that mathematicians have developed a specific discursive behavior which might be called "culture of definitions". In this culture, many efforts are invested into clarification of the content (semantics) of basic abstract notions and syntax of their interrelationships, whereas the choice of words (or even to a larger degree, notations) for these notions is a secondary matter and largely arbitrary convention, dictated by convenience, aesthetic considerations, by desire to invoke appropriate connotations. This can be compared with some habits of humanistic discourse where such terms as Dasein or différance are rigidly used as markers of a certain tradition, without much fuss about their meaning. I.4. Problems/Conjectures/Research Programs. 
From time to time, a paper appears which solves, or at least presents in a new light, some great problem, or conjecture, which was with us for the last decades, or even centuries, and resisted many efforts. Fermat's Last Theorem (proved by Andrew Wiles), the Poincaré Conjecture, the Riemann Hypothesis, the P/NP-problem these days even make newspaper headlines. David Hilbert composed his talk at the second (millennium) International Congress of Mathematicians in Paris on August 8, 1900, as a discussion of ten outstanding mathematical problems which formed a part of his list of 23 problems compiled in the published version. One can argue about their comparative merit in pure scientific terms, but certainly they played a considerable role in focusing efforts of mathematicians on well defined directions, and providing clear tasks and motivation for young researchers. Whereas a problem (a yes/no question) is basically a guess about validity or otherwise of a certain statement (like Goldbach's problem: every even number ≥ 4 is a sum of two primes), a Research Program is an outline of a broad vision, a map of a landscape some regions of which are thoroughly investigated, whereas other parts are guessed on the base of analogies, experimentation with simple special cases, etc. The distinction between the two is not absolute. Problem Number one, the Continuum Hypothesis, which in the epoch of Cantor and Hilbert looked like a yes/no question, generated a vast research program which established, in particular, that neither of the two answers is deducible within the generally accepted axiomatic Set Theory. On the other hand, the explicit formulation of a research program can be a risky venture. Problem Number 6 envisioned the axiomatization of physics. In the next three decades or so physics completely changed its face. Some of the most influential Research Programs of the last decades were expressions of insights into the complex structure of Platonian reality. A. 
Weil guessed the existence of cohomology theories for algebraic manifolds in finite characteristics. Grothendieck constructed them, thus forever changing our understanding of the relationships between continuous and discrete. When Poincaré said that there are no solved problems, there are only problems which are more or less solved, he was implying that any question formulated in a yes/no fashion is an expression of narrow-mindedness. The dawning of the twenty first century was marked by the publication by the Clay Institute of the list of Millennium Problems. There are exactly seven of them, and they are all yes/no questions. For the first time a computer science-generated problem appears: the famous P/NP conjecture. Besides, Clay Problems come with a price tag: USD 10^6 for a solution of any one of them. Obviously, free market forces played no role in this pricing policy. II. Mathematics as a Cognitive Tool II.1. Some history. Old texts that are considered as sources for history of mathematics show that it started as a specific activity answering the needs of commerce and of state, servicing large communal works and warfare: cf. the excerpt above about Sumero-Babylonian administrative reform. As another example, turn to the Chinese book "The nine chapters on mathematical procedures" compiled during the Han dynasty around the beginning of our era. We rely here upon the report of K. Chemla at the Berlin ICM 1998. The book generally is a sequence of problems and of their solutions which can be read as special cases of rather general algorithms so that a structurally similar problem with other values of parameters could be solved as well. According to Chemla, problems "regularly invoke concrete questions with which the bureaucracy of the Han dynasty was faced, and, more precisely, questions that were the responsibility of the "Grand Minister of Agriculture" (dasinong), such as remunerating civil servants, managing granaries or enacting standard grain measures. 
Moreover, the sixth of The nine chapters takes its name from an economic measure actually advocated by a Grand Minister of Agriculture, Sang Hongyang (152-82 B.C.E), to levy taxes in a fair way, a program for which the Classic provides mathematical procedures." Yet another description of the preoccupations of Chinese mathematicians is given in [ref.]: "In the long history of the Chinese empire, mathematical astronomy was the only subject of the exact sciences that attracted great attention from rulers. In every dynasty, the royal observatory was an indispensable part of the state. Three kinds of expert — mathematicians, astronomers and astrologers — were employed as professional scientists by the emperor. Those who were called mathematicians took charge of establishing the algorithms of the calendar-making systems. Most mathematicians were trained as calendar-makers. Calendar-makers were required to maintain a high degree of precision in prediction. Ceaseless efforts to improve numerical methods were made in order to guarantee the precision required for astronomical observation. It was neither necessary nor possible that a geometric model could replace the numerical method, which occupied the principal position in Chinese calendar-making system. As a subject closely related to numerical method, algebra, rather than geometry, became the most developed field of mathematics in ancient China." Western tradition goes back to Greece. According to Turnbull, we owe the word "mathematics" and the subdivision of mathematics into Arithmetic and Geometry to Pythagoras (569–500 BC). More precisely, Arithmetic (and Music) studies the discrete, whereas Geometry and Astronomy study the continued. The secondary dichotomy Geometry/Astronomy reflects the dichotomy The stable/The moving. With small modifications, this classification was at the origin of the medieval "Quadrivium of knowledge", and Michael Atiyah's overall view of mathematics still bears distinctive traces of it. 
Plato, in Republic, Book VII, 525c, explains why the study of arithmetic is essential for an enlightened statesman: "Then this is a kind of knowledge, Glaucon, which legislation may fitly prescribe; and we must endeavour to persuade those who are prescribed to be the principal men of our State to go and learn arithmetic, not as amateurs, but they must carry on the study until they see the nature of numbers with the mind only; nor again, like merchants or retail-traders, with a view to buying or selling, but for the sake of their military use, and of the soul herself; and because this will be the easiest way for her to pass from becoming to truth and being." With gradual emergence of "pure mathematics", return to practical needs began to be classified as applications. The opposition pure/applied mathematics as we know it now certainly had already crystallized by the beginning of the nineteenth century. In France, Gergonne was publishing the Annales de mathématiques pures et appliquées which ran from 1810 to 1833. In Germany Crelle founded in 1826 the Journal für die reine und angewandte Mathematik. II.2. Cognitive tools of mathematics. In order to understand how mathematics is applied to the understanding of real world, it is convenient to subdivide it into the following three modes of functioning: model, theory, metaphor. A mathematical model describes a certain range of phenomena qualitatively or quantitatively but feels uneasy pretending to be something more. From Ptolemy's epicycles (describing planetary motions, ca 150) to the Standard Model (describing interactions of elementary particles, ca 1960), quantitative models cling to the observable reality by adjusting numerical values of sometimes dozens of free parameters (≥ 20 for the Standard Model). Such models can be remarkably precise. 
Qualitative models offer insights into stability/instability, attractors which are limiting states tending to occur independently of initial conditions, critical phenomena in complex systems which happen when the system crosses a boundary between two phase states, or two basins of different attractors. A recent report is dedicated to predicting a surge of homicides in Los Angeles, using as methodology the pattern recognition of infrequent events. Result: "We have found that the upward turn of the homicide rate is preceded within 11 months by a specific pattern of the crime statistics: both burglaries and assaults simultaneously escalate, while robberies and homicides decline. Both changes, the escalation and the decline, are not monotonic, but rather occur sporadically, each lasting some 2-6 months." The age of computers has seen a proliferation of models, which are now produced on an industrial scale and solved numerically. A perceptive essay by R. M. Solow ( , written in 1997) argues that modern mainstream economics is mainly concerned with model-building. Models are often used as "black boxes" with hidden computerized input procedures, and oracular outputs prescribing behavior of human users, e. g. in financial transactions. What distinguishes a (mathematically formulated physical) theory from a model is primarily its higher aspirations. A modern physical theory generally purports that it would describe the world with absolute precision if only it (the world) consisted of some restricted variety of stuff: massive point particles obeying only the law of gravity; electromagnetic field in a vacuum; and the like. In Newton's law for the force Gm/r^2 acting on a point in the central gravity field, Gm and r might be concessions to measurable reality, but 2 in r^2 is a rock solid theoretical 2, not some 2.000000003..., whatever experimentalists might measure to the contrary. 
A good quantitative theory can be very useful in engineering: a machine is an artificial fragment of the universe where only a few physical laws are allowed to dominate in a well isolated material environment. In this function, the theory supplies a model. A recurrent driving force generating theories is a concept of a reality beyond and above the material world, reality which may be grasped only by mathematical tools. From Plato's solids to Galileo's "language of nature" to quantum superstrings, this psychological attitude can be traced sometimes even if it conflicts with the explicit philosophical positions of the researchers. A (mathematical) metaphor, when it aspires to be a cognitive tool, postulates that some complex range of phenomena might be compared to a mathematical construction. The most recent mathematical metaphor I have in mind is Artificial Intelligence (AI). On the one hand, AI is a body of knowledge related to computers and a new, technologically created reality, consisting of hardware, software, Internet etc. On the other hand, it is a potential model of functioning of biological brains and minds. In its entirety, it has not reached the status of a model: we have no systematic, coherent and extensive list of correspondences between chips and neurons, computer algorithms and brain algorithms. But we can and do use our extensive knowledge of algorithms and computers (because they were created by us) to generate educated guesses about structure and function of the central neural system: see and . A mathematical theory is an invitation to build applicable models. A mathematical metaphor is an invitation to ponder upon what we know. Susan Sontag's essay about (mis)uses of the "illness" metaphor in is a useful warning. Of course, the subdivision I have just sketched is not rigid or absolute. Statistical studies in social sciences often vacillate between models and metaphors. 
With a paradigm change, scientific theories are relegated to the status of outdated models. But for the sake of our exposition, it is a convenient way to organize synchronic and historical data. I will now give some more details about these cognitive tools, stressing models and related structures. II.3. Models. One can analyze the creation and functioning of a mathematical model by contemplating the following stages inherent in any systematic study of quantifiable observations. i) Choose a list of observables. ii) Devise a method of measurement: assigning numerical values to observables. Often this is preceded by a more or less explicit ordering of these values along an axis ("more -less" relation); then measurement is expected to be consistent with ordering. iii) Guess the law(s) governing the distribution of observables in the resulting, generally multidimensional, configuration space. The laws can be probabilistic or exact. Equilibrium states can be especially interesting; they are often characterized as stationary points of an appropriate functional defined on the whole configuration space. If time is involved, differential equations for evolution enter the game. Regarding the idea of "axis", one should mention its interesting and general cultural connotations expounded by Karl Jaspers. Jaspers postulated a transition period to modernity around 500 BC, an "axial time" when a new human mentality emerged based on the opposition between immanence and transcendence. For us relevant here is the image of oppositions as opposite orientations of one and the same axis, and the idea of freedom as a freedom of choice between two incompatible alternatives. This is also the imagery behind the standard physical expression "degrees of freedom", which is now almost lost, as usually happens to images when they become terms. The idea of measurement, which is the base of modern science, is so crucial that it is sometimes uncritically accepted in model-building. 
It is important to keep in mind its restrictions. In the quantum mode of description of the microworld, a "measurement" is a very specific interaction which produces a random change of the system state, rather than furnishing information about this state. In economics, money serves as the universal axis upon which "prices" of whatever are situated. "Measurement" is purportedly a function of market forces. The core intrinsic contradiction of the market metaphor (including the outrageous "free market of ideas") is this: we are projecting the multidimensional world of incomparable and incompatible degrees of freedom to the one-dimensional world of money prices. As a matter of principle, one cannot make it compatible with even basic order relations on these axes, much less compatible with non-existent or incomparable values of different kinds. In this respect, the most oxymoronic use of the market metaphor is furnished by the expression "free market of ideas". Only one idea is on sale at this market: that of "free market". II.3.1. A brief glossary of measurement. A general remark about measurement: for each "axis" we will be considering, the history of measurements starts with the stage of "human scale" and involves direct manipulation with material objects. Gradually it evolves to much larger and much smaller scales, and in order to deal with the new challenges posed by this evolution, more and more mathematics is created and used. COUNTING. We suggest to the reader to reread the subsection on Natural Numbers above as a glimpse into the history of counting (and accounting). It shows clearly how the transition from counting small quantities of objects ("human scale") to the scale of state economy stimulated the creation and codification of a place-value notation. 
Skipping other interesting developments, we must briefly mention what Georg Cantor justifiably considered as his finest achievement: counting "infinities" and the discovery that there is an infinite scale of infinities of growing orders of magnitude. His central argument is structurally very similar to Euclid's proof that there are infinitely many primes: if we have a finite or infinite set X, then the set of all its subsets P(X) has a strictly larger cardinality. This is established by Cantor's famous "diagonal" reasoning. Cantor's theory of infinite sets produces an incredible extension of both aspects of natural numbers: each number measures "a quantity", and they are ordered by the relation "x is larger than y". Infinities, respectively, are "cardinals" (measure of infinity) and "ordinals" which are points on the ordered axis of growing infinities. The mysteries of Cantor's scale led to a series of unsolved (and to a considerable degree unsolvable) problems, and became the central point of many epistemological and foundational discussions in the twentieth century. The controversies and bitter arguments about the legitimacy of his mental constructions made the crowning achievement of his life also the source of a sequence of nervous breakdowns and depressions which finally killed him as World War I was slowly grinding the last remnants of Enlightenment's belief in reason. SPACE AND TIME. Human scale measurements of length must have been inextricably related to those of plots, and motivated by agriculture and building. A stick with two notches, or a piece of string, could be used in order to transport a measure of length from one place to another. 
Euclid's basic abstraction: an infinitely rigid and infinitely divisible plane, with its hidden symmetry group of translations and rotations, with its points having no size, lines stretching uninterrupted in two directions, perfect circles and triangles, must have been a refined mental image of the ancient geodesy. Euclid's space geometry arguably was even closer to the observable world, and it is remarkable, that he systematically produced and studied abstractions of two-, one-, and zero-dimensional objects as well. Pythagoras's theorem was beautifully related to arithmetic in the practice of Egyptian builders: the formula 3^2 + 4^2 = 5^2 could be transported into a prescription for producing a right angle with the help of a string with uniformly distanced knots on it. When Eratosthenes of Alexandria (ca 200 BC) devised his method for producing the first really large scale scientific length measurement, that of the size of the Earth, he used the whole potential of Euclid's geometry with great skill. He observed that at noon on the day of summer solstice at Syene the sun was exactly at the zenith since it shone down a deep well. And at the same time at Alexandria the distance of the sun to the zenith was one fiftieth of the circumference. Two additional pieces of observational data were used. First, the distance between Syene and Alexandria was taken to be 5000 Greek stades (this is also a large scale measurement, probably, based upon the time needed to cover this distance). Second, the assumption that Syene and Alexandria lie on the same meridian. The remaining part of Eratosthenes's measurement method is based upon a theoretical model. Earth is supposed to be round, and Sun to be at an essentially infinite distance from its center, so that the lines of sight from Syene and Alexandria to the Sun are parallel. 
Then an easy Euclidean argument applied to the cross-section of the Earth and outer space passing through Syene, Alexandria, and the Sun, shows that the distance between Syene and Alexandria must be one fiftieth of the Earth circumference, which gives for the latter the value 250000 stades. (According to modern evaluation of Greek stade, this is a pretty good approximation.) Implicit in this argument is an extended symmetry group of the Euclidean plane including, with translations and rotations, also rescalings: changing all lengths simultaneously in the same proportion. The practical embodiment of this idea, that of a map, was crucial for an immense amount of human activities, including geographical discoveries all over the globe. The attentive reader has remarked already that time measurements crept into this description (based upon a book of Cleomedes "De motu circulari corporum caelestium", middle of the first century BC). In fact, how do we know that we are looking at the position of sun at the same moment in Alexandria and Syene, distanced by 5000 stades? The earliest human scale time measurements were connected with periodical cycles of day/night and approximate position of sun on the sky. Sundials, referred to by Cleomedes and Eratosthenes, translate time measurements into space measurements. The next large scale measurements of time are related to the seasons of the year and periodicity of religious events required in the community. Here to achieve the necessary precision, mathematical observational astronomy is needed. It is used first to register irregularities in the periodicity of year, so basically in the movement of Earth in the solar system. The mathematics which is used here involves numerical calculation based on interpolation methods. Next level of large scale: chronology of "historical time". This proved to be a rather un-mathematical endeavor. 
Geological and evolutionary time returns us to science: the evolution of Earth structures and of life is traced on the background of a well developed understanding of physical time which is highly mathematicized; however, the changes are so gradual and the evidence so scattered that precision of measurements ceases to be accessible or essential. Besides the plethora of observational data, brilliant guesses, and very elementary accompanying reasoning, one small piece of mathematics becomes essential for dating: the idea that radioactive decay leaves remnants of the decaying substance whose quantity diminishes exponentially with time. One very original version of this idea was used in "glottochronology": the dating of protostates of living languages which were reconstructed using methods of comparative linguistics. The sheer span of geological and evolutionary time when it was first recognized and scientifically elaborated presented a great challenge to the dogmata of (Christian) faith: discrepancy with the postulated age of the World since the time of Creation became gaping. Time measurements at a small scale become possible with invention of clocks. Sundials use relative regularity of visible solar motion and subdivide daytime into smaller parts. Water and sand clocks measure fixed stretches of time. This uses the idea of reproducibility of some well controlled physical processes. Mechanical clocks add to this artificial creation of periodic processes. Modern atomic clocks use subtle enhancing methods for exploiting natural periodic processes on a microscale. Still, time remains a mystery, because we cannot freely move in it as we do in space, we are dragged to who knows where, and St Augustine reminds us about this perennial, un-scientific torment: "I know that I am measuring time. But I am not measuring the future, for it is not yet; and I am not measuring the present because it is extended by no length; and I am not measuring the past because it no longer is. 
What is it, therefore, that I am measuring? " (Confessions, Book XI, XXVI.33). CHANCE, PROBABILITY, FINANCE. Connotations of the words "chance" and "probability" in the ordinary speech do not have much in common with mathematical probability: see [ref.] for an interesting analysis of semantics of related words in several ancient and modern European languages. Basically, they invoke the idea of human confidence (or otherwise) in an uncertain situation. Measurements of probability, and mathematical handling of the results, refer not to the confidence itself which is a psychological factor, but to objective numerical characteristics of reality, initially closely related to count. If a pack contains 52 cards and they are well shuffled, the probability to pick the queen of spades is 1/52. Elementary but interesting mathematics enters when one starts calculating probabilities of various combinations ("good hands"). Implicitly, such calculations involve the idea of symmetry group: we not only count the number of cards in the pack, or number of good hands among all possible, but assume that each one is equally probable if the game is fair. The mathematics of gambling was one source of probability theory, while another was the statistics of banking, commerce, taxation etc. Frequencies of various occurrences and their stability led to the notion of empirical probability and to the more or less explicit idea of "hidden gambling", the unobservable realm of causes which produced observable frequencies with sufficient regularity in order to fit into a mathematical theory. The modern definition of a probability space is an axiomatization of such an image. Money started as a measure of value and made a crucial transition to the world of probability with the crystallization of credit as a main function of a bank system. The etymology of the word "credit" again refers to the idea of human confidence. 
The emergent "culture of finance", according to the astute analysis of Mary Poovey in [ref.], drastically differs from an economy of production "which generates profit by turning labor power into products that are priced and exchanged in the market". Finance generates profit, in particular, "through placing complex wagers that future prices will rise or fall" ([ref.], p. 27), that is, through pure gambling. The scale of this gambling is staggering, and the incredible mixture of real and virtual worlds in the culture of finance is explosive. INFORMATION AND COMPLEXITY. This is an example of a quite sophisticated and contemporary measurement paradigm. As with "chance" and "probability", the term quantity of information, which became one of the important theoretical notions in the second half of the twentieth century after the works of Claude Shannon and Andrei Kolmogorov, has somewhat misleading connotations. Roughly speaking, the quantity of information is measured simply by the length of a text needed to convey it. In the everyday usage, this measure seems to be rather irrelevant, first, and disorienting, second. We need to know whether information is important and reliable: these are qualitative rather than quantitative characteristics. Moreover, importance is a function of cultural, scientific, or political context. And in any case, it seems preposterous to measure the information content of "War and Peace" by its sheer volume. However, quantity of information becomes central if we are handling information without bothering about its content or reliability (but paying attention to security), which is the business of the media and communication industry. The total size of texts transmitted daily by Internet, mass media and phone services is astounding and far beyond the limits of what we called "human scale". Shannon's basic ideas about measuring quantity of information can be briefly explained as follows. 
Imagine first that the information you want to transmit is simply the answer "yes" or "no" to a question of your correspondent. For this, it is not even necessary to use words of any natural language: simply transmit 1 for "yes" and 0 for "no". This is one bit of information. Suppose now that you want to transmit more complex data and need a text containing N bits. Then the quantity of information you transmit is at least bounded from above by N , but how do you know that you cannot use a shorter text to do the same job? In fact, there exist systematic methods of compressing the raw data, and they were made explicit by Shannon. The most universal of them starts with the assumption that in the pool of texts you might be wanting to transmit not all are equally probable. In this case you might change encoding in such a way that the more probable texts will get shorter codes than less probable ones, and thus save on the volume of transmission, at least on average. Here is how one can do it in order to encode texts in a natural language. Since there are about 30 letters of alphabet, and 2^5 = 32, one needs 5 bits to encode each one, and thus to get a text whose bit-length is about 5 times its letter-length. But some letters statistically are used much more often than others, so one can try to encode them by shorter bit sequences. This leads to an optimization problem that can be explicitly solved, and the resulting length of an average compressed text can be calculated. This is essentially the definition of Shannon's and Kolmogorov's entropy. Using the statistical paradigm of measurement, the creators of Google found an imaginative solution for the problem of assigning numerical measure to the relevance of information as well. Roughly speaking, a search request makes Google produce a list of pages containing a given word or expression. Typically, the number of such pages is very large, and they must be presented in the order of decreasing importance/relevance. 
How does Google calculate this order? Each page has hypertext links to other pages. One can model the whole set of pages on the Web by the vertices of an oriented graph whose edges are links. One can assume in the first approximation that importance of a page can be measured by the number of links pointing to it. But this proposal can be improved upon, by noting that all links are not equal: a link from an important page has proportionately more weight, and a link from a page that links to many other pages has proportionately less weight. This leads to an ostensibly circular definition (we omit a couple of minor details): each page imparts its importance to the pages it links to, divided equally between them; each page's importance is what it receives from all pages that link to it. However a classical theorem due to A. Markov shows that this prescription is well defined. It remains to calculate the values of importance and to range pages in their decreasing order. Let us now return to the Shannon's optimal encoding/decoding procedures. The reader has noticed that economy on transmission has its cost: encoding at the source and decoding at the target of information. What happens if we allow more complex encoding/decoding procedures in order to achieve further degree of compression? The following metaphor here might be helpful: an encoded text at the source is essentially a program P for obtaining the decoded text Q at the target. Let us now allow to transmit arbitrary programs that will generate Q; perhaps we will be able to choose the shortest one and to save resources. A remarkable result due to Kolmogorov is that this is a well defined notion: such shortest programs P exist and their length (the Kolmogorov complexity of Q) does not depend essentially on the programming method. In other words, there exists a totally objective measure of the quantity of information contained in a given text Q. 
Bad news, however, crops up here: a) one cannot systematically reconstruct P knowing Q (unlike the case of Shannon entropy); b) it may take a very long time decoding Q from P even if P is known and short. A very simple example: if Q is a sequence of exactly 10^(10^10) 1's, one can transmit this sentence, and let the addressee bother with the boring task of printing 10^(10^10) 1's out. This means that Kolmogorov's complexity, a piece of beautiful and highly sophisticated (although "elementary") mathematics, is not a practical measure of quantity of information. However, it can be used as a powerful metaphor elucidating various strengths and weaknesses of the modern information society. It allows us to recognize one essential way in which scientific (but also everyday life) information used to be encoded. The basic physical "laws of nature" (Newton's F = ma, Einstein's E = mc^2, the Schrödinger equation etc.) are very compressed programs for obtaining relevant information in concrete situations. Their Kolmogorov complexity is clearly of human size, they bear names of humans associated with their discovery, and their full information content is totally accessible to a single mind of a researcher or a student. Nowadays, such endeavors as the Human Genome projects provide us with huge quantities of scientific data whose volume in any compressed form highly exceeds the capability of any single mind. Arguably, similar databases that will be created for understanding the central nervous system (brain) will present the same challenge, having Kolmogorov complexity of comparable size with their volume. Thus, we are already studying those domains of the material world whose descriptions have much higher information content (Kolmogorov complexity) than the ones that constituted the object of classical science. Without computers, neither the collective memory of observational data nor their processing would be feasible. 
What will happen when the total essential new scientific "knowledge" and its handling will have to be relegated to large computer databases and nets? III. Mathematical Sciences and Human Values III.1. Introduction. Commenting on the fragments of the Rhind papyrus, a handbook of Egyptian mathematics written about 1700 BC, the editor of the whole anthology James R. Newman writes (vol. I, p. 178, published in 1956) : "It seems to me that a sound appraisal of Egyptian mathematics depends upon a much broader and deeper understanding of human culture than either Egyptologists or historians of science are wont to recognize. As to the question how Egyptian mathematics compares with Babylonian or Mesopotamian or Greek mathematics, the answer is comparatively easy and comparatively unimportant. What is more to the point is to understand why the Egyptians produced their particular kind of mathematics, to what extent it offers a culture clue, how it can be related to their social and political institutions, to their religious beliefs, their economic practices, their habits of daily living. It is only in these terms that their mathematics can be judged fairly." By 1990, this became a widely accepted paradigm, and D'Ambrosio coined the term "Ethnomathematics" for it (cf ). Our collage, and the whole project of which it is a part, is a brief self-presentation of ethnomathematics of Western culture, observed from the vantage point of the second half of the twentieth century. Probably the most interesting intracultural interactions involving mathematics are those that are not direct but rather proceed via the mediation of value systems. A value system influences activities in each domain and practically determines their cultural interpretation. Conversely, an emerging value system in one part of cultural activity (e.g. scientific) starts a process of reconsideration of other ones, their reformation, sometimes leading to their extinction or total remodeling. 
This is why in the last section I briefly touch upon human values in the context of mathematical creativity. III.2. Rationality. Let us listen again to J. R. Newman (Introduction to vol. I of ): "... I began gathering the material for an anthology which I hoped would convey something of the diversity, the utility and the beauty of mathematics". The book "... presents mathematics as a tool, a language and a map; as a work of art and an end to itself; as a fulfillment of the passion for perfection. It is seen as an object of satire, a subject for humor and a source of controversy; as a spur to wit and a leaven to the storyteller imagination; as an activity which has driven men to frenzy and provided them with delight. It appears in broad view as a body of knowledge made by men, yet standing apart and independent of them." In this private and emotional list of values associated with mathematics one is conspicuously absent: rationality. One possible explanation is that in the Anglo-Saxon tradition, this basic value of the Enlightenment came to be associated with economic behavior, and often gets a narrow interpretation: a rational actor is the one that consistently promotes self-interest. Another explanation is that being rational is not really delightful: "Cogito ergo sum" is an existence proof but it lacks the urgency which a living soul feels without thinking. Still, rationality in the Renaissance sense, "Il natural desiderio di sapere" (cf. ), and the drive to be consistently rational is a force without which the existence of mathematics through the centuries, and its successes in bringing its share to the technological progress of society would be impossible. III.3. Truth. Extended and complex, subtle and mutually contradictory views were expounded on the problem "truth in mathematics": see for a fairly recent review. 
Here I simply state that axiologically, this is one of the central values associated with mathematics, whatever its historical and philosophical correlates might be. Authority, practical efficiency, success in competition, faith, all these clashing values must recede in the mind of a mathematician when he or she sets down to do their job. III.4. Action and contemplation. By the nature of their trade, mathematicians are inclined more to contemplation than to action. The Romans, who were actors par excellence and revered Greek culture, skipped Greek mathematics. The imperial list of virtues -valor, honor, glory, service -did not leave much place for geometry. This tradition continued through centuries, but as with any tradition, there were exciting exceptions, and I will conclude this essay with a sketch of a great mathematician of the last century, John von Neumann. Neumann Jànos was born on October 28, 1903 in Budapest, and died in Washington, D.C., on February 8, 1957. During this relatively short life span, he participated in, and made crucial contributions to: the foundations of set theory, quantum statistics and ergodic theory, game theory as a paradigm of economic behavior, theory of operator algebras, the architecture of modern computers, the implosion principle for the creation of the hydrogen bomb, and much more. Here are two samples of his thinking and modes of expression, marking the beginning and the end of his career. Contemplation: The von Neumann Universe. Cantor's description of a set as an arbitrary collection of distinct elements of our thought is too generous in many contexts, and the von Neumann Universe consists only of sets whose elements are also sets. The potentially dangerous self-referentiality is avoided by postulating that any family of sets X i such that X i is an element of X i+1 has a least element; and the ultimate set, the least of all, is empty. 
Thus von Neumann Universe is born from a "philosophical vacuum": its first elements are ∅ (the empty set), {∅} (one-element set whose only element is the empty set), {{∅}}, {∅, {∅}} etc. Stingy curly brackets replace Cantor's Zusammenfassung . . . zu einem Ganzen, and this operation, which can be iteratively repeated, is the only one that produces new sets from the already constructed ones. Iteration can be, of course, transfinite, which was another great insight of Cantor's. It is difficult to imagine a purer object of contemplation than this quiet and powerful hierarchy. Action: Hiroshima. Excerpts from von Neumann's letter to R. E. Duncan, IBM War History section, dated December 18, 1947 ( , pp. 111-112): ' Dear Mr. Duncan In reply to your letter of December 16, I can tell you the following things: I did initiate and carry out work during the war on oblique shock reflection. This did lead to the conclusion that large bombs are better detonated at a considerable altitude than on the ground, since this leads to the higher oblique-incidence pressure referred to. I did receive the Medal for Merit (October, 1946) and the Distinguished Service Award (July, 1946). The citations are as follows: "Citation to Accompany the Award of The Medal for Merit to Dr. John von Neumann DR. JOHN VON NEUMANN, for exceptionally meritorious conduct in the performance of outstanding services to the United States from July 9, 1942 to August 31, 1945. Dr. von Neumann, by his outstanding devotion to duty, technical leadership, untiring cooperativeness, and sustained enthusiasm, was primarily responsible for fundamental research by the United States Navy on the effective use of high explosives, which has resulted in the discovery of a new ordnance principle for offensive action, and which has already been proved to increase the efficiency of air power in the atomic bomb attacks over Japan. His was a contribution of inestimable value to the war effort of the United States.
import { JSDOM } from 'jsdom';

// Creates a fresh JSDOM instance, optionally seeded with the given HTML
// document string. With no argument, JSDOM supplies its default blank page.
export declare const createDom: (html?: string) => JSDOM;

// Re-exported alias for JSDOM's static `fragment` helper, which parses an
// HTML string into a detached DocumentFragment (no full document created).
export declare const parseFragment: typeof JSDOM.fragment;
The development of the encoding of deictic motion in the Bantu language Rangi: grammaticalisation and change Abstract The close cross-linguistic relation between the domains of space and time has been well described. The frequent emergence of Tense-Aspect-Mood (TAM) markers from deictic motion verbs in particular, has also been extensively detailed in the literature. This paper focusses on the less well-known link between associated motion, a category of functional morphemes expressing (deictic) motion events, and TAM, in a language contact situation. Specifically, it provides a synchronic and diachronic description of three associated motion prefixes, joo-, tóó- and koo- , found in the Tanzanian Bantu language Rangi, spoken in an area of high linguistic diversity. It proposes that the prefix joo- encodes movement towards a deictic centre, tóó- encodes movement towards a goal which is not the deictic centre, and koo- encodes movement away from a deictic centre. It further contends that while tóó- and koo- have maintained a purely deictic function, joo- has grammaticalised to assume an additional function whereby it encodes future tense, possibly aided by the absence of a dedicated future tense marker in the language. This three-way morphological encoding of spatial relations on the verb form is not a common characteristic of East African Bantu languages. However, this paper proposes that the system in Rangi can be accounted for on the basis of cross-linguistically widely attested pathways of grammatical change.
mod call_context_manager;

use crate::{CanisterQueues, NumWasmPages, PageMap, StateError};
pub use call_context_manager::{CallContext, CallContextAction, CallContextManager, CallOrigin};
use ic_base_types::NumSeconds;
use ic_interfaces::messages::CanisterInputMessage;
use ic_protobuf::{
    proxy::{try_from_option_field, ProxyDecodeError},
    state::canister_state_bits::v1 as pb,
};
use ic_types::{
    messages::{Ingress, Request, RequestOrResponse, Response, StopCanisterContext},
    nominal_cycles::NominalCycles,
    CanisterId, Cycles, MemoryAllocation, NumBytes, PrincipalId, QueueIndex,
};
use lazy_static::lazy_static;
use maplit::btreeset;
use serde::{Deserialize, Serialize};
use std::convert::{TryFrom, TryInto};
use std::str::FromStr;
use std::{collections::BTreeSet, sync::Arc};

lazy_static! {
    // Sentinel principals returned by `SystemState::controller()` when the
    // canister does not have exactly one controller (backwards-compat shim;
    // see `controller()` below).
    static ref DEFAULT_PRINCIPAL_MULTIPLE_CONTROLLERS: PrincipalId =
        PrincipalId::from_str("ifxlm-aqaaa-multi-pleco-ntrol-lersa-h3ae").unwrap();
    static ref DEFAULT_PRINCIPAL_ZERO_CONTROLLERS: PrincipalId =
        PrincipalId::from_str("zrl4w-cqaaa-nocon-troll-eraaa-d5qc").unwrap();
}

#[derive(Clone, Debug, Default, PartialEq)]
/// Canister-specific metrics on scheduling, maintained by the scheduler.
// For semantics of the fields please check
// protobuf/def/state/canister_state_bits/v1/canister_state_bits.proto:
// CanisterStateBits
pub struct CanisterMetrics {
    pub scheduled_as_first: u64,
    pub skipped_round_due_to_no_messages: u64,
    pub executed: u64,
    // NOTE(review): "interruped" is a misspelling of "interrupted", but the
    // field is public and mirrors the protobuf definition referenced above —
    // renaming it here would break callers; confirm before fixing.
    pub interruped_during_execution: u64,
    pub consumed_cycles_since_replica_started: NominalCycles,
}

/// State that is controlled and owned by the system (IC).
///
/// Contains structs needed for running and maintaining the canister on the IC.
/// The state here cannot be directly modified by the Wasm module in the
/// canister but can be indirectly via the SystemApi interface.
#[derive(Clone, Debug, PartialEq)]
pub struct SystemState {
    /// The set of controllers of this canister; see `controller()` for the
    /// legacy single-controller view.
    pub controllers: BTreeSet<PrincipalId>,
    pub canister_id: CanisterId,
    // EXE-92: This should be private
    pub queues: CanisterQueues,
    pub stable_memory_size: NumWasmPages,
    pub stable_memory: PageMap,
    /// The canister's memory allocation.
    pub memory_allocation: MemoryAllocation,
    pub freeze_threshold: NumSeconds,
    /// The status of the canister: Running, Stopping, or Stopped.
    /// Different statuses allow for different behaviors on the SystemState.
    pub status: CanisterStatus,
    /// Certified data blob allows canisters to certify parts of their state to
    /// securely answer queries from a single machine.
    ///
    /// Certified data is set by the canister by calling ic0.certified_data_set.
    ///
    /// It can be at most 32 bytes long. For fresh canisters, this blob is the
    /// empty blob.
    ///
    /// See also:
    /// * https://sdk.dfinity.org/docs/interface-spec/index.html#system-api-certified-data
    pub certified_data: Vec<u8>,
    pub canister_metrics: CanisterMetrics,
    /// A canister's state has an associated cycles balance, and may `send` a
    /// part of this cycles balance to another canister.
    /// In addition to sending cycles to another canister, a canister `spend`s
    /// cycles in the following three ways:
    /// a) executing messages,
    /// b) sending messages to other canisters,
    /// c) storing data over time/rounds
    /// Each of the above spending is done in three phases:
    /// 1. reserving maximum cycles the operation can require
    /// 2. executing the operation and return `cycles_spent`
    /// 3. reimburse the canister with `cycles_reserved` - `cycles_spent`
    pub cycles_balance: Cycles,
}

/// A wrapper around the different canister statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CanisterStatus {
    Running {
        call_context_manager: CallContextManager,
    },
    Stopping {
        call_context_manager: CallContextManager,
        /// Info about the messages that requested the canister to stop.
        /// The reason this is a vec is because it's possible to receive
        /// multiple requests to stop the canister while it is stopping. All
        /// of them would be tracked here so that they can all get a response.
        stop_contexts: Vec<StopCanisterContext>,
    },
    Stopped,
}

impl CanisterStatus {
    /// Returns a fresh `Running` status with an empty call context manager.
    pub fn new_running() -> Self {
        Self::Running {
            call_context_manager: CallContextManager::default(),
        }
    }
}

// Conversion into the protobuf representation of the canister status.
impl From<&CanisterStatus> for pb::canister_state_bits::CanisterStatus {
    fn from(item: &CanisterStatus) -> Self {
        match item {
            CanisterStatus::Running {
                call_context_manager,
            } => Self::Running(pb::CanisterStatusRunning {
                call_context_manager: Some(call_context_manager.into()),
            }),
            CanisterStatus::Stopped => Self::Stopped(pb::CanisterStatusStopped {}),
            CanisterStatus::Stopping {
                call_context_manager,
                stop_contexts,
            } => Self::Stopping(pb::CanisterStatusStopping {
                call_context_manager: Some(call_context_manager.into()),
                stop_contexts: stop_contexts.iter().map(|context| context.into()).collect(),
            }),
        }
    }
}

// Fallible conversion back from the protobuf representation; fails when a
// required field (the call context manager) is missing or malformed.
impl TryFrom<pb::canister_state_bits::CanisterStatus> for CanisterStatus {
    type Error = ProxyDecodeError;

    fn try_from(value: pb::canister_state_bits::CanisterStatus) -> Result<Self, Self::Error> {
        let canister_status = match value {
            pb::canister_state_bits::CanisterStatus::Running(pb::CanisterStatusRunning {
                call_context_manager,
            }) => Self::Running {
                call_context_manager: try_from_option_field(
                    call_context_manager,
                    "CanisterStatus::Running::call_context_manager",
                )?,
            },
            pb::canister_state_bits::CanisterStatus::Stopped(pb::CanisterStatusStopped {}) => {
                Self::Stopped
            }
            pb::canister_state_bits::CanisterStatus::Stopping(pb::CanisterStatusStopping {
                call_context_manager,
                stop_contexts,
            }) => {
                // Decode each stop context individually so a single bad entry
                // surfaces as a decode error rather than being dropped.
                let mut contexts = Vec::<StopCanisterContext>::with_capacity(stop_contexts.len());
                for context in stop_contexts.into_iter() {
                    contexts.push(context.try_into()?);
                }
                Self::Stopping {
                    call_context_manager: try_from_option_field(
                        call_context_manager,
                        "CanisterStatus::Stopping::call_context_manager",
                    )?,
                    stop_contexts: contexts,
                }
            }
        };
        Ok(canister_status)
    }
}

impl SystemState {
    /// Creates a new `SystemState` in the `Running` status.
    pub fn new_running(
        canister_id: CanisterId,
        controller: PrincipalId,
        initial_cycles: Cycles,
        freeze_threshold: NumSeconds,
    ) -> Self {
        Self::new(
            canister_id,
            controller,
            initial_cycles,
            freeze_threshold,
            CanisterStatus::new_running(),
        )
    }

    /// Creates a new `SystemState` in the `Stopping` status, with no pending
    /// stop contexts.
    pub fn new_stopping(
        canister_id: CanisterId,
        controller: PrincipalId,
        initial_cycles: Cycles,
        freeze_threshold: NumSeconds,
    ) -> Self {
        Self::new(
            canister_id,
            controller,
            initial_cycles,
            freeze_threshold,
            CanisterStatus::Stopping {
                call_context_manager: CallContextManager::default(),
                stop_contexts: Vec::default(),
            },
        )
    }

    /// Creates a new `SystemState` in the `Stopped` status.
    pub fn new_stopped(
        canister_id: CanisterId,
        controller: PrincipalId,
        initial_cycles: Cycles,
        freeze_threshold: NumSeconds,
    ) -> Self {
        Self::new(
            canister_id,
            controller,
            initial_cycles,
            freeze_threshold,
            CanisterStatus::Stopped,
        )
    }

    /// Common constructor backing the `new_*` helpers above; everything not
    /// passed in starts out at its default value.
    pub fn new(
        canister_id: CanisterId,
        controller: PrincipalId,
        initial_cycles: Cycles,
        freeze_threshold: NumSeconds,
        status: CanisterStatus,
    ) -> Self {
        Self {
            canister_id,
            controllers: btreeset! {controller},
            queues: CanisterQueues::default(),
            stable_memory_size: NumWasmPages::new(0),
            stable_memory: PageMap::default(),
            cycles_balance: initial_cycles,
            memory_allocation: MemoryAllocation::BestEffort,
            freeze_threshold,
            status,
            certified_data: Default::default(),
            canister_metrics: CanisterMetrics::default(),
        }
    }

    /// Create a SystemState only having a canister_id -- this is the
    /// state that is expected when the "start" method of the wasm
    /// module is run. There is nothing interesting in the system state
    /// that can be accessed at that point in time, hence this
    /// "slightly" fake system state.
    pub fn new_for_start(canister_id: CanisterId) -> Self {
        // The canister is made its own controller for this placeholder state.
        let controller = *canister_id.get_ref();
        Self::new(
            canister_id,
            controller,
            Cycles::from(0),
            NumSeconds::from(0),
            CanisterStatus::Stopped,
        )
    }

    /// Returns the id of this canister.
    pub fn canister_id(&self) -> CanisterId {
        self.canister_id
    }

    #[doc(hidden)]
    pub fn set_canister_id(&mut self, canister_id: CanisterId) {
        self.canister_id = canister_id;
    }

    /// This method is used for maintaining the backwards compatibility.
    /// Returns:
    /// - controller id as-is, if there is only one controller.
    /// - DEFAULT_PRINCIPAL_MULTIPLE_CONTROLLERS, if there are multiple
    /// controllers.
    /// - DEFAULT_PRINCIPAL_ZERO_CONTROLLERS, if there is no controller.
    pub fn controller(&self) -> &PrincipalId {
        if self.controllers.len() < 2 {
            match self.controllers.iter().next() {
                None => &DEFAULT_PRINCIPAL_ZERO_CONTROLLERS,
                Some(controller) => controller,
            }
        } else {
            &DEFAULT_PRINCIPAL_MULTIPLE_CONTROLLERS
        }
    }

    /// Returns the call context manager, or `None` if the canister is
    /// `Stopped` (a stopped canister has no in-flight calls).
    pub fn call_context_manager(&self) -> Option<&CallContextManager> {
        match &self.status {
            CanisterStatus::Running {
                call_context_manager,
            } => Some(call_context_manager),
            CanisterStatus::Stopping {
                call_context_manager,
                ..
            } => Some(call_context_manager),
            CanisterStatus::Stopped => None,
        }
    }

    /// Mutable counterpart of `call_context_manager()`.
    pub fn call_context_manager_mut(&mut self) -> Option<&mut CallContextManager> {
        match &mut self.status {
            CanisterStatus::Running {
                call_context_manager,
            } => Some(call_context_manager),
            CanisterStatus::Stopping {
                call_context_manager,
                ..
            } => Some(call_context_manager),
            CanisterStatus::Stopped => None,
        }
    }

    /// Pushes a `Request` type message into the relevant output queue.
    /// This is preceded by withdrawing the cycles for sending the `Request` and
    /// receiving and processing the corresponding `Response`.
    /// If cycles withdrawal succeeds, the function also reserves a slot on the
    /// matching input queue for the `Response`.
    ///
    /// # Errors
    ///
    /// Returns a `QueueFull` error along with the provided message if either
    /// the output queue or the matching input queue is full.
    ///
    /// Returns a `CanisterOutOfCycles` error along with the provided message if
    /// the canister's cycles balance is not sufficient to pay for cost of
    /// sending the message.
    pub fn push_output_request(&mut self, msg: Request) -> Result<(), (StateError, Request)> {
        // A canister may only send requests on its own behalf.
        assert_eq!(
            msg.sender, self.canister_id,
            "Expected `Request` to have been sent by canister id {}, but instead got {}",
            self.canister_id, msg.sender
        );
        self.queues.push_output_request(msg)
    }

    /// Pushes a `Response` type message into the relevant output queue. The
    /// protocol should have already reserved a slot, so this cannot fail. The
    /// canister is also refunded the excess cycles that was reserved for
    /// sending this response when the original request was received.
    ///
    /// # Panics
    ///
    /// Panics if the queue does not already exist or there is no reserved slot
    /// to push the `Response` into.
    pub fn push_output_response(&mut self, msg: Response) {
        // A canister may only respond on its own behalf.
        assert_eq!(
            msg.respondent, self.canister_id,
            "Expected `Response` to have been sent by canister id {}, but instead got {}",
            self.canister_id, msg.respondent
        );
        self.queues.push_output_response(msg)
    }

    /// Extracts the next inter-canister or ingress message (in that order).
    /// If no inter-canister messages are available in the induction pool, we
    /// pop the next ingress message.
    pub fn pop_input(&mut self) -> Option<CanisterInputMessage> {
        self.queues.pop_input()
    }

    /// Returns true if there are messages in the input queues, false otherwise.
    pub fn has_input(&self) -> bool {
        self.queues.has_input()
    }

    /// Pushes a `RequestOrResponse` into the induction pool.
    ///
    /// If the message is a `Request`, reserves a slot in the corresponding
    /// output queue for the eventual response and reserves the maximum cycles
    /// cost for sending the `Response` back. If it is a `Response`, the
    /// protocol should have already reserved a slot for it.
    ///
    /// # Notes
    /// * `Running` system states accept requests and responses.
    /// * `Stopping` system states accept responses only.
    /// * `Stopped` system states accept neither.
    ///
    /// # Errors
    ///
    /// Returns a `StateError` along with the provided message:
    /// * `QueueFull` if either the input queue or the matching output queue is
    ///   full when pushing a `Request` message.
    /// * `CanisterOutOfCycles` if the canister does not have enough cycles to
    ///   cover the cost of inducting the message.
    /// * `CanisterStopping` if the canister is stopping and inducting a
    ///   `Request` is attempted.
    /// * `CanisterStopped` if the canister is stopped.
    ///
    /// # Panics
    ///
    /// Panics if a `Response` message is pushed into a queue that does not
    /// already exist or there is no reserved slot in the queue.
    ///
    /// Warning! this is only exposed for test purposes and should not be used
    /// outside of this crate.
    pub fn push_input(
        &mut self,
        index: QueueIndex,
        msg: RequestOrResponse,
    ) -> Result<(), (StateError, RequestOrResponse)> {
        // Only messages addressed to this canister may be inducted here.
        assert_eq!(
            msg.receiver(),
            self.canister_id,
            "Expected `RequestOrResponse` to be targeted to canister id {}, but instead got {}",
            self.canister_id,
            msg.receiver()
        );
        match self.status {
            CanisterStatus::Running { .. } => self.queues.push_input(index, msg),
            CanisterStatus::Stopping { .. } => {
                // Responses are accepted and requests are rejected.
                match msg {
                    RequestOrResponse::Request(req) => Err((
                        StateError::CanisterStopping(self.canister_id()),
                        RequestOrResponse::Request(req),
                    )),
                    RequestOrResponse::Response(response) => self
                        .queues
                        .push_input(index, RequestOrResponse::Response(response)),
                }
            }
            CanisterStatus::Stopped => {
                // Requests and responses are both rejected.
                Err((StateError::CanisterStopped(self.canister_id()), msg))
            }
        }
    }

    /// Mutable access to the canister's message queues.
    pub fn queues_mut(&mut self) -> &mut CanisterQueues {
        &mut self.queues
    }

    /// Returns a boolean whether the system state is ready to be `Stopped`.
    /// Only relevant for a `Stopping` system state.
    pub fn ready_to_stop(&self) -> bool {
        match &self.status {
            CanisterStatus::Running { .. } => false,
            CanisterStatus::Stopping {
                call_context_manager,
                ..
            } => {
                // Ready once there are no outstanding callbacks or open call
                // contexts left to resolve.
                call_context_manager.callbacks().is_empty()
                    && call_context_manager.call_contexts().is_empty()
            }
            CanisterStatus::Stopped => true,
        }
    }

    /// Returns a static human-readable name for the current status.
    pub fn status_string(&self) -> &'static str {
        match self.status {
            CanisterStatus::Running { .. } => "Running",
            CanisterStatus::Stopping { .. } => "Stopping",
            CanisterStatus::Stopped => "Stopped",
        }
    }

    /// See IngressQueue::filter_messages() for documentation
    pub fn filter_ingress_messages<F>(&mut self, filter: F)
    where
        F: FnMut(&Arc<Ingress>) -> bool,
    {
        self.queues.filter_ingress_messages(filter);
    }

    /// Returns the memory that is currently used by the `SystemState`.
    pub fn memory_usage(&self) -> NumBytes {
        crate::num_bytes_from(self.stable_memory_size)
    }

    /// Records a request to stop the canister. Must only be called while the
    /// canister is `Stopping`; panics otherwise.
    pub fn add_stop_context(&mut self, stop_context: StopCanisterContext) {
        match &mut self.status {
            CanisterStatus::Running { .. } | CanisterStatus::Stopped => {
                panic!("Should never add_stop_context to a non-stopping canister.")
            }
            CanisterStatus::Stopping { stop_contexts, .. } => stop_contexts.push(stop_context),
        }
    }

    /// Clears stable memory of this canister.
    pub fn clear_stable_memory(&mut self) {
        self.stable_memory = PageMap::default();
        self.stable_memory_size = NumWasmPages::new(0);
    }

    /// Method used only by the dashboard.
    pub fn collect_controllers_as_string(&self) -> String {
        self.controllers
            .iter()
            .map(|id| format!("{}", id))
            .collect::<Vec<String>>()
            .join(" ")
    }
}
// Equal compares two kernels and returns true if they are equal. func Equal(a, b []byte) error { if len(a) != len(b) { return fmt.Errorf("images differ in len: %d bytes and %d bytes", len(a), len(b)) } var ba BzImage if err := ba.UnmarshalBinary(a); err != nil { return err } var bb BzImage if err := bb.UnmarshalBinary(b); err != nil { return err } if !reflect.DeepEqual(ba.Header, bb.Header) { return fmt.Errorf("headers do not match: %s", ba.Header.Diff(&bb.Header)) } if len(ba.KernelCode) != len(bb.KernelCode) { return fmt.Errorf("kernel lengths differ: %d vs %d bytes", len(ba.KernelCode), len(bb.KernelCode)) } if len(ba.BootCode) != len(bb.BootCode) { return fmt.Errorf("boot code lengths differ: %d vs %d bytes", len(ba.KernelCode), len(bb.KernelCode)) } if !reflect.DeepEqual(ba.BootCode, bb.BootCode) { return fmt.Errorf("boot code does not match") } if !reflect.DeepEqual(ba.KernelCode, bb.KernelCode) { return fmt.Errorf("kernels do not match") } return nil }
// NewCmdStruct construct NewCmd with string func NewCmdStruct(cmd string) (c *CmdStruct) { c = &CmdStruct{ i: 9, l: uint(len(cmd)), d: []byte(cmd), } return }
package main import ( "fmt" "os" "path/filepath" "github.com/mark-rushakoff/jty/pkg/jty" "github.com/spf13/afero" "github.com/spf13/pflag" ) func main() { fs := pflag.NewFlagSet("jty", pflag.ExitOnError) fs.Usage = func() { exe := filepath.Base(os.Args[0]) fmt.Fprintf(os.Stderr, "USAGE: %s [opts] [[INPUT_JSONNET OUTPUT_YAML]...]:\n", exe) fmt.Fprintln(os.Stderr, fs.FlagUsages()) fmt.Fprintf(os.Stderr, `ENVIRONMENT VARIABLES JSONNET_PATH is a colon-(semicolon on Windows) separated list of directories added in reverse order before the paths specified by --jpath (i.e. left-most wins). The follow three invocations are equivalent: JSONNET_PATH=a:b jty -J c -J d JSONNET_PATH=d:c:a:b jty jty -J b -J a -J c -J d `) fmt.Fprintf(os.Stderr, `EXAMPLE USES Evaluate in.jsonnet and save the resulting YAML as out.yaml: %[1]s in.jsonnet out.yaml Evaluate multiple .jsonnet files and save the resulting YAML in specific locations: %[1]s in1.jsonnet out/1.yaml conf.jsonnet conf.yaml Evaluate each .jsonnet file under the current directory, and save the .yml file adjacent to the .jsonnet file: find . -name '*.jsonnet' \ -exec bash -c 'for p in "$@"; do printf "%%s\n%%s.yml\n" "$p" "${p%%.jsonnet}" done' _ {} + | %[1]s -i Evaluate each .jsonnet file under the current directory, and for each file foo.jsonnet save a relative yml/foo.yml file (useful for tools that expect only .yml files in a directory): find . -name '*.jsonnet' \ -exec bash -c 'for p in "$@"; do printf "%%s\n%%s/yml/%%s.yml\n" "$p" "$(dirname "$p")" "$(basename "$p" .jsonnet)" done' _ {} + | %[1]s -i `, exe) } var flags jty.Flags flags.AddToFlagSet(fs) if err := fs.Parse(os.Args[1:]); err != nil { fs.Usage() os.Exit(1) } flags.FinishParse(os.Getenv("JSONNET_PATH")) if flags.HelpRequested { fs.Usage() os.Exit(0) } flags.Args = fs.Args() c := &jty.Command{ Stdin: os.Stdin, Stdout: os.Stdout, Stderr: os.Stderr, FS: afero.NewOsFs(), } if err := c.Run(&flags); err != nil { fmt.Fprintln(os.Stderr, err.Error()) os.Exit(1) } }
module Test.WebDriver.Utils where

import Data.Text (Text)
import Data.Text.Encoding as TE
import qualified Network.HTTP.Types.URI as HTTP

-- | Percent-encode a 'Text' value for use in a URL.
-- The text is round-tripped through UTF-8 bytes so that non-ASCII
-- characters are encoded correctly. The 'False' argument is the
-- query-string flag of 'HTTP.urlEncode' (see http-types docs).
urlEncode :: Text -> Text
urlEncode t = TE.decodeUtf8 (HTTP.urlEncode False (TE.encodeUtf8 t))
#pragma once

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

#include "gadget_defs.h"
#include "filesplitter.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Split a Gadget snapshot into multiple output files.
 *
 * filebase    - base name of the input snapshot file(s).
 * outfilebase - base name for the generated output files.
 * noutfiles   - number of output files to produce.
 * copy_kind   - copy strategy; see file_copy_options in filesplitter.h.
 *
 * Returns an int status code; presumably 0 on success and non-zero on
 * failure — confirm against the implementation in filesplitter.c.
 */
int split_gadget(const char *filebase, const char *outfilebase, const int noutfiles, const file_copy_options copy_kind);

#ifdef __cplusplus
}
#endif
/**
 * JPA entity mapped to the {@code public.ics_position} table
 * (originally generated by hbm2java).
 *
 * <p>Holds a position code, display name, type and description, plus the
 * many-to-many set of {@link UserInfo} rows that reference this position
 * (the owning side of the association is {@code UserInfo.icsPositions}).
 */
@Entity
@Table(name = "ics_position", schema = "public")
public class IcsPosition implements java.io.Serializable {

	// Explicit serialVersionUID: the class is Serializable, and relying on
	// the JVM-computed default makes serialized forms fragile across
	// recompiles (Effective Java, Item 87).
	private static final long serialVersionUID = 1L;

	private int code;
	private String name;
	private String posType;
	private String description;
	private Set<UserInfo> userInfos = new HashSet<UserInfo>(0);

	/** No-arg constructor required by JPA/Hibernate. */
	public IcsPosition() {
	}

	/** Creates a position with only its primary key set. */
	public IcsPosition(int code) {
		this.code = code;
	}

	/** Fully-populated constructor. */
	public IcsPosition(int code, String name, String posType, String description, Set<UserInfo> userInfos) {
		this.code = code;
		this.name = name;
		this.posType = posType;
		this.description = description;
		this.userInfos = userInfos;
	}

	/** Primary key of the position. */
	@Id
	@Column(name = "code", unique = true, nullable = false)
	public int getCode() {
		return this.code;
	}

	public void setCode(int code) {
		this.code = code;
	}

	@Column(name = "name", length = 10)
	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}

	@Column(name = "pos_type", length = 10)
	public String getPosType() {
		return this.posType;
	}

	public void setPosType(String posType) {
		this.posType = posType;
	}

	@Column(name = "description", length = 60)
	public String getDescription() {
		return this.description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	/** Users assigned to this position; inverse side of the association. */
	@ManyToMany(fetch = FetchType.LAZY, mappedBy = "icsPositions")
	public Set<UserInfo> getUserInfos() {
		return this.userInfos;
	}

	public void setUserInfos(Set<UserInfo> userInfos) {
		this.userInfos = userInfos;
	}

}
# repo: santoshghimire/IL-Jobcrawl
import smtplib
import logging
import mimetypes
from email.mime.multipart import MIMEMultipart
from email import encoders
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.text import MIMEText
from jobcrawl import settings
import os

# SMTP configuration, read once at import time from project settings.
email_from = settings.EMAIL_FROM
email_to = settings.EMAIL_TO
smtp_server = settings.SMTP_SERVER
smtp_port = settings.SMTP_PORT
username = settings.SMTP_USERNAME
password = settings.SMTP_PASSWORD


def send_plain_email(subject, body, to=None, multi=False):
    """Send a plain-text email via the configured SMTP server.

    :param subject: message subject line.
    :param body: plain-text message body.
    :param to: optional explicit recipient address; when omitted, the first
        address in ``settings.EMAIL_TO`` is used.
    :param multi: unused here; kept for backward compatibility with callers.
    """
    # Bug fix: the original assigned the local ``email_to`` only in the
    # else-branch but read it unconditionally below, so calling with ``to=``
    # raised UnboundLocalError — and the explicit recipient was never used
    # for the SMTP envelope. Compute the recipient once and use it for both
    # the header and the envelope.
    if to:
        recipient = to
    else:
        recipient = settings.EMAIL_TO.split(',')[0]

    msg = MIMEMultipart()
    msg["From"] = email_from
    msg["To"] = recipient
    msg["Subject"] = subject
    msg.preamble = subject
    msg.attach(MIMEText(body, 'plain'))

    server = smtplib.SMTP("{}:{}".format(smtp_server, smtp_port))
    server.starttls()
    server.login(username, password)
    server.sendmail(email_from, recipient.split(","), msg.as_string())
    logging.info('***************************************************')
    logging.info('Email Successfully Sent to {} .'
                 'subject={}, body={}'.format(recipient, subject, body))
    logging.info('***************************************************')
    server.quit()


def send_email(directory, file_name, body, multi=False):
    """Send an email with one or more files attached.

    :param directory: directory containing the attachment file(s).
    :param file_name: a single file name, or a list of names when ``multi``
        is True.
    :param body: plain-text message body.
    :param multi: when True, ``file_name`` is a list and the subject is
        derived from the first entry's leading 10 characters (a date prefix).
    """
    if multi:
        file_to_send = ["{}/{}".format(directory, i) for i in file_name]
        subject = '{}_Daily-List-Of-Competitor-Jobs.xlsx'.format(
            file_name[0][:10])
    else:
        file_to_send = ["{}/{}".format(directory, file_name)]
        subject = file_name

    msg = MIMEMultipart()
    msg["From"] = email_from
    msg["To"] = email_to
    msg["Subject"] = subject
    msg.preamble = subject
    textpart = MIMEText(body, 'plain')
    for each_file in file_to_send:
        attachment = get_attachment(each_file)
        attachment.add_header(
            "Content-Disposition", "attachment",
            filename=os.path.basename(each_file)
        )
        msg.attach(attachment)
    # Body part is attached after the attachments, matching the original
    # message layout.
    msg.attach(textpart)

    server = smtplib.SMTP("{}:{}".format(smtp_server, smtp_port))
    server.starttls()
    server.login(username, password)
    server.sendmail(email_from, email_to.split(","), msg.as_string())
    logging.info('***************************************************')
    logging.info('Email Successfully Sent to {} .'
                 'directory={}, file_name={}, body={}'
                 ''.format(email_to, directory, file_name, body))
    logging.info('***************************************************')
    server.quit()


def get_attachment(file_to_send):
    """Build a MIME part for ``file_to_send`` based on its guessed type."""
    ctype, encoding = mimetypes.guess_type(file_to_send)
    if ctype is None or encoding is not None:
        # Unknown type, or a compressed file: fall back to generic binary.
        ctype = "application/octet-stream"
    maintype, subtype = ctype.split("/", 1)
    if maintype == "text":
        # NOTE(review): charset is not computed; this assumes the platform
        # default encoding is acceptable for text attachments.
        with open(file_to_send) as fp:
            attachment = MIMEText(fp.read(), _subtype=subtype)
    elif maintype == "image":
        with open(file_to_send, "rb") as fp:
            attachment = MIMEImage(fp.read(), _subtype=subtype)
    elif maintype == "audio":
        with open(file_to_send, "rb") as fp:
            attachment = MIMEAudio(fp.read(), _subtype=subtype)
    else:
        with open(file_to_send, "rb") as fp:
            attachment = MIMEBase(maintype, subtype)
            attachment.set_payload(fp.read())
        encoders.encode_base64(attachment)
    return attachment
def lock_time(self):
    """Return the change time (st_ctime) of this object's lock file.

    Raises the project-defined ``Error`` if the lock file cannot be
    stat'ed (e.g. it does not exist).
    """
    try:
        # Index 9 of the os.lstat() result tuple is st_ctime.
        ctime = os.lstat(self.lockfilename)[9]
    except os.error as why:
        # Fix: the original used Python-2-only syntax
        # (`except os.error, why:` / `raise Error, ...`); this form is
        # valid on both Python 2.6+ and Python 3.
        # NOTE(review): the message reports self.filename while the stat
        # was on self.lockfilename — preserved as-is, confirm intended.
        raise Error(
            'could not read file lock info for "%s": %s'
            % (self.filename, why))
    return ctime
Jan 23, 2017; Miami, FL, USA; Miami Heat forward Willie Reed (35) reacts after a charging foul was called on Golden State Warriors forward Kevin Durant (not pictured) during the second half at American Airlines Arena. The Heat defeated the Golden State Warriors 105-102. Mandatory Credit: Steve Mitchell-USA TODAY Sports The Stretch 4: 13-Okaro and an end to the winning streak by Allana Tachauer One last look at the Miami Heat’s incredible 13-game win streak by the numbers. In a span of merely three weeks, the Miami Heat went from 11-30 to two games out of 8th place in the Eastern Conference. The team’s 13-game win streak is arguably the best story to come out of the NBA season thus far. At the very least, the unlikeliest. Unfortunately, as the saying goes, all good things must come to an end. Miami’s 117-109 loss to the Philadelphia 76ers on Saturday night was demoralizing. The Heat looked drained, it being the last game of a four-game road trip. Without Dion Waiters — still nursing a bum ankle, but expected back on Monday night — they stood little chance. (How far we’ve come that Waiters has gone from liability to a borderline necessity.) So now, we take a look back at the streak. Consider it a postmortem on the greatness that was. A breakdown of the most interesting stats we could find. Waiters Island is getting crowded Where else could we start off but on Waiters Island. Although he was only able to suit up for the first 11 games of the streak before turning his ankle late against the Minnesota Timberwolves, Waiters’ play drew national attention. Miami was able to squeak out a couple of wins without him, but his absence was clearly felt. In the 11 games he played, Waiters was spectacular. He averaged 20.6 points, 4.2 rebounds, and 4.8 assists, on 49 percent shooting, and 49 percent from three. Waiters led Miami in plus-minus, with a plus-9.1. (No one else on the team was over a plus-7.3.) 
In fact, that mark was so impressive that it put him at 11th in the NBA for plus-minus between January 17th and February 7th — the day the streak started and the night he got hurt. Oh, and who could forget… he also displayed a clutch gene (I hate myself for writing that) that most didn’t know he had. Waiters hit a fairly memorable shot against a half-decent opponent on January 23rd. He did it again two nights later. Dion Waiters (14 of 24 points in 4th) helps Heat erase 18-point deficit, beat Nets 109-106 HIGHLIGHTS: https://t.co/P1rMJtyxDJ #HEATIsON pic.twitter.com/JvUgMG5rXR — NBA.com (@NBAcom) January 26, 2017 Thus, Waiters Island is now nearly to capacity. There is still some room though, if you’d like to join us. As Zach Lowe wrote last week, we are an inclusive community. The Heat were on fire (sorry!) from three For the span of Miami’s streak, the team was on fire from beyond-the-arc. They ranked 1st-overall in the NBA in three-point shooting at 42 percent. To convert a better rate from three than the Golden State Warriors for over 15 percent of the season is outright crazy. If you look at just the players who attempted more than one three-pointer, the team’s barrage from deep was led by Goran Dragic at 53.8 percent. Followed by Waiters, who was at an even 50 percent, and Okaro White (!!!), who shot 47 percent. In all, the Heat had five guys shoot over 40 percent from three during the streak, and a sixth — in Rodney McGruder — at 39.5 percent. We can’t forget about Willie Reed, either. Reed made every three-pointer he attempted for the summation of the run. He went 1-for-1. Fire emoji. A surprising leader in net-rating If I were to ask you right now: Who led Miami in net-rating over the 13 games prior to the loss against Philly, you’d probably want to know what the heck net-rating is. In response, I would tell you that it’s an advanced metric that measures point differential per 100 possessions. (Yes, defense is taken into account.) 
Then, you’d start guessing. Most likely with Dragic or Waiters. You’d be wrong in either case. If not those two, then it certainly had to have been Hassan Whiteside. Nope. Ah, then it must have been either of the Johnson’s. Tyler or James. Doesn’t matter which. Wrong again. Ready to give up? It was Willie freaking Reed. No, seriously. Him. Reed led the Heat with a 16.0 net-rating for the duration of the win streak. Second was Wayne Ellington at 14.7, trailed by Waiters with a 14.4. Dragic was a healthy 9.8, J. Johnson a 9.7. The two lowest net-ratings (among players who participated in more than two games) were Whiteside’s and T. Johnson’s — 8.9 and 4.0 respectively. (It’s just a little concerning that Whiteside was so greatly outplayed by his backup during the Heat’s best run of the season. We can chalk it up to sample size, I guess. Yeah, we’ll just go with that.) Miami’s bench was among the NBA’s best Do you guys remember how incredible the Big Three era was in Miami? I mean, of course you do. Being the center of the basketball world and making four-straight Finals was an amazing experience. What you may have forgotten — or made yourself forget — is just how miserable the Heat’s bench play was back then. Norris Cole was young and prone to mistakes. Mike Miller always had some sort of injury. Shane Battier and Ray Allen were on their last legs. Although each of those guys had their moments of absolute brilliance, their overall play was poor. (I’m not even going to mention Dexter Pittman, Greg Oden, or Eddy Curry. I refuse to go there.) In three of the four Big Three years, the Heat ranked 24th or lower in bench scoring. In 2010-11, they were dead-last, at 21.9 points per game. (They did improve to 17th in 2013-14.) These days, now that Miami has been (in a sense) freed from paying three players max salaries, they’ve had the money to build a respectable bench. On the season, the Heat’s reserves are 11th in scoring, with 37.6 points per game. 
However, they took that to an even higher plane during the win streak. Between January 17th and February 10th, Miami’s bench scored 40.0 points per game, good for sixth-best in the NBA. They were ninth in field goal percentage at 45.1 percent, and 1st in net-rating at 9.8. What makes those numbers even more impressive is when you factor in the team’s injuries. Josh Richardson hasn’t played in weeks, forcing McGruder into a starting role. Neither has Justise Winslow. T. Johnson missed a few games in there, as well. Just goes to show how effective guys like Reed and J. Johnson have been. Which brings us to our final point. James Johnson was the spark plug off the Heat’s bench J. Johnson was signed to a one-year deal by the Heat this past offseason without much ballyhoo. And who could blame the general media for ignoring the news? After all, he had been nothing more than a career journeyman before arriving in Miami. A guy with a lot of talent to be sure, but without the want to be anything more than a role player. (He basically said as much himself). So to watch him have this career resurgence with the Heat has been nothing short of stunning. J. Johnson is setting career-highs in multiple categories, all while being the team’s most versatile defender and leader of the second unit. During Miami’s run of excellence, he took his game up another notch, too. J. Johnson averaged 13.9 points per game during the streak, to go along with 5.1 rebounds and 4.2 assists. He shot 48 percent from the floor, and a solid 73 percent from the foul line. On top of that, he also blocked 1.3 shots and had 1.3 steals a night. His defensive-rating was 6th-best in the NBA among players coming off the bench who played at least eight games. Just dominant overall play from J. Johnson, on both sides of the ball. The Heat don’t rip off 13 wins in a row without him, you can be sure of that. 
So of Heat owner Micky Arison, and team president Pat Riley, I ask just one thing: Pay the man and keep him in Miami this summer.
By Yvonne Renfrew | Tuesday, October 11, 2011 Originally published on August 9, 2011 in our free SmallLaw newsletter. Instead of reading SmallLaw here after the fact, sign up now to receive future issues in realtime. At the risk of being accused of sexism, I suspect female SmallLaw subscribers are more conversant than readers of the male persuasion with the concept of "shopping in your closet." But with hard times for many solo and small law firms (not to mention your stock portfolio), "shopping in your software closet" may, at least in the short term, prove a wise option. This particular trek down memory lane may revive not only fond memories of software long since abandoned, but in fact may restore to your consciousness a particular species of software much needed, but which was not yet ready for primetime when you originally purchased it at Egghead on floppy diskettes. A History of the PIM and Super PIM Take personal information managers for example. The DOS program Sidekick — a "TSR" (terminate and stay resident) program now long-since dead and buried — was perhaps the first widely used PIM, and although fairly rudimentary in its functions, it laid the inspiration for what was to come. The ensuing history of PIMs is littered with the dead and dying remains of a special breed that actually went far beyond serving as repositories for addresses, telephone numbers, appointments and the like thanks to "customizability." With these programs, we could actually organize our information in a way that made sense for our law practice rather than in a manner dictated by the software publisher. Let's call these "Super PIMs." Foremost among the now nearly extinct Super PIMs (more below on the survivors) may be the well-loved and greatly lamented Ecco Pro. 
Originally written by Robert Perez and Pete Polash, founders of Arabesque software, Ecco Pro was later sold to NetManage, which (despite Perez's continuing involvement) ceased further development of the software in 1997 (see the TechnoLawyer Archive for several Ecco Pro eulogies). Ecco Pro was the finest, most versatile, and most powerful information manager easily accessible to the rank and file of computer users (as opposed to the technologically elite who could master the much steeper learning curves of more demanding idea and information managers such as Lotus Agenda — not to be confused with Lotus Organizer — and GrandView). Ecco Pro was and, thanks to a cult following, remains a strong favorite of software cognoscenti. Although it served also as a repository for the usual contact and appointment information, its greatest value was found in its outlining function, which permitted assigning any outline item to nearly any number of "categories," which could (at the user's option) be shown as columns containing information of specified kinds (e.g., text, dates, drop-down choice lists, check boxes) about any (or all) individual items appearing in the outline, and could link any outline item to any external file. Among its many other features, Ecco Pro installed an icon (the "Shooter") into other programs so that you can add text highlighted in the other program to your Ecco Pro outline. And better yet, the information stored in Ecco Pro could be synchronized with the then nearly ubiquitous PalmPilot hardware PIMs. The software has languished for more than a decade. Yet so fanatical are Ecco Pro die-hards that volunteers have continued to develop and update the program (including a 32-bit architecture), which remains available for download. 
A visit to this page is worthwhile regardless of your interest in Ecco Pro as it will show you what really good software was like "back in the day," and will doubtless answer any questions you may have as to why so many thousands of very experienced users still quest for "modern" software that will live up to the standards of usability and value so long ago set by Ecco Pro. The demise of Ecco Pro was blamed by many (including the publishers of Ecco Pro themselves) on Microsoft's decision to bundle Outlook with Office at no extra charge. And while that was undoubtedly part of the problem, Ecco Pro also failed by marketing itself as merely a fancy PIM to lawyers and others then lacking technological sophistication sufficient to permit them to appreciate that the value and functionality of the product went so far beyond that of supposedly "free" Outlook that the two might as well have originated on different planets. Other legendary Super PIMs are similarly admirable, although not as realistically usable in today's law firm even if still available. For example, the remarkable DOS-based Lotus Agenda written by Lotus co-founder Mitch Kapor was described by Scott Rosenberg in his excellent article reviewing the evolution of PIMs, From Agenda to Zoot as the "granddaddy" of the free-form PIM. If you're still grokking DOS, you can download a copy. Agenda was abandoned by Lotus after only a single upgrade in favor of the inferior (but more easily marketed to the masses) Lotus Organizer — a move that contributes to my view of Lotus as a company with the "reverse Midas touch" given the number of excellent programs that met their demise under the company's stewardship. Symantec, another software publisher I regard as too often traveling in the wrong direction on the road between the ridiculous and the sublime, was also a player in the Super PIM arena with its 1987 acquisition from Living Videotext of the excellent outlining and information management software GrandView.
Symantec, however, then beset by financial difficulties and the exodus of the founders of Videotext, discontinued the product in the early 1990s. Back to the Future: Today's Super PIMS While the Super PIMs never attained mass market appeal, they paved the way for Super Specialized PIMs — databases with a friendly user interface designed for a specific type of information. For example, LexisNexis' CaseMap is a Super Specialized PIM on which I rely to manage the information in my litigation matters. Sadly, the price of CaseMap has rocketed into the stratosphere, and is thus unattainable for many new solos. But what if you're not a litigator? Or what if you are a litigator who needs to store non-litigation information? Fortunately modern-day Super PIMs of the general variety still exist. You may not have heard of these products, but it's likely that one of them could boost your productivity at a relatively low cost. Among present-day heirs apparent, look for Zoot XT soon (TL NewsWire will no doubt keep you apprised of its launch). Zoot was very slow to blossom into the Windows era (having long retained a rather DOS-like look and feel), and has only just recently become a 32-bit product now that we live in a 64-bit computing world. Zoot is pretty much a one-man-show — that man being Tom Davis of Vermont. While this might seem like a downside at the outset, Zoot's survival suggests that big-company backing may not be such a big plus after all. And Zoot certainly enjoys strong (nearly cultish) user loyalty and support. I will review Zoot XT here in SmallLaw shortly after it becomes available. What's that? You use a Mac? Once in a while, a software program makes me question my dedication to PC over Mac. OmniGroup's OmniOutliner is just such a product.
Catering to my Ecco Pro nostalgia (but in the most thoroughly modern way), OmniOutliner permits the creation of columns, each of which can contain different kinds of information (e.g., pop-up list, checkbox, numerical value, dates, duration, text, and even calculated values) concerning the corresponding outline item. You can separately format rows and columns. And a batch search will instantly collect all instances of a specified search term. You can embed or link to any type of file online or off. The Pro version even records audio. In short, OmniOutliner is the 2011 reincarnation of Ecco Pro — only better. Obviously, I have hard choices in my immediate future. Should I buy a Mac for this killer app, and install and run VMware Fusion for all my Windows software, or just use the iPad version of OmniOutliner, which in its present iteration, falls far short of its Mac counterpart? Software Lessons for Small Law Firms to Heed The moral of this story is that "newer" is not always "better." "Old" software need not necessarily be abandoned on an ice floe — at least until a truly capable replacement arrives to save the day. And most importantly, good ideas never die though they may take a decade or two to realize their full potential and attract a large enough audience to support them. Written by Yvonne M. Renfrew of Renfrew Law. How to Receive SmallLaw Small firm, big dreams. Published first via email newsletter and later here on our blog, SmallLaw provides you with a mix of practical advice that you can use today, and insight about what it will take for small law firms like yours to thrive in the future. The SmallLaw newsletter is free so don't miss the next issue. Please subscribe now.
def iteritems_by_object_type(self, object_type):
    """Yield (key, object_info) pairs whose 'object-type' matches.

    Lazily filters the full ``self.iteritems()`` stream; yields items in
    the underlying iteration order.
    """
    for item_key, item_info in self.iteritems():
        if item_info['object-type'] != object_type:
            continue
        yield item_key, item_info
/*
 * 
 * Copyright 2014 <NAME>
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * 
 */
package org.magnum.mobilecloud.video;

/**
 * This class allows you to estimate the score for your solution before handing
 * it in.
 *
 * This class also generates the required submission package to hand your
 * solution in.
 *
 * In order to run this AutoGrading application, in Eclipse, Right-click on
 * it->Run As->Java Application
 *
 * Please read the instructions that the application prints very carefully. It
 * will store the generated solution submission packages in the
 * coursera-submission folder within the project directory. After running this
 * application, you will need to right-click on the root of the project in
 * Eclipse->Refresh to see this folder inside of Eclipse. Each time that you run
 * this application, it generates a new unique submission package. Make sure
 * that you submit the correct (e.g., most up to date) package when you turn
 * your assignment in.
 *
 * @author jules
 *
 * DO NOT MODIFY THIS CLASS
 *
 * (The decorative ASCII-art banner that appeared in the original generated
 * file has been omitted here; it carried no information.)
 */
public class AutoGrading {

	public static void main(String[] args) throws Exception {
		// Delegates all grading and submission-package generation to the
		// course-provided AutoGradeSpec.
		AutoGradeSpec.grade(args);
	}

}
Influences of female partner preference on potential reproductive outcome in Japanese macaques. The immediate effects of female partner preference on male mating behaviors and its potential influence on male reproductive success and conception in the Japanese macaque (Macaca fuscata) were analyzed. Although male dominance interactions probably led to low-ranking males having more single-day consortships and high-ranking males more multi-day consortships, dominant males were unable to prevent females from mating with preferred subordinate males. Ultimately, there was no marked difference in the number of estimated offspring sired. Females chose not to mate with certain males and actively mated with others, suggesting that female partner preferences do affect conception. Evidence for this was found in paternity estimates, which reflected observed preferences for particular middle-ranking young adult males.
/*
 * -----------------------------------------------------------------------
 * Check Signals
 *      Called after every instruction to check for queued IO-jobs
 *      and for the signals IRQ and FIQ
 * -----------------------------------------------------------------------
 */
static inline void CheckSignals(void)
{
	/* Fast path: skip all individual checks when no signal is pending. */
	if (gcpu.signals) {
		/* IRQ is marked likely() — presumably the common signal. */
		if (likely(gcpu.signals & ARM_SIG_IRQ)) {
			/* NOTE(review): the second argument (4) is passed to every
			 * exception here; its meaning is not visible in this file —
			 * confirm against ARM_Exception(). */
			ARM_Exception(EX_IRQ, 4);
		}
		/* NOTE(review): IRQ is tested before FIQ, although FIQ is
		 * architecturally the higher-priority interrupt on ARM — confirm
		 * this ordering is intentional. */
		if (unlikely(gcpu.signals & ARM_SIG_FIQ)) {
			ARM_Exception(EX_FIQ, 4);
		}
		/* Enter the debugger when debug mode has been requested. */
		if (unlikely(gcpu.signals & ARM_SIG_DEBUGMODE)) {
			Do_Debug();
		}
		/* Restart the instruction decoder when requested; checked last. */
		if (unlikely(gcpu.signals & ARM_SIG_RESTART_IDEC)) {
			ARM_RestartIdecoder();
		}
	}
}
/**
 * Demonstrates composing asynchronous stages with
 * {@link java.util.concurrent.CompletableFuture}: an async computation,
 * two transformation stages, and a terminal consumer.
 *
 * @author buildupchao
 * @date 2017/8/17
 */
public class CompletableFutureExample {

    public static void main(String[] args) {
        callInStream();
    }

    /**
     * Runs {@code calculate(50)} asynchronously, wraps the result in
     * quotes, prints it, and blocks until the whole pipeline completes.
     */
    public static void callInStream() {
        CompletableFuture<Void> future = CompletableFuture
                .supplyAsync(() -> calculate(50))
                // Identity stage kept from the original example to show a
                // thenApply link; calculate() already returns an Integer.
                .thenApply((i) -> Integer.valueOf(i))
                .thenApply((str) -> "\"" + str + "\"")
                .thenAccept(System.out::println);
        try {
            future.get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
    }

    /**
     * Simulates a slow computation: sleeps ~3 seconds, then returns the
     * square of {@code value}.
     */
    private static Integer calculate(Integer value) {
        try {
            Thread.sleep(3000);
        } catch (InterruptedException ex) {
            // Fix: the original swallowed the interrupt; restore the
            // thread's interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
        }
        return value * value;
    }
}
/*
  Computes whether a symbol (defined directly as a typedef/typename, or via
  a class) can be empty.

      bool is_empty<S>::value

  The complication is that this is a recursive predicate, and if you try to
  ask is_empty<s>::value whilst trying to compute is_empty<s>::value, then
  the C++ compiler will give a compilation error.

  Example:

      class e
      {
          typename rule = Rules<
              e,
              Rule<100, e>,
              Rule<101, e, t>,
              Rule<102, t, e>,
              Rule<103>,
              Rule<104, r>,
          >;
      };

  In order to process this, we can just skip all recursive calls.
*/
namespace slurp {

    // Primary template: T is a grammar symbol with a nested `rule` type.
    // `Recursive` is computed from whether T is already in the Visited
    // type-set (ts_contains / ts_concat are presumably type-set helpers
    // defined elsewhere — not visible in this file). T is added to the
    // visited set before recursing into its rule.
    template<typename T, typename Visited = ts_empty, bool Recursive = ts_contains<T, Visited>::value>
    struct is_empty {
        using visited = typename ts_concat<T, Visited>::type;
        static const bool value = is_empty<typename T::rule, visited>::value;
    };

    // Recursive case: T was already visited — skip the recursive call and
    // treat it as non-empty to break the cycle.
    template<typename T, typename Visited>
    struct is_empty<T, Visited, true> {
        // Recursive case
        static const bool value = false;
        using visited = Visited;
    };

    // Terminals can never derive the empty string.
    template<int N, typename T, typename Visited>
    struct is_empty<Token<N, T>, Visited, false> {
        static const bool value = false;
        using visited = Visited;
    };

    // A single character terminal: never empty.
    template<int N, typename Visited>
    struct is_empty<Ch<N>, Visited, false> {
        static const bool value = false;
        using visited = Visited;
    };

    // A character range terminal: never empty.
    template<int A, int B, typename Visited>
    struct is_empty<Range<A, B>, Visited, false> {
        static const bool value = false;
        using visited = Visited;
    };

    // A list of alternative rules is empty-derivable if ANY alternative is
    // (logical OR over the alternatives).
    template<typename A, typename... B, typename Visited>
    struct is_empty<Rules<A, B...>, Visited, false> {
        typedef is_empty<A, Visited> t1;
        typedef is_empty<Rules<B...>, Visited> t2;
        static const bool value = t1::value || t2::value;
        using visited = Visited;
    };

    // No alternatives left: base case of the OR-fold.
    template<typename Visited>
    struct is_empty<Rules<>, Visited, false> {
        static const bool value = false;
        using visited = Visited;
    };

    // An empty production (Rule with no symbols) derives epsilon.
    template<int N, typename Visited>
    struct is_empty<Rule<N>, Visited, false> {
        static const bool value = true;
        using visited = Visited;
    };

    // A production is empty-derivable only if EVERY symbol in it is
    // (logical AND over the sequence).
    template<int N, typename T, typename...Ts, typename Visited>
    struct is_empty<Rule<N, T, Ts...>, Visited, false> {
        using t1 = is_empty<T, Visited>;
        using t2 = is_empty<Rule<N, Ts...>, Visited>;
        using visited = Visited;
        static const bool value = t1::value && t2::value;
    };

}
/* * Copyright (C) 2019 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.docksnet.rgraph; import ch.docksnet.rgraph.fqn.FQN; import ch.docksnet.rgraph.fqn.FileFQN; import ch.docksnet.rgraph.fqn.Hierarchically; import ch.docksnet.rgraph.method.OuterReferences; import ch.docksnet.rgraph.method.ReferenceNode; import ch.docksnet.rgraph.method.SourceTargetPair; import ch.docksnet.utils.IncrementableSet; import ch.docksnet.utils.lcom.CalleesSubgraphAnalyzer; import ch.docksnet.utils.lcom.CallersSubgraphAnalyzer; import ch.docksnet.utils.lcom.ClusterAnalyzer; import ch.docksnet.utils.lcom.LCOMAnalyzerData; import ch.docksnet.utils.lcom.LCOMNode; import com.intellij.diagram.DiagramDataModel; import com.intellij.diagram.DiagramEdge; import com.intellij.diagram.DiagramNode; import com.intellij.diagram.DiagramProvider; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.ModificationTracker; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiManager; import com.intellij.psi.PsiReference; import com.intellij.psi.SmartPointerManager; import com.intellij.psi.SmartPsiElementPointer; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import static 
ch.docksnet.rgraph.PsiUtils.getFqn;

/**
 * Base diagram data model for reference graphs: maintains the node/edge sets
 * shown in the diagram, tracks which elements the user explicitly added or
 * removed, recomputes LCOM4 cohesion clusters after each change, and pushes
 * outer-reference statistics into the project tool windows.
 *
 * Subclasses supply the element-specific behaviour (base element, relationship
 * resolution, outer-reference resolution, visibility filtering, edge creation).
 */
@SuppressWarnings("MethodDoesntCallSuperMethod")
public abstract class ReferenceDiagramDataModel extends DiagramDataModel<PsiElement> {

    // Elements explicitly added/removed by the user, keyed by FQN.
    // Smart pointers keep the references valid across PSI modifications.
    private final Map<FQN, SmartPsiElementPointer<PsiElement>> elementsAddedByUser = new HashMap<>();
    private final Map<FQN, SmartPsiElementPointer<PsiElement>> elementsRemovedByUser = new HashMap<>();

    // Current diagram contents, rebuilt by refreshDataModel().
    private final Collection<DiagramNode<PsiElement>> nodes = new HashSet<>();
    // Cache so the same PsiElement always maps to the same node instance.
    private final Map<PsiElement, DiagramNode<PsiElement>> nodesPool = new HashMap<>();
    private final Collection<DiagramEdge<PsiElement>> edges = new HashSet<>();

    private final SmartPointerManager spManager;

    // LCOM4 cluster count of the current graph; recomputed by analyzeLcom4().
    private long currentClusterCount = 0;

    // References that point outside the diagrammed element; shown in tool windows.
    private OuterReferences outerReferences = OuterReferences.empty();

    public ReferenceDiagramDataModel(Project project, DiagramProvider<PsiElement> provider) {
        super(project, provider);
        this.spManager = SmartPointerManager.getInstance(getProject());
    }

    @NotNull
    @Override
    public Collection<? extends DiagramNode<PsiElement>> getNodes() {
        if (this.nodes == null) {
            // NOTE(review): `nodes` is final and initialized inline, so this branch is
            // unreachable; the message is also a raw, never-filled format string.
            throw new IllegalStateException("@NotNull method %s.%s must not return null");
        } else {
            return this.nodes;
        }
    }

    @NotNull
    @Override
    public Collection<? extends DiagramEdge<PsiElement>> getEdges() {
        if (this.edges == null) {
            // NOTE(review): unreachable for the same reason as getNodes(); the message
            // still names the JavaUmlDataModel class this code was adapted from.
            throw new IllegalStateException(String.format("@NotNull method %s.%s must not return null", new Object[]{"com/intellij/uml/java/JavaUmlDataModel", "getEdges"}));
        } else {
            return this.edges;
        }
    }

    /** Rebuilds nodes/edges from scratch, then refreshes derived data (LCOM4, tool windows). */
    @Override
    public void refreshDataModel() {
        clearAll();
        updateDataModel();
        refresh();
    }

    private void refresh() {
        analyzeLcom4();
        updateToolWindow();
    }

    /** Effective element set: everything added by the user minus everything removed. */
    private Set<PsiElement> getElements() {
        Set<PsiElement> result = new HashSet<>();
        for (SmartPsiElementPointer<PsiElement> psiElementPointer : this.elementsAddedByUser.values()) {
            PsiElement element = psiElementPointer.getElement();
            result.add(element);
        }
        for (SmartPsiElementPointer<PsiElement> psiElementPointer : this.elementsRemovedByUser.values()) {
            PsiElement element = psiElementPointer.getElement();
            result.remove(element);
        }
        return result;
    }

    /** Populates nodes and edges from the effective element set and resolved relationships. */
    private void updateDataModel() {
        DiagramProvider<?> provider = getBuilder().getProvider();
        Set<PsiElement> elements = getElements();
        for (PsiElement element : elements) {
            if (isAllowedToShow(element)) {
                this.nodes.add(getReferenceNode(provider, element));
            }
        }
        IncrementableSet<SourceTargetPair> relationships = resolveRelationships();
        for (Map.Entry<SourceTargetPair, Long> sourceTargetPair : relationships.elements()) {
            SourceTargetPair key = sourceTargetPair.getKey();
            DiagramNode<PsiElement> source = findNode(key.getSource());
            DiagramNode<PsiElement> target = findNode(key.getTarget());
            // Self-references are deliberately not rendered as edges.
            if (source != null && target != null && !source.equals(target)) {
                this.edges.add(toEdge(source, target, sourceTargetPair.getValue()));
            }
        }
        PsiElement initialElement = getBaseElement();
        this.outerReferences = getOuterReferences(initialElement);
    }

    /** The root element the diagram was opened on. */
    abstract protected PsiElement getBaseElement();

    /**
     * Collects references from diagram nodes to files outside the diagrammed element,
     * classified relative to this element's own hierarchy (same package / same or
     * other hierarchy).
     */
    private OuterReferences getOuterReferences(PsiElement psiElement) {
        OuterReferences outerReferences = new OuterReferences(psiElement);
        FQN ownFqn = getBaseForOuterReferences(psiElement);
        if (!(ownFqn instanceof Hierarchically)) {
            // Without hierarchy information the references cannot be classified.
            return outerReferences;
        }
        Hierarchically ownHierarchy = (Hierarchically) ownFqn;
        for (DiagramNode<PsiElement> node : getNodes()) {
            PsiElement callee = node.getIdentifyingElement();
            Collection<PsiReference> all = resolveOuterReferences(callee);
            for (PsiReference psiReference : all) {
                if (!(psiReference instanceof PsiElement)) {
                    continue;
                }
                FileFQN otherFile = FileFQN.resolveHierarchically((PsiElement) psiReference);
                outerReferences.update(ownHierarchy, otherFile);
            }
        }
        return outerReferences;
    }

    @NotNull
    abstract protected Collection<PsiReference> resolveOuterReferences(PsiElement callee);

    protected abstract FQN getBaseForOuterReferences(PsiElement psiElement);

    /** Pushes the three outer-reference buckets into their respective tool-window views. */
    private void updateToolWindow() {
        ServiceManager.getService(getProject(), ProjectService.class)
                .getSamePackageReferences()
                .replaceContent(this.outerReferences.getReferencesSamePackage());
        ServiceManager.getService(getProject(), ProjectService.class)
                .getSameHierarchieReferences()
                .replaceContent(this.outerReferences.getReferencesSameHierarchy());
        ServiceManager.getService(getProject(), ProjectService.class)
                .getOtherHierarchieReferences()
                .replaceContent(this.outerReferences.getReferencesOtherHierarchy());
    }

    /** Clears diagram contents; user additions are kept so the model can be rebuilt. */
    protected void clearAll() {
        this.nodes.clear();
        this.edges.clear();
        this.elementsRemovedByUser.clear();
    }

    protected SmartPsiElementPointer<PsiElement> createSmartPsiElementPointer(PsiElement psiElement) {
        return this.spManager.createSmartPsiElementPointer(psiElement);
    }

    /** Returns the pooled node for {@code element}, creating and caching one if absent. */
    @NotNull
    private ReferenceNode getReferenceNode(DiagramProvider<?> provider, PsiElement element) {
        if (this.nodesPool.containsKey(element)) {
            return (ReferenceNode) this.nodesPool.get(element);
        }
        ReferenceNode node = new ReferenceNode(element, provider);
        this.nodesPool.put(element, node);
        return node;
    }

    @Override
    public boolean hasElement(PsiElement element) {
        return findNode(element) != null;
    }

    /** Linear search of the current nodes by FQN equality; null if not present. */
    @Nullable
    private DiagramNode<PsiElement> findNode(PsiElement psiElement) {
        // Iterates over a snapshot so callers may mutate `nodes` while searching.
        Iterator<?> ptr = (new ArrayList<>(this.nodes)).iterator();
        while (ptr.hasNext()) {
            DiagramNode<PsiElement> node = (DiagramNode<PsiElement>) ptr.next();
            FQN fqn = PsiUtils.getFqn((PsiElement) node.getIdentifyingElement());
            if (fqn != null && fqn.equals(getFqn(psiElement))) {
                return node;
            }
        }
        return null;
    }

    @Override
    public boolean isPsiListener() {
        return true;
    }

    /** Removes every marked node from the diagram (unmarking it first), then re-runs LCOM4. */
    public void removeMarkedNodes() {
        List<ReferenceNode> toRemove = new ArrayList<>();
        for (DiagramNode<PsiElement> myNode : this.nodes) {
            if (myNode instanceof ReferenceNode) {
                if (((ReferenceNode) myNode).isMarked()) {
                    toRemove.add((ReferenceNode) myNode);
                    ((ReferenceNode) myNode).switchMarked();
                }
            }
        }
        Iterator<ReferenceNode> iterator = toRemove.iterator();
        while (iterator.hasNext()) {
            ReferenceNode next = iterator.next();
            removeElement((PsiElement) next.getIdentifyingElement());
        }
        analyzeLcom4();
    }

    /** Inverse of removeMarkedNodes(): keeps only marked nodes (clearing their marks). */
    public void isolateMarkedNodes() {
        List<ReferenceNode> toRemove = new ArrayList<>();
        for (DiagramNode<PsiElement> myNode : this.nodes) {
            if (myNode instanceof ReferenceNode) {
                if (!((ReferenceNode) myNode).isMarked()) {
                    toRemove.add((ReferenceNode) myNode);
                } else {
                    ((ReferenceNode) myNode).switchMarked();
                }
            }
        }
        Iterator<ReferenceNode> iterator = toRemove.iterator();
        while (iterator.hasNext()) {
            ReferenceNode next = iterator.next();
            removeElement((PsiElement) next.getIdentifyingElement());
        }
        analyzeLcom4();
    }

    public void unmarkAllNodes() {
        for (DiagramNode<PsiElement> myNode : this.nodes) {
            if (myNode instanceof ReferenceNode) {
                ((ReferenceNode) myNode).unsetMarked();
            }
        }
    }

    /** Recomputes the LCOM4 cluster count from the current node/edge sets. */
    private void analyzeLcom4() {
        LCOMConverter lcomConverter = new LCOMConverter();
        Collection<LCOMNode> lcom4Nodes = lcomConverter.convert(getNodes(), getEdges());
        LCOMAnalyzerData lcomAnalyzerData = new LCOMAnalyzerData(lcom4Nodes);
        ClusterAnalyzer clusterAnalyzer = new ClusterAnalyzer(lcomAnalyzerData);
        this.currentClusterCount = clusterAnalyzer.countCluster();
    }

    /**
     * Removes an element from the model. If it is currently displayed, it is recorded
     * in elementsRemovedByUser (so a rebuild keeps it hidden) and its edges are dropped;
     * otherwise only the pending user addition is discarded.
     */
    private void removeElement(PsiElement element) {
        DiagramNode<PsiElement> node = findNode(element);
        if (node == null) {
            this.elementsAddedByUser.remove(PsiUtils.getFqn(element));
        } else {
            PsiElement toRemove = (PsiElement) node.getIdentifyingElement();
            this.nodes.remove(node);
            this.elementsRemovedByUser.put(PsiUtils.getFqn(element), createSmartPsiElementPointer(toRemove));
            this.elementsAddedByUser.remove(PsiUtils.getFqn(element));
            removeAllEdgesFromOrTo(node);
        }
    }

    /** Drops every edge whose source or target FQN matches the removed node's FQN. */
    private void removeAllEdgesFromOrTo(DiagramNode<PsiElement> node) {
        FQN removedNode = PsiUtils.getFqn(node.getIdentifyingElement());
        Set<DiagramEdge<PsiElement>> toRemove = new HashSet<>();
        for (DiagramEdge<PsiElement> myEdge : this.edges) {
            if (PsiUtils.getFqn(myEdge.getSource().getIdentifyingElement()).equals(removedNode)) {
                toRemove.add(myEdge);
            } else if (PsiUtils.getFqn(myEdge.getTarget().getIdentifyingElement()).equals(removedNode)) {
                toRemove.add(myEdge);
            }
        }
        this.edges.removeAll(toRemove);
    }

    @Override
    public void removeNode(DiagramNode<PsiElement> node) {
        removeElement((PsiElement) node.getIdentifyingElement());
        analyzeLcom4();
    }

    @Override
    public void dispose() {
        // No resources to release; smart pointers are managed by SmartPointerManager.
    }

    /** Records a user-added element; it becomes visible on the next refreshDataModel(). */
    protected void addUserElement(PsiElement child) {
        this.elementsAddedByUser.put(getFqn(child), createSmartPsiElementPointer(child));
    }

    @Nullable
    @Override
    public DiagramNode<PsiElement> addElement(PsiElement psiElement) {
        addUserElement(psiElement);
        // Returns a fresh (non-pooled) node wrapping the just-registered element.
        return new ReferenceNode(ReferenceDiagramDataModel.this.elementsAddedByUser.get(getFqn(psiElement)).getElement(), getProvider());
    }

    /** Whether the given element should appear as a node in this diagram. */
    protected abstract boolean isAllowedToShow(PsiElement element);

    /** Source/target pairs (with reference counts) used to build the edges. */
    protected abstract IncrementableSet<SourceTargetPair> resolveRelationships();

    @NotNull
    @Override
    public ModificationTracker getModificationTracker() {
        return PsiManager.getInstance(getProject()).getModificationTracker();
    }

    public long getCurrentClusterCount() {
        return this.currentClusterCount;
    }

    /** Creates the edge between two nodes; {@code value} is the reference count. */
    @Nullable
    protected abstract DiagramEdge<PsiElement> toEdge(@NotNull DiagramNode<PsiElement> from, @NotNull DiagramNode<PsiElement> to, Long value);

    @NotNull
    @Override
    public String getNodeName(DiagramNode<PsiElement> diagramNode) {
        return PsiUtils.getPresentableName(diagramNode.getIdentifyingElement());
    }

    /** Marks each root and everything it (transitively) calls, via the LCOM graph. */
    @SuppressWarnings("rawtypes")
    public void markCallees(List<DiagramNode> roots) {
        LCOMConverter lcomConverter = new LCOMConverter();
        Collection<LCOMNode> lcom4Nodes = lcomConverter.convert(getNodes(), getEdges());
        for (DiagramNode root : roots) {
            markCallees(root, lcom4Nodes);
        }
    }

    @SuppressWarnings("rawtypes")
    private void markCallees(DiagramNode root, Collection<LCOMNode> lcom4Nodes) {
        LCOMNode lcomRoot = searchRoot(root, lcom4Nodes);
        lcomRoot.getIdentifyingElement().setMarked();
        List<LCOMNode> callees = getCalleesTransitiv(lcomRoot, lcom4Nodes);
        for (LCOMNode callee : callees) {
            callee.getIdentifyingElement().setMarked();
        }
    }

    /** Finds the LCOM node wrapping {@code diagramNode}; the node must exist. */
    @SuppressWarnings("rawtypes")
    private LCOMNode searchRoot(DiagramNode diagramNode, Collection<LCOMNode> lcom4Nodes) {
        for (LCOMNode lcom4Node : lcom4Nodes) {
            if (lcom4Node.getIdentifyingElement().equals(diagramNode)) {
                return lcom4Node;
            }
        }
        throw new IllegalStateException("DiagramNode not found");
    }

    private List<LCOMNode> getCalleesTransitiv(LCOMNode lcomRoot, Collection<LCOMNode> lcom4Nodes) {
        final LCOMAnalyzerData data = new LCOMAnalyzerData(lcom4Nodes);
        CalleesSubgraphAnalyzer analyzer = new CalleesSubgraphAnalyzer(data);
        List<LCOMNode> result = analyzer.getCallees(lcomRoot);
        return result;
    }

    /** Marks each root and everything that (transitively) calls it, via the LCOM graph. */
    @SuppressWarnings("rawtypes")
    public void markCallers(List<DiagramNode> roots) {
        LCOMConverter lcomConverter = new LCOMConverter();
        Collection<LCOMNode> lcom4Nodes = lcomConverter.convert(getNodes(), getEdges());
        for (DiagramNode root : roots) {
            markCallers(root, lcom4Nodes);
        }
    }

    @SuppressWarnings("rawtypes")
    private void markCallers(DiagramNode root, Collection<LCOMNode> lcom4Nodes) {
        LCOMNode lcomRoot = searchRoot(root, lcom4Nodes);
        lcomRoot.getIdentifyingElement().setMarked();
        List<LCOMNode> callers = getCallersTransitiv(lcomRoot, lcom4Nodes);
        for (LCOMNode caller : callers) {
            caller.getIdentifyingElement().setMarked();
        }
    }

    private List<LCOMNode> getCallersTransitiv(LCOMNode lcomRoot, Collection<LCOMNode> lcom4Nodes) {
        final LCOMAnalyzerData data = new LCOMAnalyzerData(lcom4Nodes);
        CallersSubgraphAnalyzer analyzer = new CallersSubgraphAnalyzer(data);
        // NOTE(review): getCallees() on a CallersSubgraphAnalyzer reads oddly, but the
        // analyzer type determines the traversal direction — confirm against its source.
        List<LCOMNode> result = analyzer.getCallees(lcomRoot);
        return result;
    }

    public OuterReferences getOuterReferences() {
        return this.outerReferences;
    }
}
#ifndef GAME_OVER_SCREEN_H
#define GAME_OVER_SCREEN_H

#include <SFML/Graphics.hpp>
#include <vector>

#include "Screen.h"
#include "Button.h"

namespace sfSnake {

// Screen shown when a game ends. Holds the final score, a text element,
// and a set of buttons; the actual input/update/render behaviour is
// implemented in the corresponding .cpp file.
class GameOverScreen : public Screen {
public:
    // `score` is the final score this screen was created with.
    GameOverScreen(int score);

    // Screen interface (see Screen.h for the contract).
    void handleInput(sf::RenderWindow& window) override;
    void update(sf::Time delta) override;
    void render(sf::RenderWindow& window) override;

private:
    int score_;                    // final score captured at construction
    sf::Text text_;                // text drawn on this screen
    std::vector<Button> buttons_;  // buttons presented to the player
};

}

#endif
M = 10**9 + 7  # modulus applied to every reported count


def suivantd(tab):
    """Right sweep: return tab2 where tab2[j] = (tab[0] + ... + tab[j-1]) % M.

    Each cell of the result receives the running total (mod M) of every cell
    strictly to its left; cell 0 receives nothing.
    """
    n = len(tab)
    tab2 = [0] * n
    x = 0
    for i in range(n - 1):
        x = (tab[i] + x) % M
        tab2[i + 1] += x
    return tab2


def suivantg(tab):
    """Left sweep: return tab2 where tab2[j] = (tab[j+1] + ... + tab[n-1]) % M.

    Mirror image of suivantd: each cell receives the running total (mod M)
    of every cell strictly to its right; the last cell receives nothing.
    """
    n = len(tab)
    tab2 = [0] * n
    x = 0
    for i in range(n - 1, 0, -1):
        x = (tab[i] + x) % M
        tab2[i - 1] += x
    return tab2


def simulation(n, k):
    """Simulate k steps over an array of n cells and print the total mod M.

    Step 1 is the initial all-ones state (total starts at 1); each of the
    remaining k-1 steps adds the current array sum to the total and then
    applies an alternating sweep, starting with the left sweep (suivantg).
    """
    total = 1
    tab = [1] * n
    droite = False  # False -> next sweep is suivantg; True -> suivantd
    for _ in range(k - 1):
        total = (total + sum(tab)) % M
        if droite:
            tab = suivantd(tab)
        else:
            tab = suivantg(tab)
        droite = not droite
    print(total)


if __name__ == "__main__":
    # Guarded so importing this module no longer blocks on stdin.
    # Input: first line is the number of test cases, then one "n k" per line.
    for _ in range(int(input())):
        n, k = map(int, input().split())
        simulation(n, k)
#include <iostream>
using namespace std;

/*
 * Reads k and r, then reports the smallest t in [1, 10] such that the total
 * t*k either ends in 0 (and is at least 10), or becomes such a number after
 * subtracting r, or equals r exactly. Prints 0 if no such t exists.
 */
int main() {
    int k, r;
    cin >> k >> r;

    int answer = 0;
    for (int t = 1; t <= 10; ++t) {
        int cost = t * k;       // total for t items
        int change = cost - r;  // total after removing one r

        bool costIsRound = (cost / 10 > 0) && (cost % 10 == 0);
        bool changeIsRound = (change / 10 > 0) && (change % 10 == 0);

        if (costIsRound || changeIsRound || cost == r) {
            answer = t;
            break;
        }
    }

    cout << answer << endl;
    return 0;
}
/**
 * intel_uncore_forcewake_user_put - release forcewake on behalf of userspace
 * @dev_priv: i915 device instance
 *
 * This function complements intel_uncore_forcewake_user_get() and releases
 * the GT powerwell taken on behalf of the userspace bypass.
 */
void intel_uncore_forcewake_user_put(struct drm_i915_private *dev_priv)
{
	spin_lock_irq(&dev_priv->uncore.lock);
	/* Drop one user reference; only the last user tears down the bypass. */
	if (!--dev_priv->uncore.user_forcewake.count) {
		/* Report any unclaimed mmio access observed while the bypass
		 * was active, before restoring the saved debug settings. */
		if (intel_uncore_unclaimed_mmio(dev_priv))
			dev_info(dev_priv->drm.dev,
				 "Invalid mmio detected during user access\n");

		/* Restore the mmio-debug state stashed in user_forcewake
		 * (saved by the matching _user_get() — see its definition). */
		dev_priv->uncore.unclaimed_mmio_check =
			dev_priv->uncore.user_forcewake.saved_mmio_check;
		i915_modparams.mmio_debug =
			dev_priv->uncore.user_forcewake.saved_mmio_debug;

		/* Release the forcewake reference while still holding the lock. */
		intel_uncore_forcewake_put__locked(dev_priv, FORCEWAKE_ALL);
	}
	spin_unlock_irq(&dev_priv->uncore.lock);
}
Share. A delicious second course. A delicious second course. UPDATE (10/6/16): With iZombie: Season 2 available on Netflix as of today, we're re-featuring our review of the show's strong second season. Warning: Full spoilers for iZombie: Season 2 follow. Consistently offering clever, witty and fun episodes, iZombie solidified itself as one of the most entertaining series on TV in its second season. Rob Thomas and Diane Ruggiero-Wright had already created an offbeat yet inviting world in Season 1 and in Season 2 they built upon it, putting the characters into more intense and involving situations, all while still maintaining the show’s crucial, knowing sense of humor. The cast continue to be one of the most likeable you’ll find, anchored by the excellent Rose McIver. Okay, it’s one of the show’s reaches that pretty much every brain Liv eats is a very focused, specific type of person, but that’s just part of the deal here. And it gives McIver so much to work with, as she goes all in playing Liv taking on personas as varied as a coach, a stalker, a costumed vigilante or a tough stripper. Every week, McIver is given something different to play and she consistently nails it, with ongoing mileage gotten out of how out there and uncharacteristic Liv gets, depending on her latest brain meal. iZombie: Rose McIver, Rahul Kohli, Rob Thomas Season 2 Interview - Comic-Con 2015 > < ... Exit Theatre Mode After his heartbroken ex-fiancé character took a surprising (and awesome) turn at the end of Season 1, Robert Buckley’s Major got a great storyline in Season 2, as he found himself working for Vaughn Du Clark (Steven Weber), tasked with assassinating zombies – all while actually locking them up instead, which put him in a very precarious position both with Du Clark and the cops and the FBI, who were getting closer and closer to him for his actions in both Season 1 and 2. 
The fact that those investigating Major’s crimes were Clive (Malcolm Goodwin) and his FBI partner/love interest Dale Bozzio (Jessica Harmon) only increased the tension, even while Clive and Dale made a great pairing – with Harmon effortlessly fitting in on the show, as the somewhat goofy Dale provided a great foil for the somewhat stoic Clive. And in the midst of this, having Clive begin to slowly notice the things that were off about Liv was continually intriguing, since it was inevitable that Clive would one day find out The Secret. Blaine (David Anders) in the meantime had to adjust to life as a human again – for a while at least, as he never kept his nose clean and eventually became one of the undead again, with Anders always bringing a wonderfully quirky/funny approach to the character. iZombie: Rose McIver Talks Major, Liv's Enemies and More > < ... Exit Theatre Mode McIver and Rahul Kohli continued to be a delightful duo in all the scenes between Liv and Ravi and Kohli shined throughout the season, though I do hope Season 3 can perhaps give Ravi more of his own storyline at some points beyond the ongoing search for a cure or the burgeoning love triangle between Ravi, Peyton (Aly Michalka) and Blaine. The end of the season, as Ravi began to suspect Major was up to no good – and their big confrontation about it – showed how strong it can be to use the usually comic presence of Ravi in a dramatic manner that would be interesting to explore again. As Season 2 progressed, one really strong element was how it began to bring together several storylines. We began to see Major’s growing interaction with Blaine begin to bring him even more in focus as a suspect for Dale and Clive, while Peyton’s return -- it was good to see Michalka, who also fits in great with this cast, get more to do -- had her wrapped up with Blaine (in more ways than one) and helping lead us to a new villain on the show, Stacey Boss (Eddie Jemison). 
Best of all, the “brain of the week” storylines began to become increasingly tied into the main stories as well. And yes, this meant sometimes you had to accept a bit more coincidence on the show, but it still was exciting and gratifying to see how all the different elements were intersecting in different ways and how Liv could learn new info thanks to a new murder victim connected in ways that were sometimes not apparent on the surface. The Cast on the Perfect Zombie-Fighting Weapon > < ... Exit Theatre Mode It didn’t all quite gel and some plotlines seemed discarded at a certain point or didn't quite pay off. The season began with so much drama involving Liv and her family, who then were barely mentioned at all as the months went on. And while he was talked about by many, Stacey Boss remained a bit too much in his own world at times, after making a compelling entrance. With Du Clark, Blaine and Gilda (more on her below) already providing so much ongoing villainy, maybe Boss was one villain too many. I kept expecting him to more directly cross paths with others, notably Du Clark, but it rarely occurred. He remains in play for Season 3 though, so we’ll see where things go with him next time, but he was in a considerable amount of this season so it felt like it was aiming towards something bigger sooner. Still, when it came to Big Bads, Vaughn Du Clark certainly delivered. Steven Weber seemed to be having a ball in the role and was delightfully awful as the energetic, confident mega-douche of a sports drink company CEO. He was also given a great foil in Gilda (Leanne Lapp), his daughter, who was just as corrupt as her dad. Gilda has no qualms about manipulating Major, Liv or anyone else and Lapp brought just the right attitude to the character – even as we saw just how awful Du Clark was as a dad, giving us a tinge of sympathy, or at least understanding, about why she was the way she was, even as it was clear she needed to be stopped. 
The season also ended in an epic, satisfying manner, with Clive finally finding out the truth, an all-out “Romero Zombie” attack and both Du Clark and Gilda being taken out – all while we met a huge new player on the scene that looks to be upending the show in a huge way.
Effect of Sargassum sp. Extract on Phagocytic Activity of White Snapper (Lates calcarifer) Infected by Vibrio alginolyticus Bacteria Sargassum sp. is a natural synthesis product; it has bioactive metabolites such as flavonoids, which can increase the fish body's defense system against attacks of disease. The purpose of this study was to determine the effect of Sargassum sp. extract added into the feed on the phagocytic activity of white snapper infected by Vibrio alginolyticus bacteria. This research was conducted using a completely randomized design method with 4 treatments and 3 replications, with K treatments (Sargassum sp. extract 0 ppm), A (Sargassum sp. extract 50 ppm), B (Sargassum sp. extract 70 ppm), and C (Sargassum sp. extract 90 ppm). The results showed that the administration of Sargassum sp. extract in white snapper feed and after being infected by Vibrio alginolyticus bacteria was able to affect phagocytic activity. The best concentration was obtained in treatment B with Sargassum sp. extract at 70 ppm with the highest phagocytosis activity value of 77.33%, and the highest survival rate of 43.33%.  
def import_(modulename):
    """Import and return the module named by the dotted path `modulename`.

    Unlike bare __import__, which returns the top-level package for a dotted
    name, this returns the innermost module (e.g. "a.b.c" -> module a.b.c),
    matching what the original __import__ + getattr walk produced.
    """
    # `debug` and `sys` are module-level globals defined elsewhere in this file.
    if debug:
        print >> sys.stderr, "import_:", modulename
    # importlib.import_module resolves the full dotted path directly,
    # replacing the hand-rolled __import__ + getattr component walk.
    import importlib
    return importlib.import_module(modulename)
<filename>concepts/graph/__main__.py from concepts.graph import Graph, Node graph = Graph() node1 = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node1.children.append(node2) node1.children.append(node3) node2.children.append(node1) node2.children.append(node4) node3.children.append(node5) node3.children.append(node6) nodes = [node1, node2, node3, node4, node5, node6] graph.nodes = nodes for node in graph.nodes: graph.dfs(node) print() for node in graph.nodes: graph.bfs(node)
// Version symbols for the Pods_Instagram framework.
// NOTE(review): this looks like a CocoaPods/Xcode auto-generated umbrella
// header — confirm before editing by hand; regeneration will overwrite it.
#import <UIKit/UIKit.h>

// Framework version exported in numeric and string forms.
FOUNDATION_EXPORT double Pods_Instagram_ProjectVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_Instagram_ProjectVersionString[];
<reponame>gromgit/pour package cmd import ( "fmt" "github.com/gromgit/pour/internal/formula" "os" "regexp" "strings" ) // pour search [TEXT|/REGEX/] func StringMatcher(m string) func(s string) bool { return func(s string) bool { return strings.Index(s, m) >= 0 } } func RegexMatcher(m string) func(s string) bool { return func(s string) bool { match, err := regexp.MatchString(m, s) if err != nil { fmt.Fprintf(os.Stderr, "regexp error: %s\n", err) return false } else { return match } } } func NameGetter(item formula.Formula) string { return item.Name } func DescGetter(item formula.Formula) string { return item.Desc } func Search(allf formula.Formulas, args []string) error { var matcher func(s string) bool var getter func(item formula.Formula) string fmt.Printf("Doing search %v\n", args) getter = NameGetter // Handle options SearchOptions: for len(args) > 0 { switch { case strings.HasPrefix(args[0], "--desc"): getter = DescGetter case strings.HasPrefix(args[0], "--inst"): // Filter out only the installed stuff allf = allf.Filter(func(f formula.Formula) bool { return f.Status == formula.INSTALLED }) default: break SearchOptions } args = args[1:] } if len(args) == 0 { // Return all bottles allf.Ls() } else { spec := args[0] if spec[0] == '/' && spec[len(spec)-1] == '/' { // Regex search matcher = RegexMatcher(spec[1 : len(spec)-1]) } else { // String search matcher = StringMatcher(spec) } allf.Filter( func(item formula.Formula) bool { return matcher(getter(item)) }). Ls() } return nil }
// Autogenerated from CppHeaderCreator // Created by Sc2ad // ========================================================================= #pragma once // Begin includes #include "beatsaber-hook/shared/utils/byref.hpp" // Including type: UnityEngine.ParticleSystem #include "UnityEngine/ParticleSystem.hpp" // Including type: System.ValueType #include "System/ValueType.hpp" // Completed includes // Begin forward declares // Forward declaring namespace: UnityEngine namespace UnityEngine { // Forward declaring type: ParticleSystemSimulationSpace struct ParticleSystemSimulationSpace; } // Completed forward declares #include "beatsaber-hook/shared/utils/il2cpp-type-check.hpp" DEFINE_IL2CPP_ARG_TYPE(::UnityEngine::ParticleSystem::MainModule, "UnityEngine", "ParticleSystem/MainModule"); // Type namespace: UnityEngine namespace UnityEngine { // Size: 0x8 #pragma pack(push, 1) // WARNING Layout: Sequential may not be correctly taken into account! // Autogenerated type: UnityEngine.ParticleSystem/UnityEngine.MainModule // [TokenAttribute] Offset: FFFFFFFF struct ParticleSystem::MainModule/*, public ::System::ValueType*/ { public: public: // UnityEngine.ParticleSystem m_ParticleSystem // Size: 0x8 // Offset: 0x0 ::UnityEngine::ParticleSystem* m_ParticleSystem; // Field size check static_assert(sizeof(::UnityEngine::ParticleSystem*) == 0x8); public: // Creating value type constructor for type: MainModule constexpr MainModule(::UnityEngine::ParticleSystem* m_ParticleSystem_ = {}) noexcept : m_ParticleSystem{m_ParticleSystem_} {} // Creating interface conversion operator: operator ::System::ValueType operator ::System::ValueType() noexcept { return *reinterpret_cast<::System::ValueType*>(this); } // Creating conversion operator: operator ::UnityEngine::ParticleSystem* constexpr operator ::UnityEngine::ParticleSystem*() const noexcept { return m_ParticleSystem; } // Get instance field reference: UnityEngine.ParticleSystem m_ParticleSystem ::UnityEngine::ParticleSystem*& 
dyn_m_ParticleSystem(); // public System.Single get_duration() // Offset: 0x12FAFB4 float get_duration(); // public System.Boolean get_loop() // Offset: 0x12FB034 bool get_loop(); // public System.Void set_loop(System.Boolean value) // Offset: 0x12FB0B4 void set_loop(bool value); // public System.Void set_startColor(UnityEngine.ParticleSystem/UnityEngine.MinMaxGradient value) // Offset: 0x12FB154 void set_startColor(::UnityEngine::ParticleSystem::MinMaxGradient value); // public System.Void set_simulationSpace(UnityEngine.ParticleSystemSimulationSpace value) // Offset: 0x12FB1F4 void set_simulationSpace(::UnityEngine::ParticleSystemSimulationSpace value); // public System.Void set_simulationSpeed(System.Single value) // Offset: 0x12FB294 void set_simulationSpeed(float value); // public System.Void set_playOnAwake(System.Boolean value) // Offset: 0x12FB334 void set_playOnAwake(bool value); // System.Void .ctor(UnityEngine.ParticleSystem particleSystem) // Offset: 0x12FAD30 // ABORTED: conflicts with another method. 
MainModule(::UnityEngine::ParticleSystem* particleSystem); // static private System.Single get_duration_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self) // Offset: 0x12FAFF4 static float get_duration_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self); // static private System.Boolean get_loop_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self) // Offset: 0x12FB074 static bool get_loop_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self); // static private System.Void set_loop_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self, System.Boolean value) // Offset: 0x12FB104 static void set_loop_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self, bool value); // static private System.Void set_startColor_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self, ref UnityEngine.ParticleSystem/UnityEngine.MinMaxGradient value) // Offset: 0x12FB1A4 static void set_startColor_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self, ByRef<::UnityEngine::ParticleSystem::MinMaxGradient> value); // static private System.Void set_simulationSpace_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self, UnityEngine.ParticleSystemSimulationSpace value) // Offset: 0x12FB244 static void set_simulationSpace_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self, ::UnityEngine::ParticleSystemSimulationSpace value); // static private System.Void set_simulationSpeed_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self, System.Single value) // Offset: 0x12FB2E4 static void set_simulationSpeed_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self, float value); // static private System.Void set_playOnAwake_Injected(ref UnityEngine.ParticleSystem/UnityEngine.MainModule _unity_self, System.Boolean value) // Offset: 0x12FB384 static void 
set_playOnAwake_Injected(ByRef<::UnityEngine::ParticleSystem::MainModule> _unity_self, bool value); }; // UnityEngine.ParticleSystem/UnityEngine.MainModule #pragma pack(pop) static check_size<sizeof(ParticleSystem::MainModule), 0 + sizeof(::UnityEngine::ParticleSystem*)> __UnityEngine_ParticleSystem_MainModuleSizeCheck; static_assert(sizeof(ParticleSystem::MainModule) == 0x8); } #include "beatsaber-hook/shared/utils/il2cpp-utils-methods.hpp" // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::get_duration // Il2CppName: get_duration template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<float (UnityEngine::ParticleSystem::MainModule::*)()>(&UnityEngine::ParticleSystem::MainModule::get_duration)> { static const MethodInfo* get() { return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "get_duration", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::get_loop // Il2CppName: get_loop template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<bool (UnityEngine::ParticleSystem::MainModule::*)()>(&UnityEngine::ParticleSystem::MainModule::get_loop)> { static const MethodInfo* get() { return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "get_loop", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::set_loop // Il2CppName: set_loop template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (UnityEngine::ParticleSystem::MainModule::*)(bool)>(&UnityEngine::ParticleSystem::MainModule::set_loop)> { static const MethodInfo* get() { static auto* value = &::il2cpp_utils::GetClassFromName("System", "Boolean")->byval_arg; return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_loop", 
std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{value}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::set_startColor // Il2CppName: set_startColor template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (UnityEngine::ParticleSystem::MainModule::*)(::UnityEngine::ParticleSystem::MinMaxGradient)>(&UnityEngine::ParticleSystem::MainModule::set_startColor)> { static const MethodInfo* get() { static auto* value = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MinMaxGradient")->byval_arg; return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_startColor", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{value}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::set_simulationSpace // Il2CppName: set_simulationSpace template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (UnityEngine::ParticleSystem::MainModule::*)(::UnityEngine::ParticleSystemSimulationSpace)>(&UnityEngine::ParticleSystem::MainModule::set_simulationSpace)> { static const MethodInfo* get() { static auto* value = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystemSimulationSpace")->byval_arg; return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_simulationSpace", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{value}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::set_simulationSpeed // Il2CppName: set_simulationSpeed template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (UnityEngine::ParticleSystem::MainModule::*)(float)>(&UnityEngine::ParticleSystem::MainModule::set_simulationSpeed)> { static const MethodInfo* get() { static auto* value = &::il2cpp_utils::GetClassFromName("System", "Single")->byval_arg; return 
::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_simulationSpeed", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{value}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::set_playOnAwake // Il2CppName: set_playOnAwake template<> struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (UnityEngine::ParticleSystem::MainModule::*)(bool)>(&UnityEngine::ParticleSystem::MainModule::set_playOnAwake)> { static const MethodInfo* get() { static auto* value = &::il2cpp_utils::GetClassFromName("System", "Boolean")->byval_arg; return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_playOnAwake", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{value}); } }; // Writing MetadataGetter for method: UnityEngine::ParticleSystem::MainModule::MainModule // Il2CppName: .ctor // Cannot get method pointer of value based method overload from template for constructor! // Try using FindMethod instead! 
// ---------------------------------------------------------------------------
// Auto-generated il2cpp MetadataGetter specializations for the "*_Injected"
// native wrappers of UnityEngine::ParticleSystem::MainModule.
// Each specialization resolves the MethodInfo* for one method by class, name,
// and parameter types. MainModule is a value type, so the receiver of an
// _Injected method is passed ByRef and its Il2CppType is taken from this_arg;
// plain by-value parameters use byval_arg instead.
// NOTE: generated code — regenerate rather than hand-editing the lookups.
// ---------------------------------------------------------------------------

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::get_duration_Injected
// Il2CppName: get_duration_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<float (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>)>(&UnityEngine::ParticleSystem::MainModule::get_duration_Injected)> {
    static const MethodInfo* get() {
        // Cached after the first lookup; "/" separates the nested type name.
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "get_duration_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::get_loop_Injected
// Il2CppName: get_loop_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<bool (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>)>(&UnityEngine::ParticleSystem::MainModule::get_loop_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "get_loop_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::set_loop_Injected
// Il2CppName: set_loop_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>, bool)>(&UnityEngine::ParticleSystem::MainModule::set_loop_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        // bool parameter maps to System.Boolean, passed by value.
        static auto* value = &::il2cpp_utils::GetClassFromName("System", "Boolean")->byval_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_loop_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self, value});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::set_startColor_Injected
// Il2CppName: set_startColor_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>, ByRef<::UnityEngine::ParticleSystem::MinMaxGradient>)>(&UnityEngine::ParticleSystem::MainModule::set_startColor_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        // MinMaxGradient is also passed ByRef here, hence this_arg (not byval_arg).
        static auto* value = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MinMaxGradient")->this_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_startColor_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self, value});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::set_simulationSpace_Injected
// Il2CppName: set_simulationSpace_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>, ::UnityEngine::ParticleSystemSimulationSpace)>(&UnityEngine::ParticleSystem::MainModule::set_simulationSpace_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        // Enum parameter, passed by value.
        static auto* value = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystemSimulationSpace")->byval_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_simulationSpace_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self, value});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::set_simulationSpeed_Injected
// Il2CppName: set_simulationSpeed_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>, float)>(&UnityEngine::ParticleSystem::MainModule::set_simulationSpeed_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        // float parameter maps to System.Single, passed by value.
        static auto* value = &::il2cpp_utils::GetClassFromName("System", "Single")->byval_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_simulationSpeed_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self, value});
    }
};

// MetadataGetter for: UnityEngine::ParticleSystem::MainModule::set_playOnAwake_Injected
// Il2CppName: set_playOnAwake_Injected
template<>
struct ::il2cpp_utils::il2cpp_type_check::MetadataGetter<static_cast<void (*)(ByRef<::UnityEngine::ParticleSystem::MainModule>, bool)>(&UnityEngine::ParticleSystem::MainModule::set_playOnAwake_Injected)> {
    static const MethodInfo* get() {
        static auto* _unity_self = &::il2cpp_utils::GetClassFromName("UnityEngine", "ParticleSystem/MainModule")->this_arg;
        static auto* value = &::il2cpp_utils::GetClassFromName("System", "Boolean")->byval_arg;
        return ::il2cpp_utils::FindMethod(classof(UnityEngine::ParticleSystem::MainModule), "set_playOnAwake_Injected", std::vector<Il2CppClass*>(), ::std::vector<const Il2CppType*>{_unity_self, value});
    }
};
Among the more intriguing results from Tuesday’s primary contests, the crowded race to fill the heavily-Democratic Eighth Congressional District in Maryland featured none other than the wife of Hardball host Chris Matthews, whose candidacy included accusations that the couple leveraged their coziness with the D.C. and Hollywood elite for campaign donations. Unfortunately, MSNBC made no mention of this race during its live results coverage from the time polls closed at 8:00 p.m. Eastern until 1:00 a.m. Eastern — neither the MSNBC personality’s absence from the coverage due to the campaign nor, most importantly, the fact that Kathleen Matthews finished in third. As of this writing (around 2:20 a.m. Eastern), far-left Maryland State Senator Jamie Raskin was the winner at 33.7 percent, followed by Total Wine & More founder David Trone at 27.3 percent and Matthews behind them at 23.8 percent. Matthews was name-checked by co-host Brian Williams as being "off tonight attending to family business" at the top of the 6:00 p.m. Eastern hour, but it was never disclosed to any uninformed viewers that the reason had to do with Kathleen's campaign. Perhaps even worse, All In host Chris Hayes referenced the race for the Democratic nomination to replace senatorial candidate Chris Van Hollen late in the evening and the top two finishers, but not the network’s personal connection to it.
After Hayes and co-host Rachel Maddow discussed the boost that senatorial candidate Katie McGinty received in Pennsylvania from the Democratic Party establishment, Hayes made the lone reference to the Maryland district as a case where power and money didn’t pan out with Raskin besting Trone: That wasn’t the case in, say, a place like — um, for instance, Maryland’s congressional district, Van Hollen’s vacated seat where it looks like Jamie Raskin, I believe, is in the lead right now, where someone who had spent $12 million, dropped a lot of money and wasn't able to win the seat, so the voters ultimately get a say on Election Day. For what it’s worth, MTP Daily host Chuck Todd alluded to the race during his 5:00 p.m. Eastern show with the top candidates being Matthews, Raskin, and Trone. However, this came well before the polls closed and the final tally was revealed. The relevant portion of the transcript from MSNBC’s The Place for Politics 2016 on April 26 can be found below.
// Inner class for text input change handling class TextChangeListener implements PropertyChangeListener { // The ActionListener interface requires that we override the actionPerformed() // method. // This method will be called automatically whenever a button event occurs. @Override public void propertyChange(PropertyChangeEvent e) { // The ActionEvent getSource() method returns a reference to the button widget // that was clicked. // This allows us to use one event listener for more than one JButton, if // desired. // Change lineWidth on detection. if (e.getSource() == lineWidthTextField) { lineWidth = (Integer) (lineWidthTextField.getValue()); setLineWidthTextField(lineWidth); } // Change dashLength on detection. else if (e.getSource() == dashLengthTextField) { dashLength = (Integer) (dashLengthTextField.getValue()); setDashLengthTextField(dashLength); } } }
<filename>authtest/gssapi/windows/errmsg.h<gh_stars>0
/*
 * errmsg - report an error message together with a numeric status code.
 *
 * msg:    human-readable description of the failing operation.
 * status: numeric error code; presumably a GSS-API or Windows status value
 *         given the path (authtest/gssapi/windows) — TODO confirm against
 *         the implementation in errmsg.c.
 */
void errmsg(const char *msg, int status);
<reponame>13dev/discord-lottery
import Youtube from 'youtube.ts'
import { Service } from 'typedi'
import { Config } from '@src/config'
import { videoFormat } from 'ytdl-core'

// Readability aliases over ytdl-core's format typings.
type VideoFormat = videoFormat
type VideoFormats = VideoFormat[]

/**
 * Adapter around the youtube.ts client that adds audio-format selection.
 * Registered in the typedi container via @Service().
 */
@Service()
export default class YoutubeAdapter extends Youtube {
    constructor() {
        // API key is taken from application configuration.
        super(Config.youtube.apiKey)
    }

    /**
     * Picks the preferred playable format from a list of ytdl-core formats.
     * First choice is a 48 kHz opus/webm stream; otherwise falls back to
     * nextBestFormat(). Throws when no usable format can be found.
     *
     * NOTE(review): the .sort() calls further down mutate the `formats`
     * array passed in by the caller (Array.prototype.sort is in-place).
     */
    public chooseNextBestFormat(formats: VideoFormats): VideoFormat {
        // TODO: CACHE
        // try {
        //     // ffmpegInput = await this.fileCache.getPathFor(
        //     //     this.getHashForCache(url)
        //     // )
        //
        //     if (options.seek) {
        //         ffmpegInputOptions.push('-ss', options.seek.toString())
        //     }
        // } catch {
        const bestFormat = formats.find(YoutubeAdapter.findByCodecAndSampleRate)
        if (!bestFormat) {
            const format = YoutubeAdapter.nextBestFormat(formats)
            if (!format) {
                throw new Error('Cant find suitable format.')
            }
            return format
        }
        return bestFormat
    }

    /**
     * Fallback selection. Live streams: highest audio bitrate among a fixed
     * set of known live itags. Non-live: sorted by audio then average
     * bitrate, preferring a format without a `bitrate` field, else the first
     * format in the (already sorted) list.
     */
    private static nextBestFormat(formats: VideoFormats): VideoFormat | undefined {
        if (formats[0].isLive) {
            return formats
                .sort(YoutubeAdapter.sortByAudioBitRate)
                .find(YoutubeAdapter.findByLiveStreamItags)
        }
        // NOTE(review): `!format.bitrate` keeps the first format that LACKS a
        // bitrate after sorting — confirm this is intended (vs. `format.bitrate`).
        return (
            formats
                .sort(YoutubeAdapter.sortByAudioBitRate)
                .filter((format) => format.averageBitrate)
                .sort(YoutubeAdapter.sortByAverageBitRate)
                .find((format) => !format.bitrate) ?? formats[0]
        )
    }

    // Descending by audioBitrate. Non-null assertions: NaN results if a
    // format lacks audioBitrate — assumed filtered upstream; TODO confirm.
    private static sortByAudioBitRate(a: VideoFormat, b: VideoFormat) {
        return b.audioBitrate! - a.audioBitrate!
    }

    // Descending by averageBitrate. The `a && b` guard is always true for
    // array elements — presumably defensive; kept as-is.
    private static sortByAverageBitRate(a: VideoFormat, b: VideoFormat) {
        return a && b ? b.averageBitrate! - a.averageBitrate! : 0
    }

    // True for 48 kHz opus audio in a webm container.
    private static findByCodecAndSampleRate(format: VideoFormat) {
        return (
            format.codecs === 'opus' &&
            format.container === 'webm' &&
            format.audioSampleRate !== undefined &&
            parseInt(format.audioSampleRate, 10) === 48000
        )
    }

    // True when the format's itag is one of the known live-stream itags.
    private static findByLiveStreamItags(format: VideoFormat) {
        return [128, 127, 120, 96, 95, 94, 93].includes(Number(format.itag))
    }
}
<gh_stars>1-10 /** * OLAT - Online Learning and Training<br> * http://www.olat.org * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br> * University of Zurich, Switzerland. * <hr> * <a href="http://www.openolat.org"> * OpenOLAT - Online Learning and Training</a><br> * This file has been modified by the OpenOLAT community. Changes are licensed * under the Apache 2.0 license as the original file. 
*/ package org.olat.search.service.indexer.repository; import java.io.IOException; import java.util.List; import org.apache.logging.log4j.Logger; import org.olat.core.CoreSpringFactory; import org.olat.core.gui.components.tree.TreeNode; import org.olat.core.id.Identity; import org.olat.core.id.IdentityEnvironment; import org.olat.core.id.Roles; import org.olat.core.id.context.BusinessControl; import org.olat.core.id.context.ContextEntry; import org.olat.core.logging.StartupException; import org.olat.core.logging.Tracing; import org.olat.core.util.nodes.INode; import org.olat.course.CorruptedCourseException; import org.olat.course.CourseFactory; import org.olat.course.CourseModule; import org.olat.course.ICourse; import org.olat.course.nodeaccess.NodeAccessService; import org.olat.course.nodes.CourseNode; import org.olat.course.run.userview.AccessibleFilter; import org.olat.course.run.userview.UserCourseEnvironment; import org.olat.course.run.userview.UserCourseEnvironmentImpl; import org.olat.repository.RepositoryEntry; import org.olat.repository.RepositoryEntryStatusEnum; import org.olat.repository.RepositoryManager; import org.olat.search.service.SearchResourceContext; import org.olat.search.service.indexer.AbstractHierarchicalIndexer; import org.olat.search.service.indexer.Indexer; import org.olat.search.service.indexer.OlatFullIndexer; import org.olat.search.service.indexer.repository.course.CourseNodeEntry; import org.olat.search.service.indexer.repository.course.CourseNodeIndexer; /** * Index a whole course. 
 * @author <NAME>
 */
public class CourseIndexer extends AbstractHierarchicalIndexer {

	private static final Logger log = Tracing.createLoggerFor(CourseIndexer.class);

	// Search-result type identifier for course repository entries.
	public static final String TYPE = "type.repository.entry.CourseModule";

	private RepositoryManager repositoryManager;

	/**
	 * [used by Spring]
	 * @param repositoryManager manager used to resolve repository entries in checkAccess
	 */
	public void setRepositoryManager(RepositoryManager repositoryManager) {
		this.repositoryManager = repositoryManager;
	}

	@Override
	public String getSupportedTypeName() {
		return CourseModule.getCourseTypeName();
	}

	/**
	 * Indexes one course repository entry: skips decommissioned entries, loads
	 * the course, sets the parent search context and recurses into the course
	 * node tree. All failures are logged and swallowed so a broken course does
	 * not abort the full-index run.
	 */
	@Override
	public void doIndex(SearchResourceContext parentResourceContext, Object parentObject, OlatFullIndexer indexWriter) {
		RepositoryEntry repositoryEntry = (RepositoryEntry) parentObject;
		if (log.isDebugEnabled()) log.debug("Analyse Course... repositoryEntry=" + repositoryEntry);
		try {
			RepositoryEntryStatusEnum status = repositoryEntry.getEntryStatus();
			if(status.decommissioned()) {
				// Decommissioned courses are excluded from the index entirely.
				if(log.isDebugEnabled()) log.debug("Course not indexed because it's " + status + ": repositoryEntry=" + repositoryEntry);
				return;
			}
			ICourse course = CourseFactory.loadCourse(repositoryEntry);
			// course.getCourseTitle(); // do not index title => index root-node
			parentResourceContext.setParentContextType(TYPE);
			parentResourceContext.setParentContextName(course.getCourseTitle());
			doIndexCourse( parentResourceContext, course, course.getRunStructure().getRootNode(), indexWriter);
		} catch(CorruptedCourseException ex) {
			log.warn("Can not index repositoryEntry (" + repositoryEntry.getKey() + ")", ex);
		} catch (Exception ex) {
			log.warn("Can not index repositoryEntry=" + repositoryEntry,ex);
		}
	}

	/**
	 * Depth-first indexing of the course node tree. Each CourseNode is handed
	 * to its type-specific CourseNodeIndexer (if one is configured); children
	 * are always visited even when the current node has no indexer or fails.
	 *
	 * @param repositoryResourceContext search context of the enclosing course
	 * @param course the loaded course
	 * @param node current tree node (may or may not be a CourseNode)
	 * @param indexWriter index sink
	 * @throws IOException
	 * @throws InterruptedException
	 */
	private void doIndexCourse(SearchResourceContext repositoryResourceContext, ICourse course, INode node, OlatFullIndexer indexWriter)
			throws IOException,InterruptedException {
		//try to index the course node
		if(node instanceof CourseNode) {
			if (log.isDebugEnabled()) log.debug("Analyse CourseNode child ... childCourseNode=" + node);
			// go further with resource
			CourseNode childCourseNode = (CourseNode)node;
			CourseNodeIndexer courseNodeIndexer = getCourseNodeIndexer(childCourseNode);
			if (courseNodeIndexer != null) {
				if (log.isDebugEnabled()) {
					log.debug("courseNodeIndexer=" + courseNodeIndexer);
				}
				try {
					courseNodeIndexer.doIndex(repositoryResourceContext, course, childCourseNode, indexWriter);
				} catch (Exception e) {
					// One failing node must not stop indexing of its siblings/children.
					log.warn("Can not index course node=" + childCourseNode.getIdent(), e);
				}
			}
		}
		//loop over all child nodes
		int childCount = node.getChildCount();
		for (int i=0;i<childCount; i++) {
			INode childNode = node.getChildAt(i);
			doIndexCourse(repositoryResourceContext, course, childNode, indexWriter);
		}
	}

	/**
	 * Bean setter method used by spring.
	 * Validates that every configured indexer is a CourseNodeIndexer before delegating.
	 * @param indexerList
	 */
	@Override
	public void setIndexerList(List<Indexer> indexerList) {
		for (Indexer courseNodeIndexer : indexerList) {
			if(!(courseNodeIndexer instanceof CourseNodeIndexer)) {
				throw new StartupException("Configured indexer is not of type RepositoryEntryIndexer: " + courseNodeIndexer);
			}
		}
		super.setIndexerList(indexerList);
	}

	/**
	 * Checks whether the given identity may open a search result pointing into
	 * this course: resolves the targeted course node, rebuilds the user's
	 * accessible course tree, and delegates to the node-specific indexer plus
	 * the superclass check.
	 */
	@Override
	public boolean checkAccess(ContextEntry contextEntry, BusinessControl businessControl, Identity identity, Roles roles) {
		ContextEntry bcContextEntry = businessControl.popLauncherContextEntry();
		if (bcContextEntry == null) {
			// no context-entry anymore, the repository entry itself is the context entry,
			// not a course node of course we have access to the course metadata
			return true;
		}
		if (log.isDebugEnabled()) log.debug("Start identity=" + identity + " roles=" + roles);
		Long repositoryKey = contextEntry.getOLATResourceable().getResourceableId();
		RepositoryEntry repositoryEntry = repositoryManager.lookupRepositoryEntry(repositoryKey);
		if (log.isDebugEnabled()) log.debug("repositoryEntry=" + repositoryEntry );
		// NOTE(review): this denies access to guest-only users precisely when the
		// entry ALLOWS guests (isGuests() == true) — looks inverted; confirm the
		// intended semantics before changing.
		if(roles.isGuestOnly() && repositoryEntry.isGuests()) {
			return false;
		}
		Long nodeId = bcContextEntry.getOLATResourceable().getResourceableId();
		if (log.isDebugEnabled()) log.debug("nodeId=" + nodeId );
		ICourse course = CourseFactory.loadCourse(repositoryEntry);
		IdentityEnvironment ienv = new IdentityEnvironment();
		ienv.setIdentity(identity);
		ienv.setRoles(roles);
		UserCourseEnvironment userCourseEnv = new UserCourseEnvironmentImpl(ienv, course.getCourseEnvironment());
		if (log.isDebugEnabled()) log.debug("userCourseEnv=" + userCourseEnv + "ienv=" + ienv );
		String nodeIdS = nodeId.toString();
		CourseNode courseNode = course.getRunStructure().getNode(nodeIdS);
		if (log.isDebugEnabled()) log.debug("courseNode=" + courseNode );
		// Rebuild the course tree as this user sees it, filtered to accessible nodes.
		TreeNode treeNode = CoreSpringFactory.getImpl(NodeAccessService.class)
				.getCourseTreeModelBuilder(userCourseEnv)
				.withFilter(AccessibleFilter.create())
				.build()
				.getNodeById(courseNode.getIdent());
		if (treeNode == null) {
			// TreeNode no longer visible and accessible
			return false;
		}
		CourseNodeIndexer courseNodeIndexer = getCourseNodeIndexer(courseNode);
		bcContextEntry.setTransientState(new CourseNodeEntry(courseNode));
		return courseNodeIndexer != null
				&& courseNodeIndexer.checkAccess(bcContextEntry, businessControl, identity, roles)
				&& super.checkAccess(bcContextEntry, businessControl, identity, roles);
	}

	/**
	 * Looks up the CourseNodeIndexer registered for the node's concrete class
	 * name, or null when none is configured.
	 */
	private CourseNodeIndexer getCourseNodeIndexer(CourseNode node) {
		String courseNodeName = node.getClass().getName();
		List<Indexer> courseNodeIndexer = getIndexerByType(courseNodeName);
		if (courseNodeIndexer != null && !courseNodeIndexer.isEmpty()) {
			return (CourseNodeIndexer)courseNodeIndexer.get(0);
		}
		return null;
	}
}
Map of Celtic-influenced regions of Europe Celtic toponymy is the study of place names wholly or partially of Celtic origin. These names are found throughout continental Europe, Britain, Ireland, Anatolia and, latterly, through various other parts of the globe not originally occupied by Celts. Celtic languages [ edit ] The Proto-Indo-European language developed into various daughter languages, including the Proto-Celtic language. In Proto-Celtic ("PC"), the Proto-Indo-European ("PIE") sound *p disappeared, perhaps through an intermediate *ɸ. After that, languages derived from Proto-Celtic changed PC *kw into either *p or *k (see: P-Celtic and Q-Celtic languages). In P-Celtic languages, PC *kw changed into *p. In Q-Celtic dialects it developed into /k/. Modern Celticists believe these changes happened after the split between Insular Celtic languages and the Continental Celtic languages. P-Celtic languages include the Continental Gaulish language and the Brittonic branch of Insular Celtic. Common Brittonic is the ancestor of Welsh, Cornish and Breton. Ancient Q-Celtic languages include the Continental Celtiberian and the Goidelic branch of Insular Celtic. Goidelic is the ancestor of the Gaelic languages Irish, Scottish Gaelic and Manx. 
Frequent elements [ edit ] Celtic * briga 'hill, high place' > Welsh bri 'honourable, respected' (not directly related to Welsh bryn 'hill'), Irish brí 'hill' 'hill, high place' > Welsh 'honourable, respected' (not directly related to Welsh 'hill'), Irish 'hill' Celtic * brigant- 'high, lofty, elevated'; used as a feminine divine name, rendered Brigantia in Latin 'high, lofty, elevated'; used as a feminine divine name, rendered Brigantia in Latin Celtic * brīwa 'bridge' 'bridge' Celtic * dūnon 'fortress' > Welsh dinas 'city' & din 'fortress', Irish dún 'fortress' 'fortress' > Welsh 'city' & 'fortress', Irish 'fortress' Celtic * duro- 'fort' 'fort' Celtic * k w enno- 'head' > Brythonic * penn- , Welsh pen 'head, end, chief, supreme', Irish ceann 'head' 'head' > Brythonic * , Welsh 'head, end, chief, supreme', Irish 'head' Celtic * magos 'field, plain' > Welsh maes 'field', Irish magh 'plain' 'field, plain' > Welsh 'field', Irish 'plain' Celtic *windo- 'white, fair, blessed' > Welsh gwyn/wyn / gwen/wen 'white, blessed', Old Irish find, Irish fionn 'fair' Continental Celtic [ edit ] Austria [ edit ] Bregenz, Vorarlberg, Latin Brigantium From Celtic *brigant- 'high, lofty, elevated' (or divine name, Brigantia) Wien, English Vienna, Latin Vindobona From Celtic *windo- 'white' (Welsh gwyn) + *bona 'base, foundation' (Welsh bôn 'base, bottom, stump') Belgium [ edit ] Ardennes, Latin Arduenna Silva From divine name Arduinna, from Celtic *ardu- 'high' (Irish ard) + Latin silva 'forest' France [ edit ] Most of the main cities in France have a Celtic name (the original Gaulish one or the name of the Gaulish tribe). From Celtic *brigant- 'high, lofty, elevated' (or divine name, Brigantia) Brive-la-Gaillarde < Briva 'bridge' < 'bridge' Brives Caen (Cahan, Cahon) < Catumagos . From Old Celtic catu- 'battle' 'fight' 'combat', Old Irish cath 'battle, battalion, troop', Breton -kad / -gad , Welsh cad 'combat, troop'. The general meaning could be 'battlefield' [1] (Cahan, Cahon) < . 
From Old Celtic 'battle' 'fight' 'combat', Old Irish 'battle, battalion, troop', Breton / , Welsh 'combat, troop'. The general meaning could be 'battlefield' Cahors Chambord Carentan < Carentomagus , Idem Charenton , etc. < , Idem , etc. Divodurum (Latin), now Metz, Lorraine From Celtic *diwo- 'god, holy, divine' (Scottish Gaelic dia 'god') + *duro- 'fort' Évreux < (Civitas) Eburovicensis ; former Mediolanum < ; former Laon , Aisne, Latin Lugdunum Clavatum , Aisne, Latin Lillebonne Limoges Lisieux < (Civitas) Lexoviensis ; former Noviomagus [2] 'new market', Old Celtic noviios 'new'. < ; former 'new market', Old Celtic 'new'. Lugdunum Convenarum (Latin), now Saint-Bertrand-de-Comminges, Haute-Garonne (Latin), now Saint-Bertrand-de-Comminges, Haute-Garonne Lyon, Rhône, Latin Colonia Copia Claudia Augusta Lugdunum From Celtic *lug- 'Lugus' (divine name) or perhaps 'light' + *dūnon 'fortress' Lemonum (Latin), now Poitiers, Vienne First element from Celtic *lemo- 'elm'. Nant , Nans , Nantes Nanteuil Nanterre Noviomagus Lexoviorum (Latin), now Lisieux, Calvados (Latin), now Lisieux, Calvados Noviomagus Tricastinorum (Latin), now Saint-Paul-Trois-Châteaux, Drôme (Latin), now Saint-Paul-Trois-Châteaux, Drôme Noyon, Oise, Latin Noviomagus Veromanduorum From Celtic *nowijo- 'new' (Welsh newydd) + *magos 'field, plain' Périgueux Samarobrīva (Latin), now Amiens, Somme "Bridge on the [river] Somme". River name Samara + Celtic *brīwa 'bridge'. Oissel , Oisseau-le-Petit , several Ussel , etc. , , several , etc. Orange < Arausio, a water god < Arausio, a water god Pierremande < Petromantalum < petro-matalo- 'four road' = 'crossing' < < 'four road' = 'crossing' Paris < Parisii (Gaul), a Celtic people situated on the banks of the Seine river < Parisii (Gaul), a Celtic people situated on the banks of the Seine river Rennes Rouen < Rotomagus , [3] sometimes Ratómagos or Ratumacos (on the coins of the Veliocassi tribe). It can be roto- , the word for 'wheel' or 'race', cf. 
Old Irish roth 'wheel' 'race' or Welsh rhod 'wheel' 'race'. Magos is surer here : 'field', 'plain' or later 'market' cf. Old Irish mag (gen. maige ) 'field' 'plain', Old Breton ma 'place'. The whole thing could mean 'hippodrome', 'racecourse' or 'wheel market'. [4] < , sometimes or (on the coins of the tribe). It can be , the word for 'wheel' or 'race', cf. Old Irish 'wheel' 'race' or Welsh 'wheel' 'race'. is surer here : 'field', 'plain' or later 'market' cf. Old Irish (gen. ) 'field' 'plain', Old Breton 'place'. The whole thing could mean 'hippodrome', 'racecourse' or 'wheel market'. Vandœuvres , Vendeuvre < vindo-briga 'white fortress' , < 'white fortress' 'Verdun, Latin "Virodunum" or "Verodunum" Second element from Celtic *dūnon fortress. Vernon < Vernomagus . There are other Vernons in France, but they come directly from Vernō 'place of the alder-trees'. 'plain of the alder-trees'. uernā 'alder-tree', Old Irish fern , Breton, Welsh gwern , dial. French verne / vergne . < . There are other Vernons in France, but they come directly from 'place of the alder-trees'. 'plain of the alder-trees'. 'alder-tree', Old Irish , Breton, Welsh , dial. French / . Veuves, Voves, Vion Germany [ edit ] From Celtic alisa, s.f., 'alder'. (Compare the modern German Erlenbach) and Old High German (OHG) aha, s.n., 'flowing water'. de Amarahe (?), a lost river name near Fulda c. 800 CE (?), a lost river name near Fulda c. 800 CE Amerbach , a stream near Groß-Umstadt, Babenhausen, Ober-Ramstadt , a stream near Groß-Umstadt, Babenhausen, Ober-Ramstadt Ammer Ammerbach Ammergraben , a stream near Harpertshausen , a stream near Harpertshausen Amorbach , a stream near Mümling and the village named after it. , a stream near Mümling and the village named after it. Amorsbrunn Wald-Amorbach Perhaps from Celtic ambara, 'channel, river'. Compare Indo-European *amer-, 'channel, river' > Greek ἀμάρη (amárē), 'channel'. Or, from Celtic amara, 'spelt, a type of grain'. 
Annelsbach a suburb of Höchst a suburb of Höchst Ansbach in Mittelfranken originally Onoltesbah 837 CE From Celtic *onno-, 'ash tree' plus an OHG bach, 'small river'. Boiodurum, now Innstadt, Passau, Niederbayern First element is Celtic *Boio-, tribal name (Boii), possibly 'cattle-owner' (cf. Irish bó 'cow') or 'warrior'. Second element is Celtic *duro- 'fort'. From Celtic *bona 'base, foundation' (Welsh bôn 'base, bottom, stump') From Gaulish Boudobriga, "hill of victory". Containing the elements *boudo- 'victory' (Welsh budd 'gain, benefit') + *briga, 'hill'. Düren, Nordrhein-Westfalen, Latin Durum From Celtic *duro- 'fort' Hercynia Silva (Latin), a vast forest including the modern Black Forest From Celtic *(φ)erkunos 'oak' or divine name Perkwunos + Latin silva 'forest' Second element from Celtic *dūnon 'fortress' Mainz, Rheinland-Pfalz, Latin Moguntiacum From Celtic *mogunt-, 'mighty, great, powerful', used as a divine name (see Mogons) From Celtic *mago-, 'plain, field' Neumagen-Dhron , Rheinland-Pfalz, Latin Noviomagus Trevirorum , Rheinland-Pfalz, Latin Noviomagus Nemetum (Latin), now Speyer, Rheinland-Pfalz From Celtic *nowijo- 'new' (Welsh newydd) + *magos 'field, plain' Remagen, Rheinland-Pfalz, Latin Rigomagus or Ricomagus Second element is from Celtic *magos 'field, plain' Some have seen this toponym as a hybrid form comprising a Celtic form and a Germanic suffix -ingen.[5] This may be so, since between the 2nd and 4th centuries, the area around the present day German university town of Tübingen was settled by a Celtic tribe with Germanic tribal elements mixed in. The element tub- in Tübingen could possibly arise from a Celtic dubo-, s.m., 'dark, black; sad; wild'. As found in the Anglo-Irish placenames of Dublin, Devlin, Dowling, Doolin and Ballindoolin. Perhaps the reference is to the darkness of the river waters that flow near the town; if so, then the name can be compared to the English Tubney, Tubbanford, Tub Mead and Tub Hole in England. 
Compare the late Vulgar Latin tubeta 'morass', from Gaulish. The root is found in Old Irish dub > Irish dubh, Old Welsh dub > Welsh du, Old Cornish duw > Middle Cornish du, Breton du Gaulish dubo-, dubis, all meaning 'black; dark' Worms, Rheinland-Pfalz, Latin Borbetomagus Second element from Celtic *magos, 'plain, field' Hungary [ edit ] From Celtic *(φ)erkunos 'oak' or divine name Perkwunos + Latin jugum 'summit' Italy [ edit ] Brianza, Lombardy, Latin Brigantia From Celtic *brigant- 'high, lofty, elevated' (or divine name, Brigantia) Genova, English Genoa, Latin Genua Perhaps from Celtic *genu- 'mouth [of a river]'. (However, this Ligurian place-name, as well as that of Genava (modern Geneva), probably derive the Proto-Indo-European root *ĝenu- 'knee', see Pokorny, IEW [1].) Milano, English Milan, Latin Mediolanum Unclear. First element looks like Latin medius 'middle'. Second element may be Celtic *landā 'land, place' (Welsh llan); or, *plan- > *lan-, a Celtic cognate of Latin plānus 'plain', with typical Celtic loss of /p/. Belluno, Veneto, Latin Bellunum From Celtic *Bhel- 'bright' and *dūnon 'fortress'. Bergamo, Lombardy, Latin Bergomum From Celtic *brigant- 'high, lofty, elevated' (or divine name, Brigantia) Brescia, Lombardy, Latin Brixia From Celtic *briga- 'rocky height or outcrop'. Bologna, Lombardy, Latin Bononia From Celtic *bona 'base, foundation' (Welsh bôn 'base, bottom, stump') Netherlands [ edit ] From Celtic *lug- 'Lugus' (divine name) or perhaps 'light' + *dūnon 'fortress' Nijmegen, Gelderland, Latin Ulpia Noviomagus Batavorum From Celtic *nowijo- 'new' (Welsh newydd) + *magos 'field, plain' Poland [ edit ] Lugidunum (Latin), now Legnica, Silesia Second element from Celtic *dūnon 'fortress' Portugal [ edit ] Portugal Portus Cale - Cale, the mother goddess of the Celtic people, the one who armed with a hammer formed mountains and valleys. She hides in the rocks. She is Mother Nature. 
Her other name is Cailleach (Calicia/Galiza) Cailleach-Bheur or Beira ( three Portuguese Provinces of the Central Mountain Region where Lusitania was located. Portus Cale - Cale, the mother goddess of the Celtic people, the one who armed with a hammer formed mountains and valleys. She hides in the rocks. She is Mother Nature. Her other name is Cailleach (Calicia/Galiza) Cailleach-Bheur or Beira ( three Portuguese Provinces of the Central Mountain Region where Lusitania was located. Braga, Braga Municipality, Portugal From Celtic *bracari- after the Bracari Celts. Bragança, Alto Trás-os-Montes, Portugal From Celtic *brigant- 'divine name, Brigantia'. From Celtic *beira- Cailleach/ Cale's other name Cailleach-Bheura or Beira, the Celtic Goddess of mountains, water and Winter. Three Portuguese provinces: Beira-Baixa, Beira-Alta and Beira-Litoral Vale de Cambra, Portugal From Celtic *cambra- 'chamber, room'.[6] Conímbriga, Coimbra, Portugal From Celtic *briga- 'rocky height or outcrop'. Coimbra Cymru place of the people in fellowship - where the people gathered as in at a fairgrounds. Related to the word Cumberland and Cambria. Cymru place of the people in fellowship - where the people gathered as in at a fairgrounds. Related to the word Cumberland and Cambria. Douro, Norte, Portugal From Celtic *Dur 'water'. Évora, Alentejo, Portugal From Celtic *ebora- 'plural genitive of the word eburos (trees)'. Lacobriga, Algarve, Portugal From Celtic *Lacobriga- 'Lake of Briga'. Romania [ edit ] Serbia [ edit ] Singidunum (Latin), now Beograd, English Belgrade Second element from Celtic *dūnon 'fortress' Slovenia [ edit ] Celje , Latinized Celeia in turn from * keleia , meaning 'shelter' in Celtic , Latinized in turn from * , meaning 'shelter' in Celtic Neviodunum (Latin), now Drnovo Second element from Celtic *dūnon 'fortress' Spain [ edit ] Asturias and Cantabria Deva, several rivers in northern Spain, and Pontedeva, Galicia, Spain. 
From Celtic *diwā- 'goddess; holy, divine' Mons Vindius (now the Cantabrian Mountains), NW Spain. From Celtic *windo- 'white'. Castile Segovia, Castile and León, Spain, Greek Segoubía. From *segu-, conjectured to be Celtic for 'victorious', 'strength' or 'dry' (theories). Galicia Switzerland [ edit ] Switzerland, especially the Swiss Plateau, has many Celtic (Gaulish) toponyms. This old layer of names was overlaid with Latin names in the Gallo-Roman period,[11] and, from the medieval period, with Alemannic German[12] and Romance[13] names. For some names, there is uncertainty as to whether they are Gaulish or Latin in origin. In some rare cases, such as Frick, Switzerland, there have even been competing suggestions of Gaulish, Latin and Alemannic etymologies.[14] Examples of toponyms with established Gaulish etymology: Solothurn, from Salodurum . The -durum element means "doors, gates; palisade; town". The etymology of the salo- element is unclear. . The element means "doors, gates; palisade; town". The etymology of the element is unclear. Thun, Berne: dunum "fort" "fort" Windisch, Aargau, Latin Vindonissa : first element from * windo- "white" : first element from * "white" Winterthur, Zürich, Latin Vitudurum or Vitodurum , from vitu "willow" and durum or , from "willow" and Yverdon-les-Bains, from Eburodunum , from eburo- "yew" and dunum "fort". [15] , from "yew" and "fort". Zürich, Latin Turicum , from a Gaulish personal name Tūros , from a Gaulish personal name Limmat, from Lindomagos "lake-plain", originally the name of the plain formed by the Linth and Lake Zurich. Insular Celtic [ edit ] Goidelic [ edit ] Ireland [ edit ] The vast majority of placenames in Ireland are anglicized Irish language names. Scotland [ edit ] The majority of placenames in the Highlands of Scotland (part of the United Kingdom) are either Scottish Gaelic or anglicized Scottish Gaelic. Gaelic-derived placenames are very common in the rest of mainland Scotland also. 
Pictish-derived placenames can be found in the northeast, while Brythonic-derived placenames can be found in the south. Isle of Man [ edit ] The majority of placenames on the Isle of Man (a Crown dependency) are Manx or anglicized Manx. Brythonic [ edit ] England (excluding Cornwall) [ edit ] Linguistic evidence for Celtic place-names in present-day England (part of the United Kingdom) can be found in names such as Leatherhead or Litchfield. In addition, evidence of Celtic populations can be found from those place-names including the Old English element wealh "foreigner, stranger, Briton". Such names are a minority, but are widespread across England. For example, a smattering of villages around the Fenland town of Wisbech hint at this: West Walton, Walsoken, and the Walpoles indicate the continued presence of an indigenous population, and Wisbech, King's Lynn and Chatteris retain Celtic topographical elements. Some villages that exhibit "Tydd" in their name, e.g. Tydd St Giles, may obtain that element from the Britonnic word for "small holding". Compare the Welsh tyddyn. Arden (forest), Warwickshire From Celtic *ardu- 'high' (Irish ard) Avon (river), Gloucestershire/Wiltshire/Somerset (river), Gloucestershire/Wiltshire/Somerset Avon (river), Wiltshire/Hampshire/Dorset (river), Wiltshire/Hampshire/Dorset Avon (river), Northamptonshire/Warwickshire/Worcestershire/Gloucestershire (river), Northamptonshire/Warwickshire/Worcestershire/Gloucestershire Avon or Aune (river), Devon From Brythonic *abona 'river' (Welsh afon) Axe (river), Devon/Dorset (river), Devon/Dorset Axe (river), Somerset (river), Somerset Axminster , Devon , Devon Axmouth, Devon From Celtic *iska 'water' (Irish uisce) First element from Celtic *briga 'hill' Brent (river), Greater London (river), Greater London Brentford, Greater London From Celtic *brigant- 'high, lofty, elevated' (or divine name, Brigantia) Bryn, Greater Manchester Derived from Welsh bryn, 'hill'. 
Camulodunum (Latin), now Colchester, Essex From *kamulos 'Camulus' (divine name) + Celtic *dūnon 'fortress' First element from Brythonic *crüg 'hill'[16] (Irish cruach) Dever (river), Hampshire (river), Hampshire Deverill (river), Wiltshire (river), Wiltshire Devon, Latin Dumnonia From tribal name Dumnonii or Dumnones, from Celtic *dumno- 'deep', 'world' Dover, Kent, Latin Dubris From Celtic *dubr- 'water', *dubrās 'waters' (Welsh dwfr; Breton dour) Durham, County Durham, Latin Dunelm First element is possibly dun, ' hill fort' (Welsh ddin, 'fort'). First element from Celtic *duro- 'fort'; in Dūrobrīvae, Celtic *brīwa 'bridge' Eskeleth, North Yorkshire Possibly derived from Brythonic *iska, 'water, fish' and *leith, 'damp, wet'. From Celtic *iska 'water' (Irish uisce); second element in Isca Dumnoniorum (Exeter) is a tribal name (see Devon) Leatherhead, Surrey From Brythonic *lēd- [from Celtic *leito-] + *rïd- [from Celtic *(φ)ritu-] = "Grey Ford"[16] Lincoln, Lincolnshire, Latin Lindum Colonia From Celtic *lindo- 'pool' + Latin colonia 'colony' Manchester, Latin Mamucium or Mancunium From Celtic *mamm- 'breast' (referring to the shape of a hill) Noviomagus (Latin), now Chichester, West Sussex and Crayford, Kent From Celtic *nowijo- 'new' (Welsh newydd) + *magos 'field, plain' Pengethley, Herefordshire From Brythonic *penn- 'hill, top, head, chief' (Welsh pen) + possibly *kelli 'to stand' (Welsh gelli) Pencoyd , Herefordshire , Herefordshire Penge , Greater London , Greater London Penketh, Cheshire From Brythonic *penn- 'hill, top, head, chief' (Welsh pen) + *koid- 'wood' (Welsh coed), or *cēd- 'wood'[16] Pencraig , Herefordshire , Herefordshire Pendlebury , Greater Manchester , Greater Manchester Pendleton , Lancashire , Lancashire Pendock, Worcestershire First element from Brythonic *penn- 'hill, top, head, chief' (Welsh pen 'head, end, chief, supreme') = Irish ceann 'head', from Proto-Celtic *kwenno- Penn , Buckinghamshire , Buckinghamshire Penn, West Midlands 
From Brythonic *penn- 'hill' (Welsh pen) Lower Penn, Staffordshire From English lower + Brythonic *penn- 'hill' Penshaw, Sunderland From Brythonic *penn- 'hill' and possibly p-Celtic *carr 'rocks'. This matches the earliest attestation from c. 1190, Pencher. Old Sarum, Wiltshire, Latin Sorviodūnum Second element from Celtic *dūnon 'fortress' Segedunum (Latin), now Wallsend, Tyne and Wear First element conjectured to be Celtic for 'victorious', 'strength' or 'dry' (theories). Second element is Celtic *dūnon 'fortress'. Sinodun Hills, Berkshire From Celtic *seno- 'old' + *dūnon 'fortress' Tamar (river), Devon/Cornwall (river), Devon/Cornwall Tame (river), Greater Manchester (river), Greater Manchester Tame (river), North Yorkshire (river), North Yorkshire Tame (river), West Midlands (river), West Midlands Team (river), Tyne and Wear (river), Tyne and Wear Teme (river), Welsh Tefeidiad , Wales/Shropshire/Worcestershire (river), Welsh , Wales/Shropshire/Worcestershire Thames (river), Latin Tamesis Possibly from Celtic *tames- 'dark' (cf. Celtic *temeslos > Welsh tywyll 'darkness'). Other theories. Trinovantum (Latin), now London 'Of the Trinovantes', a tribal name, perhaps 'very energetic people' from Celtic *tri- (intensive) + *now- 'energetic', related to *nowijo- 'new' (Welsh newydd) Verulamium (Latin), now St Albans, Hertfordshire From Brittonic *weru- 'broad' + *lam- 'hand' [from Celtic *(φ)lāmā] (Welsh llaw, Irish láimh) Vindobala (Latin), Roman fort in Northumberland (Latin), Roman fort in Northumberland Vindolanda (Latin), Roman fort in Northumberland (Latin), Roman fort in Northumberland Vindomora (Latin), Roman fort in County Durham. First element from Celtic *windo- 'white' (Welsh gwyn); in Vindolanda, Celtic *landā 'land, place' (Welsh llan). In Vindomora, second element could be 'sea' (Welsh môr, Irish muir). 
Wigan , Greater Manchester , Greater Manchester York, Greek Ebōrakon, Latin Eboracum or Eburacum from Celtic *eburo- 'yew' Wales [ edit ] The vast majority of placenames in Wales (part of the United Kingdom) are either Welsh or anglicized Welsh. Cornwall [ edit ] The vast majority of placenames in Cornwall (part of England) are either Cornish or anglicized Cornish. For examples, see List of places in Cornwall. Brittany [ edit ] The vast majority of placenames in the west of Brittany (part of France) are either Breton or derived from Breton. For examples, see Category:Populated places in Brittany. See also [ edit ]
// BDD-style Catch2 tests for pipeable::data_generator covering: receiver
// (de)registration via += / -=, std::variant visitation through `visit >>=`,
// non-const-reference / by-value / r-value output semantics (move-from
// behavior across multiple receivers), multi-type generators, and chaining
// one generator into another.
#include <pipeable/data_generator.hpp>
#include <catch2/catch.hpp>
#include <variant>

using namespace pipeable;

namespace
{
    // Callable that records whether it was ever invoked with an int.
    struct int_to_int
    {
        int operator()(int val)
        {
            receivedValue = true;
            return val;
        }
        bool receivedValue = false;
    };
    // Overloaded callable used to verify variant visitation dispatches to
    // the matching operator() per alternative.
    struct int_and_string_receiver
    {
        int receivedInt = 0;
        std::string receivedStr = "";
        void operator()(int val)
        {
            receivedInt = val;
        }
        void operator()(const std::string& val)
        {
            receivedStr = val;
        }
    };
}

SCENARIO("Compose pipelines with a data generator")
{
    GIVEN("a data generator")
    {
        data_generator<int> generator;
        WHEN("generator is piped to receiver")
        {
            int_to_int receiver;
            generator += &receiver;
            THEN("no data is forwarded automatically")
            {
                // Registration alone must not trigger the receiver.
                REQUIRE(receiver.receivedValue == false);
            }
            THEN("data is forwarded to the downstream pipeline")
            {
                generator(1);
                REQUIRE(receiver.receivedValue == true);
            }
            AND_WHEN("receiver is deregistered from generator")
            {
                generator -= &receiver;
                THEN("data is no longer forwarded")
                {
                    receiver.receivedValue = false;
                    generator(1);
                    REQUIRE(receiver.receivedValue == false);
                }
            }
        }
    }
    GIVEN("a data generator outputting variant<x, y>")
    {
        data_generator<std::variant<int, std::string>> generator;
        WHEN("it is piped as: generator += visit >>= receiver")
        {
            int_and_string_receiver receiver;
            generator += visit >>= &receiver;
            AND_WHEN("generator is invoked with x")
            {
                generator(1);
                THEN("receiver::operator(x) is invoked")
                {
                    REQUIRE(receiver.receivedInt == 1);
                }
            }
            AND_WHEN("generator is invoked with y")
            {
                generator("hello");
                THEN("receiver::operator(y) is invoked")
                {
                    REQUIRE(receiver.receivedStr == "hello");
                }
            }
        }
    }
}

SCENARIO("Non const reference output generator")
{
    GIVEN("a data generator outputting non-const reference")
    {
        data_generator<int&> generator;
        WHEN("piped to receiver with non const reference")
        {
            // The receiver mutates the argument in place; the caller must
            // observe the mutation after invoking the generator.
            struct receiver_t
            {
                void operator()(int& mutableVal)
                {
                    mutableVal = 10;
                }
            } mutatingReceiver;
            generator += &mutatingReceiver;
            THEN("it receives the generated int")
            {
                int mutableInt = 1;
                generator(mutableInt);
                REQUIRE(mutableInt == 10);
            }
        }
    }
}

SCENARIO("By value output generator")
{
    // These tests are important to verify it works with input that can be
    // MOVED FROM. We still want both receivers to receive the value.
    GIVEN("a data generator outputting by value")
    {
        data_generator<int, std::string> generator;
        WHEN("piped to multiple receivers accepting r-value reference")
        {
            struct receiver_t
            {
                int receivedInt = 0;
                std::string receivedString;
                void operator()(int&& val) { receivedInt = val; }
                void operator()(std::string&& val) { receivedString = val; }
            } receiver1, receiver2;
            generator += &receiver1;
            generator += &receiver2;
            THEN("they receives the generated int")
            {
                generator(1);
                REQUIRE(receiver1.receivedInt == 1);
                REQUIRE(receiver2.receivedInt == 1);
            }
            THEN("they receives the generated string")
            {
                // By-value output: each receiver gets its own copy, so
                // neither observes a moved-from string.
                generator(std::string("dummy"));
                REQUIRE(receiver1.receivedString == "dummy");
                REQUIRE(receiver2.receivedString == "dummy");
            }
        }
    }
}

SCENARIO("By r-value output generator")
{
    // These tests are important to verify it works as expected with input
    // that can be MOVED FROM. Only first receiver should receive the value.
    GIVEN("a data generator outputting by r-value")
    {
        data_generator<int&&, std::string&&> generator;
        WHEN("piped to multiple receivers accepting r-value reference")
        {
            struct receiver_t
            {
                int receivedInt = 0;
                std::string receivedString;
                void operator()(int&& val) { receivedInt = val; }
                void operator()(std::string&& val) { receivedString = std::move(val); }
            } receiver1, receiver2;
            generator += &receiver1;
            generator += &receiver2;
            THEN("they receive the generated int (it can't be moved from)")
            {
                generator(1);
                REQUIRE(receiver1.receivedInt == 1);
                REQUIRE(receiver2.receivedInt == 1);
            }
            THEN("only one receives the generated string")
            {
                // The first receiver moves from the r-value; the second
                // sees the (empty) moved-from state.
                generator(std::string("dummy"));
                REQUIRE(receiver1.receivedString == "dummy");
                REQUIRE(receiver2.receivedString == "");
            }
        }
        WHEN("piped to multiple receivers accepting by value")
        {
            struct receiver_t
            {
                int receivedInt = 0;
                std::string receivedString;
                void operator()(int val) { receivedInt = val; }
                void operator()(std::string val) { receivedString = val; }
            } receiver1, receiver2;
            generator += &receiver1;
            generator += &receiver2;
            THEN("they receive the generated int (it can't be moved from)")
            {
                generator(1);
                REQUIRE(receiver1.receivedInt == 1);
                REQUIRE(receiver2.receivedInt == 1);
            }
            // This test is important to verify it works as expected with
            // input that can be MOVED FROM.
            THEN("only one receives the generated string")
            {
                generator(std::string("dummy"));
                REQUIRE(receiver1.receivedString == "dummy");
                // It has been moved from
                REQUIRE(receiver2.receivedString == "");
            }
        }
    }
}

SCENARIO("multi-output generator")
{
    GIVEN("a generator outputting int & string")
    {
        data_generator<int, std::string> multiGenerator;
        WHEN("piped to receiver callable with int")
        {
            int receivedInt = 0;
            const auto receiver = [&](int val) { receivedInt = val; };
            multiGenerator += &receiver;
            THEN("it receives the generated int")
            {
                multiGenerator(1);
                REQUIRE(receivedInt == 1);
            }
        }
        WHEN("piped to receiver callable with string")
        {
            std::string receivedStr = "";
            const auto receiver = [&](std::string val) { receivedStr = val; };
            multiGenerator += &receiver;
            THEN("it receives the generated string")
            {
                multiGenerator("1");
                REQUIRE(receivedStr == "1");
            }
        }
        WHEN("piped to receiver callable with int and string")
        {
            struct receiver_t
            {
                int receivedInt = 0;
                std::string receivedStr = "";
                void operator()(int val) { receivedInt = val; }
                void operator()(std::string val) { receivedStr = val; }
                void operator()(std::tuple<>) { } // dummy
            } receiver;
            multiGenerator += &receiver;
            THEN("it receives the generated int")
            {
                multiGenerator(1);
                REQUIRE(receiver.receivedInt == 1);
            }
            THEN("it receives the generated string")
            {
                multiGenerator("1");
                REQUIRE(receiver.receivedStr == "1");
            }
        }
        WHEN("piped to r-value receiver containing shared_ptr, callable with both int and string")
        {
            // Guards against the generator moving from a stored receiver
            // when dispatching to it for a second output type.
            auto wasPtrEmpty = false;
            multiGenerator += [&wasPtrEmpty, ptr = std::make_shared<int>()](const auto& value) mutable
            {
                wasPtrEmpty = ptr == nullptr;
            };
            THEN("contained shared_ptr wasn't moved from (is empty)")
            {
                multiGenerator(1);
                REQUIRE(!wasPtrEmpty);
            }
            THEN("contained shared_ptr wasn't moved from (is empty)")
            {
                multiGenerator("1");
                REQUIRE(!wasPtrEmpty);
            }
        }
    }
}

SCENARIO("Chaining generators")
{
    GIVEN("A generator outputting int")
    {
        data_generator<int> gen1;
        WHEN("chained to another generator outputting int")
        {
            // A generator is itself a valid receiver, so gen1 -> gen2 -> receiver.
            data_generator<int> gen2;
            gen1 += &gen2;
            bool didReceiveData = false;
            auto receiver = [&](int) { didReceiveData = true; };
            gen2 += &receiver;
            THEN("invoking first generator will invoke second")
            {
                gen1(1);
                REQUIRE(didReceiveData);
            }
        }
    }
}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Helpers for opening the grades database and creating its schema."""

import sqlite3


def connectToDb():
    """Open (creating it on first use) the grades.db SQLite database."""
    connection = sqlite3.connect('grades.db')
    return connection


def getCursor(con):
    """Return a fresh cursor for the given connection."""
    return con.cursor()


def createTables(cur):
    """Create the ``grades`` table mapping a course name to a grade.

    Raises sqlite3.OperationalError if the table already exists.
    """
    ddl = (
        'CREATE TABLE grades ('
        'course VARCHAR(255) PRIMARY KEY,'
        'grade VARCHAR(64)'
        ');'
    )
    cur.execute(ddl)
def fetch_contracts(self) -> dict:
    """Return the contracts payload for this client's account.

    Builds the contracts endpoint path from ``self.puuid`` and returns
    whatever ``self.fetch`` yields for it, unmodified.
    """
    return self.fetch(f'/contracts/v1/contracts/{self.puuid}')
/**
 * Populates every view in the detail layout from the current {@code movie}.
 * <p>
 * Network-dependent content (poster image via Glide, trailers, reviews) is
 * loaded only when {@code isInternetConnection} is true; the textual fields
 * (release date, rating, vote average, overview) are always set.
 */
private void populateViews() {
    collapsingToolbarLayout.setTitle(movie.getOriginalTitle());
    if (isInternetConnection) {
        // TMDB image CDN at width 500; assumes getPosterPath() starts with
        // a leading '/' as returned by the API -- TODO confirm.
        String moviePosterPath = "https://image.tmdb.org/t/p/w500" + movie.getPosterPath();
        Glide
                .with(getApplicationContext())
                .load(moviePosterPath)
                .into(moviePoster);
        loadTrailers();
        loadReviews();
    }
    releaseDate.setText(movie.getReleaseDate());
    // Vote average is rounded to the nearest whole star for the RatingBar.
    rating.setRating((int) Math.round(movie.getVoteAverage()));
    voteAverage.setText(movie.getVoteAverage() + "/10");
    overview.setText(movie.getOverview());
}
/// Parses an `if` expression or statement.
///
/// Consumes one `if`/`elif`/`else` chain. Each branch is either a single
/// expression introduced by `Then` (`->`-style shorthand) or an indented
/// block of statements. Returns the `NodeKind::If` node whose extra data
/// holds the list of branch nodes, or a parse `Error`.
fn parse_if_expression(&mut self, token: Option<Token>) -> Result<NodeId, Error> {
    // `token` is the already-consumed `if` token when the caller has one;
    // otherwise the next token is taken from the stream.
    let mut branch_token = self.some_or_next(token)?;
    let mut branches = Vec::new();
    // `is_elif` is true for the initial `if` and every `elif`; it flips to
    // false once an `else` branch is seen, which also terminates the loop.
    let mut is_elif = true;
    loop {
        // If it's an else, the condition is always the null node.
        let condition = if is_elif {
            self.parse_expression(0)?
        } else {
            NodeId::null()
        };
        // The branch body can be either `->` followed by an expression, or a block of code.
        let mut branch_body = Vec::new();
        if let Some(..) = self.match_token(TokenKind::Then)? {
            branch_body.push(self.parse_expression(0)?);
        } else {
            self.parse_indented_block(
                &mut branch_body,
                &branch_token,
                |p| p.parse_statement(),
                || ErrorKind::MissingLineBreakAfterStatement,
            )?;
        }
        // Construct the branch.
        let branch = self.ast.create_node(if is_elif {
            NodeKind::IfBranch
        } else {
            NodeKind::ElseBranch
        });
        // An else branch has no condition node, so its span covers only
        // the body; conditional branches span condition through body.
        let body_span = self.span_all_nodes(&branch_body);
        let span = if is_elif {
            Span::join(self.ast.span(condition), &body_span)
        } else {
            body_span
        };
        self.ast.set_span(branch, span);
        self.ast.set_first_handle(branch, condition);
        self.ast.set_extra(branch, NodeData::NodeList(branch_body));
        branches.push(branch);
        // If the current branch is an `elif` branch, look ahead for the next one.
        if is_elif {
            if let Some(token) = self.match_token(TokenKind::Elif)? {
                branch_token = token;
            } else if let Some(token) = self.match_token(TokenKind::Else)? {
                branch_token = token;
                is_elif = false;
            } else {
                break;
            }
        } else {
            // An `else` branch is always the last one.
            break;
        }
    }
    let node = self.ast.create_node(NodeKind::If);
    // NOTE: `branch_token` is the token of the *last* branch parsed at this
    // point, so the If node's span starts there rather than at the initial
    // `if` token.
    self.ast.set_span(
        node,
        Span::join(&branch_token.span, &self.span_all_nodes(&branches)),
    );
    self.ast.set_extra(node, NodeData::NodeList(branches));
    Ok(node)
}
Two Berkeley alumni have won MacArthur fellowships — unsolicited, no-strings-attached awards of $625,000 given to exceptionally creative people. The two — Gene Luen Yang, a Bay Area graphic novelist, and Josh Kun, a cultural critic in Los Angeles — are among 23 fellows named by the MacArthur Foundation last night. Gene Luen Yang studied engineering at Berkeley, earning his BS in 1995, before going on to make his name as a graphic novelist and cartoonist “whose work for young adults demonstrates the potential of comics to broaden our understanding of diverse cultures and people,” according to the MacArthur announcement. A San Jose resident, Yang wrote full-length graphic novels, serial comics and short stories exploring multicultural themes while teaching computer science and serving as director of information services at Bishop O’Dowd High School in Oakland from 1998 to 2015. He is one of the writers of DC Comics’ New Super-Man, which features a new Chinese protagonist, Kenan Kong from Shanghai, according to his MacArthur bio. Yang “aims to change our understanding of different cultures and people, and is an advocate for diverse characters and diverse writers in children’s and young adult literature,” his bio states. Earlier this year, he was selected as the new National Ambassador for Young People’s Literature Josh Kun earned his Ph.D. in ethnic studies at Berkeley in 1999 and is a professor of communication at USC’s Annenberg School. As a cultural historian, he explores “the ways in which the arts and popular culture are conduits for cross-cultural exchange,” according to his MacArthur citation. “In work that spans academic scholarship, exhibitions, and performances, Kun unearths and brings to life forgotten historical narratives through finely grained analyses of material and sonic manifestations of popular culture. He complicates our understanding of the evolution of racial and ethnic identity in America…” the MacArthur bio says. 
In two recent books, To Live and Dine in LA and Songs in the Key of Los Angeles, he “focuses on bringing present-day communities together around historical intersections of cultural expression.” Altogether, Californians won 10 of this year’s 23 MacArthurs. Read Gene Luen Yang’s bio on the MacArthur website. Read Josh Kun’s bio on the MacArthur website.
Reoperation in Treatment of Clubfoot We evaluated 118 operations (including 57 reoperations) on clubfeet. We concluded that relapse is most often the consequence of insufficient primary surgery, and the less sufficient the intervention, the more severe the relapse or residual deformity. The severity of the deformity is determined and shown, first of all, by the position of the calcaneus. In milder cases, a soft-tissue operation can prove useful, whereas more severe cases require bony intervention as well. The second, rarer cause of relapse was that the foot was originally stiff and nonreducible. Such feet may need multiple reoperations.
import {
  BaseEntity,
  CreateDateColumn,
  Entity,
  JoinColumn,
  OneToOne,
  PrimaryColumn,
  UpdateDateColumn,
} from 'typeorm';
import { DocumentRO16 } from './document-ro16.entity';
import { DocumentRO26 } from './document-ro26.entity';
import { DocumentRO01 } from './document-ro01.entity';
import { TransactionDocument } from './transaction.entity';

// Prefix used by consumers when building mapping-document identifiers/names.
export const PREFIX_MAPPING = `mapping_document_`;

/**
 * Joins one RO01, one RO16 and one RO26 document together with the owning
 * transaction into a single mapping row. The three document relations are
 * cascaded, so saving a mapping persists its documents too.
 */
@Entity()
export class MappingDocument extends BaseEntity {
  // Application-assigned identifier (PrimaryColumn, not auto-generated).
  @PrimaryColumn()
  id: string;

  @OneToOne(() => DocumentRO01, { cascade: true })
  @JoinColumn()
  documentRO01: DocumentRO01;

  @OneToOne(() => DocumentRO16, { cascade: true })
  @JoinColumn()
  documentRO16: DocumentRO16;

  // NOTE(review): property name is misspelled ("docuemnt" vs "document").
  // Renaming it would change the generated join-column name in the schema,
  // so a fix requires a DB migration plus updates to all call sites --
  // TODO confirm usage and schedule the rename.
  @OneToOne(() => DocumentRO26, { cascade: true })
  @JoinColumn()
  docuemntRO26: DocumentRO26;

  // Inverse side lives on TransactionDocument.mapping.
  @OneToOne(() => TransactionDocument, (transaction) => transaction.mapping)
  @JoinColumn()
  transaction: TransactionDocument;

  @CreateDateColumn({ type: 'timestamptz' })
  create_date: Date;

  @UpdateDateColumn({ type: 'timestamptz' })
  update_date: Date;
}
/**
 * Transposes an edge-to-nodes adjacency map into a node-to-edges map.
 * <p>
 * Every edge {@code E} that lists node {@code N} in its node set appears in
 * the set mapped to {@code N} in the result. Nodes not mentioned by any edge
 * are absent from the returned map.
 *
 * @param nodesByEdgeMap the nodes by edge map
 * @return a new mutable map from each node to the set of edges touching it
 */
private Map<N, Set<E>> trasposeMap(Map<E, Set<N>> nodesByEdgeMap) {
    Map<N, Set<E>> edgeByNodeMap = new HashMap<>();
    for (Entry<E, Set<N>> entry : nodesByEdgeMap.entrySet()) {
        E edge = entry.getKey();
        for (N adjacentNode : entry.getValue()) {
            // computeIfAbsent replaces the containsKey/put/get triple:
            // one hash lookup instead of up to three per (edge, node) pair.
            edgeByNodeMap.computeIfAbsent(adjacentNode, node -> new HashSet<>()).add(edge);
        }
    }
    return edgeByNodeMap;
}
package io.quarkus.grpc.server.devmode;

import static io.restassured.RestAssured.when;
import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;

import org.awaitility.Awaitility;
import org.hamcrest.Matchers;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

import com.example.test.MutinyStreamsGrpc;
import com.example.test.StreamsGrpc;
import com.example.test.StreamsOuterClass;

import devmodetest.v1.Devmodetest;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.examples.helloworld.GreeterGrpc;
import io.grpc.examples.helloworld.HelloReply;
import io.grpc.examples.helloworld.HelloRequest;
import io.quarkus.test.QuarkusDevModeTest;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.subscription.Subscribers;

/**
 * Exercises Quarkus dev-mode hot reload for gRPC services: each test edits a
 * source (or proto) file on disk via {@link QuarkusDevModeTest} and then calls
 * the running service over a plaintext channel on localhost:9000 to verify the
 * change took effect. The string literals passed to modifySourceFile must match
 * the test-service sources exactly -- keep them in sync.
 */
public class GrpcDevModeTest {
    @RegisterExtension
    public static final QuarkusDevModeTest test = new QuarkusDevModeTest()
            .setArchiveProducer(
                    () -> ShrinkWrap.create(JavaArchive.class)
                            .addClasses(DevModeTestService.class, DevModeTestStreamService.class,
                                    DevModeTestInterceptor.class, DevModeTestRestResource.class)
                            .addPackage(GreeterGrpc.class.getPackage()).addPackage(HelloReply.class.getPackage())
                            .addPackage(Devmodetest.class.getPackage()).addPackage(StreamsGrpc.class.getPackage())
                            .addPackage(StreamsOuterClass.Item.class.getPackage()))
            .setCodeGenSources("proto");

    // Fresh channel per test; closed in shutdown().
    protected ManagedChannel channel;

    @BeforeEach
    public void init() {
        channel = ManagedChannelBuilder.forAddress("localhost", 9000)
                .usePlaintext()
                .build();
    }

    @AfterEach
    public void shutdown() throws InterruptedException {
        channel.shutdownNow().awaitTermination(2, TimeUnit.SECONDS);
    }

    /** Editing an interceptor source must be picked up on the next request. */
    @Test
    public void testInterceptorReload() {
        callHello("Winnie", ".*Winnie");
        assertThat(when().get("/test/interceptor-status").asString()).isEqualTo("status");
        test.modifySourceFile("DevModeTestInterceptor.java",
                text -> text.replace("return \"status\"", "return \"altered-status\""));
        // The gRPC call triggers the dev-mode recompile before the REST check.
        callHello("Winnie", ".*Winnie");
        assertThat(when().get("/test/interceptor-status").asString()).isEqualTo("altered-status");
    }

    /** A single service-source edit is reflected in the gRPC response. */
    @Test
    public void testSingleReload() {
        callHello("Winnie", "Hello, Winnie");
        test.modifySourceFile("DevModeTestService.java",
                text -> text.replaceAll("String greeting = .*;", "String greeting = \"hello, \";"));
        callHello("Winnie", "hello, Winnie");
    }

    /** Reload triggered by a REST call first, then verified over gRPC. */
    @Test
    public void testReloadAfterRest() {
        test.modifySourceFile("DevModeTestService.java",
                text -> text.replaceAll("String greeting = .*;", "String greeting = \"hell no, \";"));
        test.modifySourceFile("DevModeTestRestResource.java",
                text -> text.replace("testresponse", "testresponse2"));
        assertThat(when().get("/test").asString()).isEqualTo("testresponse2");
        callHello("Winnie", "hell no, Winnie");
    }

    /** Reload triggered by a gRPC call first, then verified over REST. */
    @Test
    public void testReloadBeforeRest() {
        test.modifySourceFile("DevModeTestService.java",
                text -> text.replaceAll("String greeting = .*;", "String greeting = \"hell yes, \";"));
        test.modifySourceFile("DevModeTestRestResource.java",
                text -> text.replace("testresponse", "testresponse3"));
        callHello("Winnie", "hell yes, Winnie");
        assertThat(when().get("/test").asString()).isEqualTo("testresponse3");
    }

    /**
     * A bidirectional stream opened before the reload must complete cleanly,
     * and a stream opened after the reload must see the edited behavior.
     */
    @Test
    public void testEchoStreamReload() {
        final CopyOnWriteArrayList<String> results = new CopyOnWriteArrayList<>();
        CompletionStage<Boolean> firstStreamFinished = callEcho("foo", results);
        Awaitility.await().atMost(10, TimeUnit.SECONDS)
                .until(() -> results, Matchers.hasItem("echo::foo"));
        test.modifySourceFile("DevModeTestStreamService.java", text -> text.replace("echo::", "newecho::"));
        final CopyOnWriteArrayList<String> newResults = new CopyOnWriteArrayList<>();
        callEcho("foo", newResults);
        Awaitility.await().atMost(10, TimeUnit.SECONDS)
                .until(() -> newResults, Matchers.hasItem("newecho::foo"));
        // The pre-reload stream must have been closed, not left dangling.
        assertThat(firstStreamFinished).isCompleted();
    }

    /** Editing the .proto source regenerates code and reloads the service. */
    @Test
    public void testProtoFileChangeReload() throws InterruptedException {
        callHello("HACK_TO_GET_STATUS_NUMBER", "2");
        test.modifyFile("proto/devmodetest.proto", text -> text.replaceAll("TEST_ONE = .*;", "TEST_ONE = 15;"));
        Thread.sleep(5000); // to wait for eager reload for code gen sources to happen
        callHello("HACK_TO_GET_STATUS_NUMBER", "15");
    }

    /**
     * Subscribes to the bidirectional echo stream, appending each received
     * item name to {@code output}. The returned stage completes with true on
     * normal stream completion and completes exceptionally on stream error.
     */
    private CompletionStage<Boolean> callEcho(String name, List<String> output) {
        CompletableFuture<Boolean> result = new CompletableFuture<>();
        Multi<StreamsOuterClass.Item> request = Multi.createFrom()
                .item(name)
                .map(StreamsOuterClass.Item.newBuilder()::setName)
                .map(StreamsOuterClass.Item.Builder::build);
        Multi<StreamsOuterClass.Item> echo = MutinyStreamsGrpc.newMutinyStub(channel)
                .echo(request);
        echo.subscribe().withSubscriber(Subscribers.from(
                item -> output.add(item.getName()),
                error -> {
                    error.printStackTrace();
                    result.completeExceptionally(error);
                },
                () -> result.complete(true),
                s -> s.request(Long.MAX_VALUE)));
        return result;
    }

    /** Blocking unary call; asserts the reply message matches the regex. */
    private void callHello(String name, String responseMatcher) {
        HelloReply reply = GreeterGrpc.newBlockingStub(channel)
                .sayHello(HelloRequest.newBuilder().setName(name).build());
        assertThat(reply.getMessage()).matches(responseMatcher);
    }
}
// Generated declaration file (tsc output) for bip174's key-value converter
// registry: per-field encode/decode/check helpers for PSBT globals, inputs
// and outputs. Do not hand-edit -- regenerate from the TypeScript sources.
/// <reference types="node" />
import * as globalXpub from './global/globalXpub';
import * as unsignedTx from './global/unsignedTx';
import * as finalScriptSig from './input/finalScriptSig';
import * as finalScriptWitness from './input/finalScriptWitness';
import * as nonWitnessUtxo from './input/nonWitnessUtxo';
import * as partialSig from './input/partialSig';
import * as porCommitment from './input/porCommitment';
import * as sighashType from './input/sighashType';
import * as witnessUtxo from './input/witnessUtxo';
// Converters for PSBT global map entries.
declare const globals: {
    unsignedTx: typeof unsignedTx;
    globalXpub: typeof globalXpub;
    checkPubkey: (keyVal: import("../interfaces").KeyValue) => Buffer | undefined;
};
// Converters for PSBT per-input map entries.
declare const inputs: {
    nonWitnessUtxo: typeof nonWitnessUtxo;
    partialSig: typeof partialSig;
    sighashType: typeof sighashType;
    finalScriptSig: typeof finalScriptSig;
    finalScriptWitness: typeof finalScriptWitness;
    porCommitment: typeof porCommitment;
    witnessUtxo: typeof witnessUtxo;
    bip32Derivation: {
        decode: (keyVal: import("../interfaces").KeyValue) => import("../interfaces").Bip32Derivation;
        encode: (data: import("../interfaces").Bip32Derivation) => import("../interfaces").KeyValue;
        check: (data: any) => data is import("../interfaces").Bip32Derivation;
        expected: string;
        canAddToArray: (array: import("../interfaces").Bip32Derivation[], item: import("../interfaces").Bip32Derivation, dupeSet: Set<string>) => boolean;
    };
    redeemScript: {
        decode: (keyVal: import("../interfaces").KeyValue) => Buffer;
        encode: (data: Buffer) => import("../interfaces").KeyValue;
        check: (data: any) => data is Buffer;
        expected: string;
        canAdd: (currentData: any, newData: any) => boolean;
    };
    witnessScript: {
        decode: (keyVal: import("../interfaces").KeyValue) => Buffer;
        encode: (data: Buffer) => import("../interfaces").KeyValue;
        check: (data: any) => data is Buffer;
        expected: string;
        canAdd: (currentData: any, newData: any) => boolean;
    };
    checkPubkey: (keyVal: import("../interfaces").KeyValue) => Buffer | undefined;
};
// Converters for PSBT per-output map entries.
declare const outputs: {
    bip32Derivation: {
        decode: (keyVal: import("../interfaces").KeyValue) => import("../interfaces").Bip32Derivation;
        encode: (data: import("../interfaces").Bip32Derivation) => import("../interfaces").KeyValue;
        check: (data: any) => data is import("../interfaces").Bip32Derivation;
        expected: string;
        canAddToArray: (array: import("../interfaces").Bip32Derivation[], item: import("../interfaces").Bip32Derivation, dupeSet: Set<string>) => boolean;
    };
    redeemScript: {
        decode: (keyVal: import("../interfaces").KeyValue) => Buffer;
        encode: (data: Buffer) => import("../interfaces").KeyValue;
        check: (data: any) => data is Buffer;
        expected: string;
        canAdd: (currentData: any, newData: any) => boolean;
    };
    witnessScript: {
        decode: (keyVal: import("../interfaces").KeyValue) => Buffer;
        encode: (data: Buffer) => import("../interfaces").KeyValue;
        check: (data: any) => data is Buffer;
        expected: string;
        canAdd: (currentData: any, newData: any) => boolean;
    };
    checkPubkey: (keyVal: import("../interfaces").KeyValue) => Buffer | undefined;
};
export { globals, inputs, outputs };
/**
 * \file grid_def.h
 * \brief Shared global definitions for the gridworld environment:
 *        direction and event-operator enums, basic typedefs, and forward
 *        declarations used across the gridworld sources.
 */
#ifndef MAGNET_GRIDWORLD_GRIDDEF_H
#define MAGNET_GRIDWORLD_GRIDDEF_H

#include "../Environment.h"
#include "../utility/utility.h"

namespace magent {
namespace gridworld {

// The four facing directions; DIR_NUM is the count sentinel, not a direction.
typedef enum { EAST, SOUTH, WEST, NORTH, DIR_NUM } Direction;

// Operators of the reward-rule event language. Entries before the split are
// boolean combinators over sub-events; entries after it are atomic event
// predicates. OP_NULL is the "no operator" sentinel.
typedef enum {
    OP_AND, OP_OR, OP_NOT,
    /***** split *****/
    OP_KILL, OP_AT, OP_IN, OP_COLLIDE, OP_ATTACK, OP_DIE,
    OP_IN_A_LINE, OP_ALIGN,
    OP_NULL,
} EventOp;

// Integer grid coordinate.
struct Position {
    int x, y;
};

// Flattened (single-integer) encoding of a grid position.
typedef long long PositionInteger;
typedef float Reward;
typedef int Action;

// some forward declaration
class Agent;
class AgentType;
class Group;
struct MoveAction;
struct TurnAction;
struct AttackAction;

// reward description
class AgentSymbol;
class RewardRule;
class EventNode;

using ::magent::environment::Environment;
using ::magent::environment::GroupHandle;
using ::magent::utility::strequ;
using ::magent::utility::NDPointer;

} // namespace gridworld
} // namespace magent

#endif //MAGNET_GRIDWORLD_GRIDDEF_H
I have worked with Windows Phone 7 since it was in beta, so as you can imagine, I downloaded the Windows Phone 8 SDK as soon as it went live. For a bit of fun I decided to create a simple run-tracking application that showcases a number of these features ... and for an extra challenge do it all within 100 lines of code! (without resorting to writing compact and cryptic code). This article guides you through the application that I developed, delving into the following Windows Phone 8 features: Whilst it was perfectly possible to develop a run-tracking app with Windows Phone 7 (and there are a number of good examples in the Marketplace), the new features and capabilities of Windows Phone 8 can be used to make a much more feature-rich application. NOTE: I originally published this article on the Nokia Developer Wiki, but thought I would share it on CodeProject also, where most of my other articles have been published. The Application User Interface This application has quite a basic UI, which is composed of full-screen map, which has the run statistics overlayed on top of it as shown in the screenshot below: The application UI is defined in XAML as follows: < Grid util:GridUtils.RowDefinitions =" Auto, *" > < StackPanel Grid.Row =" 0" Margin =" 12,17,0,28" > < StackPanel Orientation =" Horizontal" > < Image Source =" /Assets/ApplicationIconLarge.png" Height =" 50" / > < TextBlock Text =" WP8Runner" VerticalAlignment =" Center" Margin =" 10 0 0 0" FontSize =" {StaticResource PhoneFontSizeLarge}" / > < /StackPanel > < /StackPanel > < Grid x:Name =" ContentPanel" Grid.Row =" 1" Margin =" 12,0,12,0" > < maps:Map x:Name =" Map" ZoomLevel =" 16" / > < Grid Background =" #99000000" Margin =" 20" VerticalAlignment =" Bottom" > < Grid Margin =" 20" util:GridUtils.RowDefinitions =" 40, 40, Auto" util:GridUtils.ColumnDefinitions =" *, *, *, *" > < TextBlock Text =" Distance:" / > < TextBlock Text =" 0 km" Grid.Column =" 1" x:Name =" distanceLabel" HorizontalAlignment =" Center" 
/ > < TextBlock Text =" Time:" Grid.Column =" 2" / > < TextBlock Text =" 00:00:00" Grid.Column =" 3" x:Name =" timeLabel" HorizontalAlignment =" Center" / > < TextBlock Text =" Calories:" Grid.Row =" 1" / > < TextBlock Text =" 0" Grid.Column =" 1" x:Name =" caloriesLabel" HorizontalAlignment =" Center" Grid.Row =" 1" / > < TextBlock Text =" Pace:" Grid.Column =" 2" Grid.Row =" 1" / > < TextBlock Text =" 00:00" Grid.Column =" 3" x:Name =" paceLabel" HorizontalAlignment =" Center" Grid.Row =" 1" / > < Button Content =" Start" Grid.Row =" 2" Grid.ColumnSpan =" 4" Click =" StartButton_Click" x:Name =" StartButton" / > < /Grid > < /Grid > < /Grid > < /Grid > GridUtils is a utility class, which I wrote a number of years ago, that provides convenient shorthand for defining grid columns and rows (for WPF, Silverlight and WindowsPhone). If you are following along, by building this running app from scratch, then in order to add the map, you will have to include the following namespace definition: xmlns:maps= " clr-namespace:Microsoft.Phone.Maps.Controls;assembly=Microsoft.Phone.Maps" Before building and running the application, you have to include the mapping ‘capability’. To do this open up '''WPAppManifest.xml''', navigate to the Capabilities tab and check the ID_CAP_MAP checkbox. While you’re there, you may as well include ID_CAP_LOCATION as well: Capabilites are used to determine the phone features that your application uses so that users can more easily determine what an application does. With these capabilities included, build and run the application and you should see the same UI that was illustrated above. One of the improvements in the maps control is that it is fully vector-based (The Windows Phone 7 map is image-tile-based), this creates a much more smooth transition when the map is zoomed, and also allows for 3D transformations (as we will see a little later on). 
The map control also has a few other useful features for our running app, pedestrian-features and landmarks. These can be enabled as follows: < maps:Map x:Name =" Map" PedestrianFeaturesEnabled =" True" LandmarksEnabled =" True" ZoomLevel =" 16" / > With these features enabled the map illustrates useful features such as stairs, crossings and 3D landmarks: (By the way, I’m not counting the ~50 lines of XAML in my total lines-of-code count!) The Windows Phone 8 maps have many more new features that I have not used in this application. You could for example use the new ColorMode , which allows you to render a 'dark' map which is easier on the eyes in low light conditions. You could even make the run-tracking app choose the ColorMode based on the time of day! Timing The Run When the '''Start''' button is tapped the application tracks the user’s location using the phone’s built in GPS receiver, in order to mark their path on the map. It also times their run duration and generates various statistics of interest. We’ll start with the simpler of the two, timing the run. When the start button is clicked a DispatcherTimer is started and the time of the button tap recorded. 
On each timer ‘tick’ the label which indicates the elapsed run time is updated: public partial class MainPage : PhoneApplicationPage { private DispatcherTimer _timer = new DispatcherTimer(); private long _startTime; public MainPage() { InitializeComponent(); _timer.Interval = TimeSpan.FromSeconds( 1 ); _timer.Tick += Timer_Tick; } private void Timer_Tick( object sender, EventArgs e) { TimeSpan runTime = TimeSpan.FromMilliseconds(System.Environment.TickCount - _startTime); timeLabel.Text = runTime.ToString( @" hh\:mm\:ss" ); } private void StartButton_Click( object sender, RoutedEventArgs e) { if (_timer.IsEnabled) { _timer.Stop(); StartButton.Content = " Start" ; } else { _timer.Start(); _startTime = System.Environment.TickCount; StartButton.Content = " Stop" ; } } } With the above code in place, tapping the '''start''' button starts the timer. Location Tracking The next step is to track the location whilst the timer is running. The Windows Phone API has a GeoCoordinateWatcher class which fires a PositionChanged event which can be used to track the user’s location. It is very easy to render the user’s movements on a map via a MapPolyLine , which is a line path which is defined in terms of geocoordinates. 
Each time the event is fired, a new point is added to the line as follows: public partial class MainPage : PhoneApplicationPage { private GeoCoordinateWatcher _watcher = new GeoCoordinateWatcher(GeoPositionAccuracy.High); private MapPolyline _line; private DispatcherTimer _timer = new DispatcherTimer(); private long _startTime; public MainPage() { InitializeComponent(); _line = new MapPolyline(); _line.StrokeColor = Colors.Red; _line.StrokeThickness = 5 ; Map.MapElements.Add(_line); _watcher.PositionChanged += Watcher_PositionChanged; } private void StartButton_Click( object sender, RoutedEventArgs e) { if (_timer.IsEnabled) { _watcher.Stop(); _timer.Stop(); StartButton.Content = " Start" ; } else { _watcher.Start(); _timer.Start(); _startTime = System.Environment.TickCount; StartButton.Content = " Stop" ; } } private void Watcher_PositionChanged( object sender, GeoPositionChangedEventArgs<GeoCoordinate> e) { var coord = new GeoCoordinate(e.Position.Location.Latitude, e.Position.Location.Longitude); Map.Center = coord; _line.Path.Add(coord); } } With these few lines of extra code, the path of the user’s run is added to the map: The PositionChanged event handler can be developed further to compute the total run distance, calories burnt and pace. This makes use of the GeoCoordinate.GetDistanceTo method which can be used to compute the distance between two locations: private double _kilometres; private long _previousPositionChangeTick; private void Watcher_PositionChanged( object sender, GeoPositionChangedEventArgs<GeoCoordinate> e) { var coord = new GeoCoordinate(e.Position.Location.Latitude, e.Position.Location.Longitude); if (_line.Path.Count > 0 ) { var previousPoint = _line.Path.Last(); var distance = coord.GetDistanceTo(previousPoint); var millisPerKilometer = ( 1000 . 0 / distance) * (System.Environment.TickCount - _previousPositionChangeTick); _kilometres += distance / 1000 . 
0 ; paceLabel.Text = TimeSpan.FromMilliseconds(millisPerKilometer).ToString( @" mm\:ss" ); distanceLabel.Text = string .Format( " {0:f2} km" , _kilometres); caloriesLabel.Text = string .Format( " {0:f0}" , _kilometres * 65 ); } Map.Center = coord; _line.Path.Add(coord); _previousPositionChangeTick = System.Environment.TickCount; } Runner’s do not measure pace in miles or kilometers per hour. Instead, pace is measured in terms of the time taken to travel a set distance. This method of measurement makes it much easier to determine your overall race time, e.g. if you are running at 4:00 minute-kilometers pace, you will complete a 5k race in 20 minutes. NOTE: The code above uses a pretty basic calorie calculation, assuming a burn rate of 65 calories per kilometer. A more accurate calculation would incorporate the runner's weight and pace, and other environmental factors. I'll leave this as an exercise for the reader! For developing applications that involve tracking a user’s location the emulator has some very useful features. You can record points along a route, then replay them at set intervals. You can also save the route as an XML file so that it can be replayed in future sessions: It takes a while to create a realistic dataset that emulates a real run, but at least you only have to do this once! Setting The Map Pitch and Heading Because of the vector nature of the Windows Phone 8 map it is possible to transform the view using the Pitch and Heading properties. The Pitch property sets the viewing angle of the map, providing a perspective rendering, rather than a top-down rendering, while the Heading property allows you to rotate the map. Most sat-nav systems use a combination of these effects to render the map so that it looks the same as the view directly in front of you. Many people find this type of map view much easier to understand (they do not have to perform rotate transforms in their head!). 
Adding this feature to the running app is really easy, firstly setting the map Pitch is simply done in the XAML: < maps:Map x:Name =" Map" PedestrianFeaturesEnabled =" True" LandmarksEnabled =" True" Pitch =" 55" ZoomLevel =" 18" / > Computing the heading is a little more complicated. In the previous section the current and previous location was used to compute pace and distance traveled. These two locations can be used to compute the heading, although the calculation is a little more involved. Fortunately I found a .NET library that contains some useful geolocation utilities, including one that computes heading. Using the .NET Extra library, finding and setting the heading is quite straightforward: PositionHandler handler = new PositionHandler(); var heading = handler.CalculateBearing( new Position(previousPoint), new Position(coord)); Map.SetView(coord, Map.ZoomLevel, heading, MapAnimationKind.Parabolic); Also, note that the above code uses the map SetView method rather than setting each property independently. If you set the properties directly, the map state changes immediately, which means that the view will ‘jump’ from one location/heading to another. Whereas SetView transitions from one location to another, producing a much more fluid UI. You can see the use of heading and pitch below, with a run in New York’s Central Park: Background Location Tracking With Windows Phone 7 you could run the foreground applications under the lock screen, which is a pretty important feature for a sports tracking application, this allows the user to lock their phone while their location is still tracked. Windows Phone 8 goes one better, applications that track geolocation can run in the background, which means they can keep tracking the user’s location while they use other apps – checking emails, or playing music for example. In order to turn on this feature you have to edit '''WMAppManifest.xml''' by hand, to do this right-click the file and select '''View code'''. 
Then locate the Tasks element and add the following. < Tasks > < DefaultTask Name =" _default" NavigationPage =" MainPage.xaml" > < BackgroundExecution > < ExecutionType Name =" LocationTracking" / > < /BackgroundExecution > < /DefaultTask > < /Tasks > And that’s it! When the application starts running in the background, the RunningInBackground application event is fired. You could use this to show a toast notification for example, but in the next section we’ll look at a more interesting way of keeping the user informed of the continued location tracking. A Live Tile Windows Phone 8 adds yet more tile templates, we’ll use the new ‘Iconic Template’ here. To select the template open up '''WMAppManifest.xml''' (using the visual editor this time!), and select the TemplateIconic template. Updating the tile state is as simple as sending a notification. Each time the location changes the following code is executed: ShellTile.ActiveTiles.First().Update( new IconicTileData() { Title = " WP8Runner" , WideContent1 = string .Format( " {0:f2} km" , _kilometres), WideContent2 = string .Format( " {0:f0} calories" , _kilometres * 65 ), }); Now if you pin the application to the start screen and use a wide tile format, while the location is being tracked in the background, the tile updates: And with that final change addition, the running application is complete! Conclusions Windows Phone 8 has some pretty cool new features that allow you to extend the capabilities of your application. In this article I have shown how a simple run tracking app benefits from many of these new features. Also, the expressive power of the APIs and frameworks allow you to develop complex applications with very little code. Clearly the application illustrated here is not complete! Why not have a go at developing it further yourself - why not try using isolated storage for recording your run history? or add summary statistics as charts? 
You could try using some of the other new Windows Phone 8 APIs such as voice-commands to control the start / stop of each run? Have fun! So, is this app really just 100 lines of code? You can download the sourcecode, WP8Runner.zip, and see for yourself that '''MainPage.xaml.cs''' is exactly 100 lines.
// Social media account URLs used across the app.
export const accountConfig = {
    twitterUrl: 'https://twitter.com/jeanrauwers',
    // NOTE(review): the key is misspelled ("Ulr" instead of "Url"). Renaming it
    // would break every consumer that reads accountConfig.instagramUlr, so the
    // typo is only flagged here — fix it together with all call sites.
    instagramUlr: 'https://instagram.com/dev.jeanrauwers',
    youtubeUrl: 'https://www.youtube.com/user/jeanrauwers'
};

// Name of the table/resource backing the followers-like API.
export const tableName: string = 'followersLikeApi'
# Monoisotopic residue masses (Daltons) keyed by one-letter amino-acid code.
mass_map = {
    "A": 71.03711,
    "C": 103.00919,
    "D": 115.02694,
    "E": 129.04259,
    "F": 147.06841,
    "G": 57.02146,
    "H": 137.05891,
    "I": 113.08406,
    "K": 128.09496,
    "L": 113.08406,
    "M": 131.04049,
    "N": 114.04293,
    "P": 97.05276,
    "Q": 128.05858,
    "R": 156.10111,
    "S": 87.03203,
    "T": 101.04768,
    "V": 99.06841,
    "W": 186.07931,
    "Y": 163.06333
}

# Peptide whose total residue mass we want to print.
protein = "SKADYEK"


def mapping_mass(residue):
    """Return the monoisotopic mass for a single residue letter."""
    return mass_map[str(residue)]


# Lazily map each residue to its mass, then print the peptide's total mass.
mass = (mapping_mass(residue) for residue in protein)
print(sum(mass))
"""A Space representing an RGB Image."""
import numpy as np

from akro.box import Box


class Image(Box):
    """An Image, represented by a Box of at most three dimensions.

    This class allows us to type check the observation input and decide
    whether to normalize. Each dimension must have pixel values between
    [0, 255].

    Args:
        shape(tuple): Shape of the observation. The shape cannot have more
            than 3 dimensions.
    """

    def __init__(self, shape):
        assert len(shape) <= 3, 'Images must have at most three dimensions'
        # Pixel-valued Box: every element is a uint8 in [0, 255].
        # NOTE(review): super(Box, self) deliberately(?) bypasses
        # Box.__init__ and invokes Box's own parent class instead. Confirm
        # this is intentional and not a typo for super(Image, self) /
        # super() — the distinction changes which __init__ runs.
        super(Box, self).__init__(low=0, high=255, shape=shape, dtype=np.uint8)
Expansion and Intensification of the North American Monsoon During the Pliocene Southwestern North America (SWNA), like many subtropical regions, is predicted to become drier in response to anthropogenic warming. However, during the Pliocene, when carbon dioxide was above pre‐industrial levels, multiple lines of evidence suggest that SWNA was much wetter. While existing explanations for a wet Pliocene invoke increases in winter rain, recent modeling studies hypothesize that summer rain may have also played an important role. Here, we present the first direct evidence for an intensified mid‐Pliocene monsoon in SWNA using leaf wax hydrogen isotopes. These new records provide evidence that the mid‐Pliocene featured an intensified and expanded North American Monsoon. Using proxies and isotope‐enabled model simulations, we show that monsoon intensification is linked to amplified warming on the southern California margin relative to the tropical Pacific. This mechanism has clear relevance for understanding present‐day monsoon variations, since we show that intervals of amplified subtropical warming on the California margin, as are seen during modern California margin heat waves, are associated with a stronger monsoon. Because marine heat waves are predicted to increase in frequency, the future may bring intervals of “Pliocene‐like” rainfall that co‐exist with intensifying megadrought in SWNA, with implications for ecosystems, human infrastructure, and water resources.
// NewHub creates a new Hub with a unique name. If the ID is already in use // NewHub returns the hub with that ID as well as ErrDuplicateHubID func NewHub(id string) (*Hub, error) { if _, ok := hubs[id]; ok { return hubs[id], ErrDuplicateHubID } h := &Hub{} h.ID = id h.subscriptions = make(map[string]SubscriptionSet) hubs[id] = h return h, nil }
def code_object(fn):
    """Return fn's code object, working on both Python 2 and Python 3.

    Python 2 exposed it as ``fn.func_code``; Python 3 renamed it to
    ``fn.__code__``.
    """
    if hasattr(fn, "func_code"):  # Python 2 spelling
        return fn.func_code
    return fn.__code__  # Python 3 spelling
package auth import ( "fmt" "github.com/dgrijalva/jwt-go" "github.com/spf13/viper" "github.com/yasser-sobhy/sparrow/core" ) // TokenAuth allow users to login using JWT token type TokenAuth struct { APIEndpoint string Token string } func NewTokenAuth() TokenAuth { return TokenAuth{ ApiEndpoint: viper.GetInt("token_auth.api_endpoint"), Token: viper.GetInt("token_auth.token"), } } func (tokenAuth *TokenAuth) Login(ws *core.Conn, inputToken []byte) bool { // Parse takes the token string and a function for looking up the key. The latter is especially // useful if you use multiple keys for your application. The standard is to use 'kid' in the // head of the token to identify which key to use, but the parsed token (head and claims) is provided // to the callback, providing flexibility. token, err := jwt.Parse(inputToken, func(token *jwt.Token) (interface{}, error) { // Don't forget to validate the alg is what you expect: if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, fmt.Errorf("Unexpected signing method: %v", token.Header["alg"]) } // hmacSampleSecret is a []byte containing your secret, e.g. []byte("my_secret_key") return hmacSampleSecret, nil }) if claims, ok := token.Claims.(jwt.MapClaims); ok && token.Valid { fmt.Println(claims["foo"], claims["nbf"]) } else { fmt.Println(err) } }
# <reponame>mdhom/python-e3dc  (dataset residue marker, kept as a comment)
#!/usr/bin/env python
# Python class to connect to an E3/DC system.
#
# Copyright 2017 <NAME> <<EMAIL>>
# Licensed under a MIT license. See LICENSE for details

import math
import struct
import time
import zlib

from . import _rscpTags as rscpTags

# struct pack-format characters for RSCP datatypes whose encoded size is fixed.
packFmtDict_FixedSize = {
    "Bool": "?",
    "Char8": "b",
    "UChar8": "B",
    "Int16": "h",
    "Uint16": "H",
    "Int32": "i",
    "Uint32": "I",
    "Int64": "q",
    "Uint64": "Q",
    "Float32": "f",
    "Double64": "d",
}
# Datatypes whose encoded size depends on the payload length ('s' is sized
# at pack time by prefixing the byte count).
packFmtDict_VarSize = {
    "Bitfield": "s",
    "CString": "s",
    "Container": "s",
    "ByteArray": "s",
    "Error": "s",
}


def rscpFindTag(decodedMsg, tag):
    """Finds a submessage with a specific tag.

    Recursively searches decodedMsg (a (tag, type, value) triple whose value
    may itself be a list of triples) depth-first for the first triple whose
    tag matches.

    Args:
        decodedMsg (list): the decoded message
        tag (str): the RSCP Tag string to search for

    Returns:
        list: the found tag
    """
    if decodedMsg is None:
        return None
    if decodedMsg[0] == tag:
        return decodedMsg
    # A list in slot 2 means this message is a container: recurse into it.
    if isinstance(decodedMsg[2], list):
        for msg in decodedMsg[2]:
            msgValue = rscpFindTag(msg, tag)
            if msgValue is not None:
                return msgValue
    return None


def rscpFindTagIndex(decodedMsg, tag, index=2):
    """Finds a submessage with a specific tag and extracts an index.

    Args:
        decodedMsg (list): the decoded message
        tag (str): the RSCP Tag string to search for
        index (Optional[int]): the index of the found tag to return. Default
            is 2, the value of the Tag.

    Returns:
        the content of the configured index for the tag.
    """
    tag = rscpFindTag(decodedMsg, tag)
    if tag is not None:
        return tag[index]
    return None


def endianSwapUint16(val):
    """Endian swaps magic and ctrl."""
    # Round-trip through big-endian pack / little-endian unpack to swap bytes.
    return struct.unpack("<H", struct.pack(">H", val))[0]


class FrameError(Exception):
    """Class for Frame Error Exception."""

    pass


def rscpEncode(tagStr, typeStr=None, data=None):
    """RSCP encodes data.

    Accepts either (tagStr, typeStr, data) or a single (tag, type, data)
    tuple as the first argument. Returns the encoded bytes: a little-endian
    header (Uint32 tag, Uint8 type, Uint16 length) followed by the payload.
    """
    if isinstance(tagStr, tuple):
        # Unpack a (tag, type, data) triple passed as the sole argument.
        typeStr = tagStr[1]
        data = tagStr[2]
        tagStr = tagStr[0]
    else:
        if typeStr is None:
            raise TypeError("Second argument must not be none if first is not a tuple")

    tagHex = rscpTags.getHexTag(tagStr)
    typeHex = rscpTags.getHexDatatype(typeStr)
    if type(data) is str:
        data = data.encode("utf-8")

    packFmt = (
        "<IBH"  # format of header: little-endian, Uint32 tag, Uint8 type, Uint16 length
    )
    headerLen = struct.calcsize(packFmt)

    if typeStr == "None":  # special case: no content
        return struct.pack(packFmt, tagHex, typeHex, 0)
    elif (
        typeStr == "Timestamp"
    ):  # timestamp has a special format, divided into 32 bit integers
        ts = int(data / 1000)  # this is int64
        ms = (data - ts * 1000) * 1e6  # ms are multiplied by 10^6
        # NOTE(review): ms is a float here but is packed with an "i" format
        # below — confirm callers always pass values that make this packable.
        hiword = ts >> 32
        loword = ts & 0xFFFFFFFF
        packFmt += "iii"
        length = struct.calcsize(packFmt) - headerLen
        return struct.pack(packFmt, tagHex, typeHex, length, hiword, loword, ms)
    elif typeStr == "Container":
        if isinstance(data, list):
            # Recursively encode each child triple and concatenate the bytes.
            newData = b""
            for dataChunk in data:
                newData += rscpEncode(
                    dataChunk[0], dataChunk[1], dataChunk[2]
                )  # transform each dataChunk into byte array
            data = newData
        packFmt += str(len(data)) + packFmtDict_VarSize[typeStr]
    elif typeStr in packFmtDict_FixedSize:
        packFmt += packFmtDict_FixedSize[typeStr]
    elif typeStr in packFmtDict_VarSize:
        packFmt += str(len(data)) + packFmtDict_VarSize[typeStr]

    length = struct.calcsize(packFmt) - headerLen
    return struct.pack(packFmt, tagHex, typeHex, length, data)


def rscpFrame(data):
    """Generates RSCP frame.

    Wraps already-encoded RSCP data in a frame: magic, ctrl, timestamp,
    length, payload, then a trailing unsigned CRC32 over everything before it.
    """
    magic = endianSwapUint16(0xE3DC)
    ctrl = endianSwapUint16(0x11)  # 0x10 bit = CRC enabled
    t = time.time()
    # NOTE(review): seconds use math.ceil(t) while the sub-second part uses
    # int(t) truncation — the two can disagree around whole seconds; confirm.
    sec1 = math.ceil(t)
    sec2 = 0
    ns = round((t - int(t)) * 1000)
    length = len(data)
    packFmt = "<HHIIIH" + str(length) + "s"
    frame = struct.pack(packFmt, magic, ctrl, sec1, sec2, ns, length, data)
    crc = zlib.crc32(frame) % (1 << 32)  # unsigned crc32
    frame += struct.pack("<I", crc)
    return frame


def rscpFrameDecode(frameData, returnFrameLen=False):
    """Decodes RSCP Frame.

    Returns (data, timestamp) or, when returnFrameLen is True,
    (data, timestamp, totalLen) so a caller can step through a buffer that
    holds several consecutive frames. Raises FrameError on CRC mismatch.
    """
    headerFmt = "<HHIIIH"
    crcFmt = "I"
    crc = None

    magic, ctrl, sec1, sec2, ns, length = struct.unpack(
        headerFmt, frameData[: struct.calcsize(headerFmt)]
    )
    magic = endianSwapUint16(magic)
    ctrl = endianSwapUint16(ctrl)

    if ctrl & 0x10:  # crc enabled
        totalLen = struct.calcsize(headerFmt) + length + struct.calcsize(crcFmt)
        data, crc = struct.unpack(
            "<" + str(length) + "s" + crcFmt,
            frameData[struct.calcsize(headerFmt) : totalLen],
        )
    else:
        totalLen = struct.calcsize(headerFmt) + length
        data = struct.unpack(
            "<" + str(length) + "s", frameData[struct.calcsize(headerFmt) : totalLen]
        )[0]

    # check crc
    if crc is not None:
        crcCalc = zlib.crc32(frameData[: -struct.calcsize("<" + crcFmt)]) % (
            1 << 32
        )  # unsigned crc32
        if crcCalc != crc:
            raise FrameError("CRC32 not validated")

    timestamp = sec1 + float(ns) / 1000
    if returnFrameLen:
        return data, timestamp, totalLen
    else:
        return data, timestamp


def rscpDecode(data):
    """Decodes RSCP data.

    Returns ((tag, type, value), bytesConsumed). If the buffer starts with
    the 0xE3DC frame magic, the frame is unwrapped first and its payload
    decoded recursively.
    """
    headerFmt = (
        "<IBH"  # format of header: little-endian, Uint32 tag, Uint8 type, Uint16 length
    )
    headerSize = struct.calcsize(headerFmt)

    magicCheckFmt = ">H"
    magic = struct.unpack(magicCheckFmt, data[: struct.calcsize(magicCheckFmt)])[0]
    if magic == 0xE3DC:
        # we have a frame: decode it
        # print "Decoding frame in rscpDecode"
        return rscpDecode(rscpFrameDecode(data)[0])

    # decode header
    hexTag, hexType, length = struct.unpack(
        headerFmt, data[: struct.calcsize(headerFmt)]
    )
    # print (hex(hexTag), hex(hexType), length, data[struct.calcsize(headerFmt):])
    strTag = rscpTags.getTag(hexTag)
    strType = rscpTags.getDatatype(hexType)

    if strType == "Container":
        # this is a container: parse the inside
        dataList = []
        curByte = headerSize
        while curByte < headerSize + length:
            innerData, usedLength = rscpDecode(data[curByte:])
            curByte += usedLength
            dataList.append(innerData)
        return (strTag, strType, dataList), curByte
    elif strType == "Timestamp":
        fmt = "<iii"
        hiword, loword, ms = struct.unpack(
            fmt, data[headerSize : headerSize + struct.calcsize(fmt)]
        )
        # t = float((hiword << 32) + loword) + (float(ms)*1e-9) # this should work, but doesn't
        t = float(hiword + loword) + (float(ms) * 1e-9)  # this seems to be correct
        return (strTag, strType, t), headerSize + struct.calcsize(fmt)
    elif strType == "None":
        return (strTag, strType, None), headerSize
    elif strType in packFmtDict_FixedSize:
        fmt = "<" + packFmtDict_FixedSize[strType]
    elif strType in packFmtDict_VarSize:
        fmt = "<" + str(length) + packFmtDict_VarSize[strType]

    val = struct.unpack(fmt, data[headerSize : headerSize + struct.calcsize(fmt)])[0]
    if strType == "Error":
        # Translate the raw little-endian error bytes into a symbolic code.
        val = rscpTags.getErrorcode(int.from_bytes(val, "little"))
    elif isinstance(val, bytes) and strType == "CString":
        # return string instead of bytes
        # ignore none utf-8 bytes
        val = val.decode("utf-8", "ignore")

    return (strTag, strType, val), headerSize + struct.calcsize(fmt)
#!/usr/bin/env python
# NOTE: removed a stray "<reponame>..." dataset-residue line that made the
# original file a syntax error.


class BaseModule(object):
    """Base class for API modules; keeps a reference to the shared engine."""

    def __init__(self, engine):
        self.engine = engine


class BaseMonitorDimension(object):
    """Wraps keyword arguments describing one monitoring dimension.

    Subclasses are expected to override METRIC_NAME / NAME_SPACE.
    """

    # Overridden by concrete dimension subclasses.
    METRIC_NAME = None
    NAME_SPACE = None

    def __init__(self, kwargs):
        # Callers typically pass locals(), so drop the bound "self" entry.
        # This intentionally mutates the caller's dict, matching the
        # original behavior.
        kwargs.pop("self", 0)
        self.kwargs = kwargs

    def to_list(self):
        """Return the dimensions as a list of {"name": ..., "value": ...} dicts."""
        return [{"name": k, "value": v} for k, v in self.kwargs.items()]
def custom_collate_fn(batches):
    """Collate a list of Batch objects into a single padded Batch.

    Pads every src/tgt sequence in the mini-batch to the length of the
    longest one (plus one) and stacks them into a single tensor.

    NOTE(review): padding_tensor also builds a 0/1 validity mask, but both
    src_mask and tgt_mask are discarded — the returned Batch is constructed
    without them. Confirm whether Batch derives its own masks from `pad` or
    whether this is a bug.
    """

    def padding_tensor(sequences):
        # Pad to the longest sequence plus one extra (padding) position.
        # assumes each element is shaped (1, seq_len) — TODO confirm
        num = len(sequences)
        max_len = max([s.size(1) for s in sequences]) + 1
        out_dims = (num, max_len)
        # .data.new(...) allocates on the same device/dtype as the inputs.
        out_tensor = sequences[0].data.new(*out_dims).fill_(0)
        mask = sequences[0].data.new(*out_dims).fill_(0)
        for i, tensor in enumerate(sequences):
            length = tensor.size(1)
            out_tensor[i, :length] = tensor
            mask[i, :length] = 1  # 1 marks real tokens, 0 marks padding
        return out_tensor, mask

    min_batch_src = [batch.src for batch in batches]
    min_batch_tgt = [batch.tgt for batch in batches]
    src, src_mask = padding_tensor(min_batch_src)
    tgt, tgt_mask = padding_tensor(min_batch_tgt)
    # All batches are assumed to share the same pad token id — TODO confirm.
    return Batch(src=src, trg=tgt, pad=batches[0].pad)