content
stringlengths 10
4.9M
|
---|
<reponame>hunshikan/corant
/*
* Copyright (c) 2013-2018, Bingo.Chen (<EMAIL>).
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.corant.modules.jta.narayana.objectstore;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.arjuna.ats.arjuna.common.ObjectStoreEnvironmentBean;
import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.arjuna.exceptions.ObjectStoreException;
import com.arjuna.ats.arjuna.objectstore.ObjectStoreAPI;
import com.arjuna.ats.arjuna.objectstore.StateStatus;
import com.arjuna.ats.arjuna.state.InputBuffer;
import com.arjuna.ats.arjuna.state.InputObjectState;
import com.arjuna.ats.arjuna.state.OutputBuffer;
import com.arjuna.ats.arjuna.state.OutputObjectState;
import com.arjuna.ats.internal.arjuna.objectstore.jdbc.JDBCImple_driver;
/**
 * corant-modules-jta-narayana
 *
 * <p>
 * Base {@link ObjectStoreAPI} implementation that delegates all transaction-state operations to a
 * Narayana JDBC store driver ({@code _theImple}). Subclasses are expected to initialize the driver
 * and the store/table names from the supplied environment bean. -- TODO confirm subclass contract
 *
 * @author bingo 下午4:52:52
 *
 */
public class AbstractDomainObjectStore implements ObjectStoreAPI {

  // Default database table name used to persist transaction records.
  protected static final String DEFAULT_TABLE_NAME = "JBossTSTxTable";
  // Logical store-name mapping; not used in this class, presumably populated by
  // subclasses -- TODO confirm intended use.
  protected Map<String, String> storeNames = new HashMap<>();
  // The underlying Narayana JDBC driver every state operation is delegated to.
  protected JDBCImple_driver _theImple;
  // Database table backing this store; (de)serialized by packInto/unpackFrom.
  protected String tableName;
  // Configuration supplied at construction time.
  protected final ObjectStoreEnvironmentBean jdbcStoreEnvironmentBean;
  // Name reported by getStoreName().
  protected String _storeName;

  /**
   * Create a new JDBCStore
   *
   * @param jdbcStoreEnvironmentBean The environment bean containing the configuration
   * @throws ObjectStoreException In case the store environment bean was not correctly configured
   */
  public AbstractDomainObjectStore(ObjectStoreEnvironmentBean jdbcStoreEnvironmentBean)
      throws ObjectStoreException {
    this.jdbcStoreEnvironmentBean = jdbcStoreEnvironmentBean;
  }

  /** Obtain all Uids of the given type regardless of their state. */
  @Override
  public boolean allObjUids(String s, InputObjectState buff) throws ObjectStoreException {
    return allObjUids(s, buff, StateStatus.OS_UNKNOWN);
  }

  /** Obtain all Uids of the given type whose state matches {@code match}; delegates to the driver. */
  @Override
  public boolean allObjUids(String tName, InputObjectState state, int match)
      throws ObjectStoreException {
    return _theImple.allObjUids(tName, state, match);
  }

  /** Obtain all type names known to the store; delegates to the driver. */
  @Override
  public boolean allTypes(InputObjectState foundTypes) throws ObjectStoreException {
    return _theImple.allTypes(foundTypes);
  }

  /** Commit the previously written uncommitted state; delegates to the driver. */
  @Override
  public boolean commit_state(Uid objUid, String tName) throws ObjectStoreException {
    return _theImple.commit_state(objUid, tName);
  }

  /** Return the current {@link StateStatus} of the object; delegates to the driver. */
  @Override
  public int currentState(Uid objUid, String tName) throws ObjectStoreException {
    return _theImple.currentState(objUid, tName);
  }

  /**
   * Does this store need to do the full write_uncommitted/commit protocol?
   *
   * @return <code>true</code> if full commit is needed, <code>false</code> otherwise.
   */
  @Override
  public boolean fullCommitNeeded() {
    return true;
  }

  @Override
  public String getStoreName() {
    return _storeName;
  }

  /** Hide the object's state from view; delegates to the driver. */
  @Override
  public boolean hide_state(Uid objUid, String tName) throws ObjectStoreException {
    return _theImple.hide_state(objUid, tName);
  }

  /**
   * Is the current state of the object the same as that provided as the last parameter?
   *
   * @param u The object to work on.
   * @param tn The type of the object.
   * @param st The expected type of the object.
   *
   * @return <code>true</code> if the current state is as expected, <code>false</code> otherwise.
   */
  @Override
  public boolean isType(Uid u, String tn, int st) throws ObjectStoreException {
    return currentState(u, tn) == st;
  }

  /** Serialize the backing table name into {@code buff}. */
  public synchronized void packInto(OutputBuffer buff) throws IOException {
    buff.packString(tableName);
  }

  /** Read the committed state of the object; delegates to the driver. */
  @Override
  public InputObjectState read_committed(Uid storeUid, String tName) throws ObjectStoreException {
    return _theImple.read_state(storeUid, tName, StateStatus.OS_COMMITTED);
  }

  /** Read the uncommitted state of the object; delegates to the driver. */
  @Override
  public InputObjectState read_uncommitted(Uid storeUid, String tName) throws ObjectStoreException {
    return _theImple.read_state(storeUid, tName, StateStatus.OS_UNCOMMITTED);
  }

  /** Remove the committed state of the object; delegates to the driver. */
  @Override
  public boolean remove_committed(Uid storeUid, String tName) throws ObjectStoreException {
    return _theImple.remove_state(storeUid, tName, StateStatus.OS_COMMITTED);
  }

  /** Remove the uncommitted state of the object; delegates to the driver. */
  @Override
  public boolean remove_uncommitted(Uid storeUid, String tName) throws ObjectStoreException {
    return _theImple.remove_state(storeUid, tName, StateStatus.OS_UNCOMMITTED);
  }

  /** Make a previously hidden state visible again; delegates to the driver. */
  @Override
  public boolean reveal_state(Uid objUid, String tName) throws ObjectStoreException {
    return _theImple.reveal_state(objUid, tName);
  }

  // Lifecycle hooks: nothing to start or stop for this store.
  @Override
  public void start() {}

  @Override
  public void stop() {}

  /**
   * Some object store implementations may be running with automatic sync disabled. Calling this
   * method will ensure that any states are flushed to disk.
   */
  @Override
  public void sync() throws java.io.SyncFailedException, ObjectStoreException {}

  /** Restore the backing table name from {@code buff}. */
  public synchronized void unpackFrom(InputBuffer buff) throws IOException {
    tableName = buff.unpackString();
  }

  /** Write the committed state of the object; delegates to the driver. */
  @Override
  public boolean write_committed(Uid storeUid, String tName, OutputObjectState state)
      throws ObjectStoreException {
    return _theImple.write_state(storeUid, tName, state, StateStatus.OS_COMMITTED);
  }

  /** Write the uncommitted state of the object; delegates to the driver. */
  @Override
  public boolean write_uncommitted(Uid storeUid, String tName, OutputObjectState state)
      throws ObjectStoreException {
    return _theImple.write_state(storeUid, tName, state, StateStatus.OS_UNCOMMITTED);
  }
}
|
def accuracyViews(outputs, targets, classes, views=(4, 8, 12)):
    """Top-1 accuracy of ``outputs`` vs ``targets`` at several label granularities.

    For each entry ``v`` in ``views``, class ids are mapped onto ``v`` coarse
    buckets by floor-dividing them by ``classes / v``, and accuracy is measured
    on those buckets.  # assumes `classes` divides evenly by each view -- TODO confirm

    :param outputs: logits/scores tensor of shape (batch, classes)
    :param targets: integer class-id tensor of shape (batch,)
    :param classes: total number of fine-grained classes
    :param views: bucket counts to evaluate at
    :return: list of percentage tensors, one per entry in ``views``
    """
    with torch.no_grad():
        top1 = outputs.topk(1, 1)[1].view(-1)
        results = []
        for n_views in views:
            bucket = classes / n_views
            hits = torch.eq(top1 // bucket, targets // bucket)
            results.append(hits.float().mean() * 100)
        return results
package rpc
import (
"context"
"encoding/hex"
"fmt"
"testing"
ics23 "github.com/confio/ics23/go"
"github.com/cosmos/iavl"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
dbm "github.com/tendermint/tm-db"
abci "github.com/line/ostracon/abci/types"
"github.com/line/ostracon/crypto/merkle"
"github.com/line/ostracon/libs/bytes"
lcmock "github.com/line/ostracon/light/rpc/mocks"
tmcrypto "github.com/line/ostracon/proto/ostracon/crypto"
rpcmock "github.com/line/ostracon/rpc/client/mocks"
ctypes "github.com/line/ostracon/rpc/core/types"
"github.com/line/ostracon/types"
)
// TestABCIQuery tests ABCIQuery requests and verifies proofs. HAPPY PATH 😀
func TestABCIQuery(t *testing.T) {
	// Build a one-entry IAVL tree and produce a membership proof for "foo".
	tree, err := iavl.NewMutableTree(dbm.NewMemDB(), 100)
	require.NoError(t, err)

	var (
		key   = []byte("foo")
		value = []byte("bar")
	)
	tree.Set(key, value)

	commitmentProof, err := tree.GetMembershipProof(key)
	require.NoError(t, err)

	op := &testOp{
		Spec:  ics23.IavlSpec,
		Key:   key,
		Proof: commitmentProof,
	}

	// Mock the underlying RPC client so any ABCI query for `key` at height 1
	// comes back with the value and the ics23 proof built above.
	next := &rpcmock.Client{}
	next.On(
		"ABCIQueryWithOptions",
		context.Background(),
		mock.AnythingOfType("string"),
		bytes.HexBytes(key),
		mock.AnythingOfType("client.ABCIQueryOptions"),
	).Return(&ctypes.ResultABCIQuery{
		Response: abci.ResponseQuery{
			Code:   0,
			Key:    key,
			Value:  value,
			Height: 1,
			ProofOps: &tmcrypto.ProofOps{
				Ops: []tmcrypto.ProofOp{op.ProofOp()},
			},
		},
	}, nil)

	// Mock the light client: verification at height 2 returns a header whose
	// AppHash the proof must verify against.
	// NOTE(review): the hex constant is presumably the root hash of the tree
	// built above -- confirm if the fixture ever changes.
	lc := &lcmock.LightClient{}
	appHash, _ := hex.DecodeString("5EFD44055350B5CC34DBD26085347A9DBBE44EA192B9286A9FC107F40EA1FAC5")
	lc.On("VerifyLightBlockAtHeight", context.Background(), int64(2), mock.AnythingOfType("time.Time")).Return(
		&types.LightBlock{
			SignedHeader: &types.SignedHeader{
				Header: &types.Header{AppHash: appHash},
			},
		},
		nil,
	)

	// Client under test: wire the mocks together and register the test decoder
	// for the "ics23:iavl" proof-op type.
	c := NewClient(next, lc,
		KeyPathFn(func(_ string, key []byte) (merkle.KeyPath, error) {
			kp := merkle.KeyPath{}
			kp = kp.AppendKey(key, merkle.KeyEncodingURL)
			return kp, nil
		}))
	c.RegisterOpDecoder("ics23:iavl", testOpDecoder)

	// The query must succeed and yield a verified, non-nil result.
	res, err := c.ABCIQuery(context.Background(), "/store/accounts/key", key)

	require.NoError(t, err)
	assert.NotNil(t, res)
}
// testOp is a minimal proof operator backed by an ics23 commitment proof;
// it is used to exercise proof verification in these tests.
type testOp struct {
	Spec  *ics23.ProofSpec
	Key   []byte
	Proof *ics23.CommitmentProof
}

// Compile-time check that testOp satisfies merkle.ProofOperator.
var _ merkle.ProofOperator = testOp{}
// GetKey returns the key this proof operator applies to.
func (op testOp) GetKey() []byte {
	return op.Key
}
// ProofOp encodes this operator as a wire-format ProofOp, panicking if the
// underlying ics23 proof cannot be marshalled (test-only convenience).
func (op testOp) ProofOp() tmcrypto.ProofOp {
	raw, err := op.Proof.Marshal()
	if err != nil {
		panic(err.Error())
	}
	encoded := tmcrypto.ProofOp{
		Type: "ics23:iavl",
		Key:  op.Key,
		Data: raw,
	}
	return encoded
}
// Run verifies the proof against its own calculated root. With no args it
// checks non-membership of the key; with exactly one arg it checks membership
// of key/args[0]. Batch proofs are not supported.
func (op testOp) Run(args [][]byte) ([][]byte, error) {
	// calculate root from proof
	calculated, err := op.Proof.Calculate()
	if err != nil {
		return nil, fmt.Errorf("could not calculate root for proof: %v", err)
	}
	// Only support an existence proof or nonexistence proof (batch proofs currently unsupported)
	if len(args) == 0 {
		// Args are nil, so we verify the absence of the key.
		if !ics23.VerifyNonMembership(op.Spec, calculated, op.Proof, op.Key) {
			return nil, fmt.Errorf("proof did not verify absence of key: %s", string(op.Key))
		}
	} else if len(args) == 1 {
		// Args is length 1, verify existence of key with value args[0]
		if !ics23.VerifyMembership(op.Spec, calculated, op.Proof, op.Key, args[0]) {
			return nil, fmt.Errorf("proof did not verify existence of key %s with given value %x", op.Key, args[0])
		}
	} else {
		return nil, fmt.Errorf("args must be length 0 or 1, got: %d", len(args))
	}
	return [][]byte{calculated}, nil
}
// testOpDecoder rebuilds a testOp from its wire-format ProofOp.
func testOpDecoder(pop tmcrypto.ProofOp) (merkle.ProofOperator, error) {
	decoded := &ics23.CommitmentProof{}
	if err := decoded.Unmarshal(pop.Data); err != nil {
		return nil, err
	}
	return testOp{
		Key:   pop.Key,
		Spec:  ics23.IavlSpec,
		Proof: decoded,
	}, nil
}
|
def receptive_field(self, xg, yg, rftype='square', size=None):
    """Build a receptive-field weighting over the coordinate grids ``xg``/``yg``.

    :param xg: grid of x coordinates (array, same shape as ``yg``)
    :param yg: grid of y coordinates
    :param rftype: 'square' for a binary box mask, 'gaussian' for a normalized
        Gaussian weighting (``size`` acts as its sigma)
    :param size: field extent; defaults to the cell's diameter (2 * radius)
    :return: float array with the same shape as ``xg``
    :raises ValueError: for an unrecognized ``rftype``
    """
    if size is None:
        size = 2 * self.radius
    if rftype == 'square':
        # Binary mask: 1 inside the (strict) box around the center, 0 outside.
        field = np.zeros(xg.shape).astype(np.float32)
        half = size / 2.0
        inside = np.where((xg > self.x_center - half)
                          & (xg < self.x_center + half)
                          & (yg > self.y_center - half)
                          & (yg < self.y_center + half))
        field[inside] = 1.0
    elif rftype == 'gaussian':
        # Gaussian of the squared distance to the center, normalized to sum to 1.
        sq_dist = (xg - self.x_center) ** 2 + (yg - self.y_center) ** 2
        field = np.exp(-sq_dist / (2 * size ** 2))
        field /= np.sum(field)
    else:
        raise ValueError("Acceptable values for `rftype` are 'square' or 'gaussian'")
    return field
/**
 * cairo::createVirtualDevice: Create a VCL virtual device for the CGContext in the cairo Surface
 *
 * @return The new virtual device
 **/
boost::shared_ptr<VirtualDevice> Os2Surface::createVirtualDevice() const
{
    // Hand an empty (size-only) SystemGraphicsData to the device constructor.
    SystemGraphicsData aSysData;
    aSysData.nSize = sizeof(SystemGraphicsData);

    OSL_TRACE( "Os2Surface::createVirtualDevice");

    VirtualDevice* pDevice =
        new VirtualDevice( &aSysData, sal::static_int_cast<USHORT>(getDepth()) );
    return boost::shared_ptr<VirtualDevice>( pDevice );
}
/**
* Multiplies the 3D point by the supplied 3D point using cross (vector) product.
* Formula taken https://www.movable-type.co.uk/scripts/latlong-vectors.html
* @param other - GeoName containing 3D point to be crossed with this GeoName's 3D point.
* @return cross product's length
*/
private double cross(GeoName other) {
double x = (this.point[Y] * other.point[Z]) - (this.point[Z] * other.point[Y]);
double y = (this.point[Z] * other.point[X]) - (this.point[X] * other.point[Z]);
double z = (this.point[X] * other.point[Y]) - (this.point[Y] * other.point[X]);
return sqrt((x * x) + (y * y) + (z * z));
} |
/** Base class for Vulkan dispatchable handles. */
abstract class DispatchableHandleInstance extends Pointer.Default {

    // Instance-level function pointers resolved for this handle.
    private final VKCapabilitiesInstance capabilities;

    /**
     * @param handle       the native Vulkan handle value
     * @param capabilities the instance capabilities to associate with the handle
     */
    DispatchableHandleInstance(long handle, VKCapabilitiesInstance capabilities) {
        super(handle);
        this.capabilities = capabilities;
    }

    /** Returns the {@link VKCapabilitiesInstance} instance associated with this dispatchable handle. */
    public VKCapabilitiesInstance getCapabilities() {
        return capabilities;
    }
}
def open_asm2(filename):
    """Read the assembly listing ``<filename>.asm`` as a list of raw byte lines.

    :param filename: path to the listing, without the '.asm' extension
    :return: list of bytes objects, one per line (line endings preserved)
    """
    source_path = os.path.abspath(filename + '.asm')
    with open(source_path, 'rb') as handle:
        return list(handle)
<reponame>henrythasler/telescope-sync
#ifndef LEDMANAGER_H
#define LEDMANAGER_H

#ifdef ARDUINO
#include <Arduino.h>

// Drives a single status LED through a set of predefined blink/flash patterns.
class LEDManager
{
public:
    // Supported output patterns for the managed LED.
    typedef enum
    {
        OFF,
        ON,
        BLINK_10HZ,
        BLINK_1HZ,
        FLASH_4X_EVERY_5S,
        FLASH_2X_EVERY_5S,
        FLASH_1X_EVERY_5S,
        READY,
    } LEDMode;

    // Pattern description: initial level plus up to 16 timing steps.
    // NOTE(review): the units/meaning of `timing` entries are not visible in
    // this header -- confirm against the implementation file.
    typedef struct
    {
        bool startMode;
        uint8_t timing[16];
    } LEDTiming;

    LEDManager(uint8_t pin=LED_BUILTIN, LEDMode mode=LEDMode::OFF);
    void setMode(LEDMode mode);
    // Advance the pattern state machine; `micros` is presumably the current
    // time in microseconds -- confirm with the implementation.
    void update(uint32_t micros=0);

private:
    uint8_t pin = LED_BUILTIN;
    LEDMode mode = LEDMode::OFF;

    // Per-state on/off phase durations.
    typedef struct
    {
        uint32_t on[6] = {0};
        uint32_t off[6] = {0};
    } Timing;
    Timing timing[32]; // support 32 states
};
#endif

#endif // LEDMANAGER_H
/**
* An implementation of SynchronizerRepository for a Zip file.
*/
public static class ZipRepository implements SynchronizerRepository {
private final File zip_file_ob;
private final ZipFile zip_file;
private List<String> zip_dir_list;
private List<ZipFileObject> zip_file_list;
public ZipRepository(File zip_file_ob) throws IOException {
this.zip_file_ob = zip_file_ob;
this.zip_file = new ZipFile(zip_file_ob);
}
public void init() {
List<ZipFileObject> file_obs = new ArrayList();
Set<String> directories = new HashSet();
directories.add("/");
// Build the directory table,
Enumeration<? extends ZipEntry> entries = zip_file.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
String name = entry.getName();
if (!entry.isDirectory()) {
// Some zip files don't have directory entries, so we need to make
// sure all the sub-directories of a file are included,
String path_str = "/" + name;
while (true) {
int delim = path_str.lastIndexOf("/");
if (delim <= 0) break;
path_str = path_str.substring(0, delim + 1);
directories.add(path_str);
path_str = path_str.substring(0, delim);
}
file_obs.add(new ZipFileObject(zip_file, entry));
}
else {
directories.add("/" + name);
}
}
// Add all the directories to a directory list,
List<String> dir_list = new ArrayList(directories.size());
dir_list.addAll(directories);
// Sort the file list,
Collections.sort(file_obs);
// Sort the directory list,
Collections.sort(dir_list);
zip_dir_list = dir_list;
zip_file_list = file_obs;
}
@Override
public List<SynchronizerFile> allFiles(String path) {
List<SynchronizerFile> out_list = new ArrayList();
int p = Collections.binarySearch(zip_file_list, path, ZIP_PATH_COMPARATOR);
if (p < 0) {
p = -(p + 1);
}
int sz = zip_file_list.size();
while (p < sz) {
ZipFileObject f = zip_file_list.get(p);
String fp = f.getPath();
if (!fp.startsWith(path)) {
break;
}
if (fp.equals(path)) {
out_list.add(f);
}
++p;
}
return out_list;
}
@Override
public List<String> allSubDirectories(String path) {
// System.out.println(" subDirs = '" + path + "'");
// System.out.println(" zip_dir_list = " + zip_dir_list);
List<String> out_list = new ArrayList();
int p = Collections.binarySearch(zip_dir_list, path);
if (p < 0) {
p = -(p + 1);
}
else {
p = p + 1;
}
// System.out.println(" p = " + p);
int sz = zip_dir_list.size();
while (p < sz) {
String d = zip_dir_list.get(p);
int delim = d.lastIndexOf("/", d.length() - 2);
String parent_path = d.substring(0, delim + 1);
if (!parent_path.startsWith(path)) {
break;
}
if (parent_path.equals(path)) {
out_list.add(d.substring(delim + 1));
}
++p;
}
// System.out.println(" out_list = " + out_list);
return out_list;
}
@Override
public SynchronizerFile getFileObject(String path, String file_name) {
String absolute_name = path + file_name;
int p = Collections.binarySearch(zip_file_list,
absolute_name, ZIP_PATH_COMPARATOR);
if (p >= 0) {
return zip_file_list.get(p);
}
throw new RuntimeException("File not found");
}
@Override
public void writeCopyOf(SynchronizerFile file_ob, String path) throws IOException {
// Zip files are read-only.
throw new UnsupportedOperationException();
}
@Override
public boolean hasDirectory(String path) {
int p = Collections.binarySearch(zip_dir_list, path);
return (p >= 0);
}
@Override
public void makeDirectory(String path) {
// Zip files are read-only.
throw new UnsupportedOperationException();
}
@Override
public void removeDirectory(String path) {
// Zip files are read-only.
throw new UnsupportedOperationException();
}
} |
/**
* Checks each CharPair and gets the length for a String composed
* with the two characters of this CharPair
*/
static int checksCharPair(String s,int sLen,CharPair cp){
StringBuilder sb=new StringBuilder();
for(int i=0;i<sLen;i++){
char thisChar=s.charAt(i);
if(thisChar==cp.getFirstChar() || thisChar==cp.getSecondChar())
sb.append(thisChar);
}
return getAlternationLen(sb,sb.length());
} |
<gh_stars>0
from .healpix_map_like import HEALPixMapLike
from .ligo_map_like import LIGOMapLike
__all__ = ['HEALPixMapLike', 'LIGOMapLike']
|
package cli
import (
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"rsprd.com/spread/pkg/project"
)
// SpreadCli is the spread command line client.
// It bundles the process's standard streams with version information and the
// working directory that commands resolve projects from.
type SpreadCli struct {
	// input stream (ie. stdin)
	in io.ReadCloser
	// output stream (ie. stdout)
	out io.Writer
	// error stream (ie. stderr)
	err io.Writer

	// version is the client's release identifier.
	version string
	// workDir is the directory project lookups start from; see project().
	workDir string
}
// NewSpreadCli returns a spread command line interface (CLI) client.
// All functionality accessible from the command line is attached to this struct.
func NewSpreadCli(in io.ReadCloser, out, err io.Writer, version, workDir string) *SpreadCli {
	return &SpreadCli{
		in:      in,
		out:     out,
		err:     err,
		version: version,
		workDir: workDir,
	}
}
// projectOrDie returns the project for the working directory, terminating the
// process (via fatalf) if it cannot be located or opened.
func (c SpreadCli) projectOrDie() *project.Project {
	proj, err := c.project()
	if err != nil {
		c.fatalf("%v", err)
	}
	return proj
}
// project locates the nearest Spread project directory at or above the
// working directory and opens it.
func (c SpreadCli) project() (*project.Project, error) {
	if len(c.workDir) == 0 {
		return nil, fmt.Errorf("Encountered error: %v", ErrNoWorkDir)
	}

	// Walk upward from workDir looking for the Spread directory marker.
	root, found := findPath(c.workDir, project.SpreadDirectory, true)
	if !found {
		return nil, fmt.Errorf("Not in a Spread project.")
	}

	proj, err := project.OpenProject(root)
	if err != nil {
		return nil, fmt.Errorf("Error opening project: %v", err)
	}
	return proj, nil
}
// globalProject opens the user's global project, initializing it on first use.
// NOTE(review): first-use detection matches on the error string suffix
// "no such file or directory", which is brittle across platforms and error
// wrapping -- confirm whether os.IsNotExist/errors.Is could be used instead.
func (c SpreadCli) globalProject() (*project.Project, error) {
	proj, err := project.Global()
	if err != nil {
		if strings.HasSuffix(err.Error(), "no such file or directory") {
			return project.InitGlobal()
		}
		return nil, err
	}
	return proj, nil
}
// printf writes a formatted message to the client's output stream,
// guaranteeing the message is newline-terminated.
func (c SpreadCli) printf(message string, data ...interface{}) {
	if !strings.HasSuffix(message, "\n") {
		message += "\n"
	}
	fmt.Fprintf(c.out, message, data...)
}
// fatalf prints the message like printf and then terminates the process with
// exit status 1.
func (c SpreadCli) fatalf(message string, data ...interface{}) {
	c.printf(message, data...)
	os.Exit(1)
}
// findPath walks from leafDir upward toward the filesystem root looking for
// targetFile (a directory when dir is true). It returns the full path of the
// first match and whether one was found.
func findPath(leafDir, targetFile string, dir bool) (string, bool) {
	if len(leafDir) == 0 {
		return "", false
	}
	candidate := filepath.Join(leafDir, targetFile)
	if exists, err := pathExists(candidate, dir); err == nil && exists {
		return candidate, true
	}
	// Stop at the root; otherwise recurse into the parent directory.
	if leafDir == "/" {
		return "", false
	}
	return findPath(filepath.Dir(leafDir), targetFile, dir)
}
func pathExists(path string, dir bool) (bool, error) {
info, err := os.Stat(path)
if err == nil {
return info.IsDir() == dir, nil
}
if os.IsNotExist(err) {
return false, nil
}
return true, err
}
var (
	// ErrNoWorkDir is returned when the CLI was started without a working
	// directory set; project() wraps it into its error message.
	ErrNoWorkDir = errors.New("no working directory was set")
)
|
<reponame>noSTALKER/nxtcore
#include "catch.hpp"
#include "../include/Container/RingBuffer.h"
TEST_CASE("RingBuffer Tests", "[ring_buffer]") {
    SECTION("popFront() and pushBack() tests") {
        nxt::core::RingBuffer<int> ring_buffer;

        // Single push: element visible at the front, capacity a power of two.
        ring_buffer.pushBack(1);
        REQUIRE(ring_buffer.size() == 1);
        REQUIRE(ring_buffer.front() == 1);
        REQUIRE(ring_buffer.capacity() >= 1);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // Grow to four elements; front stays the oldest element.
        ring_buffer.pushBack(2);
        ring_buffer.pushBack(3);
        ring_buffer.pushBack(4);
        REQUIRE(ring_buffer.size() == 4);
        REQUIRE(ring_buffer.front() == 1);
        REQUIRE(ring_buffer.capacity() >= 4);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // Grow past a reallocation boundary; ordering must be preserved.
        ring_buffer.pushBack(5);
        ring_buffer.pushBack(6);
        ring_buffer.pushBack(7);
        REQUIRE(ring_buffer.size() == 7);
        REQUIRE(ring_buffer.front() == 1);
        REQUIRE(ring_buffer.capacity() >= 7);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // popFrontAndExtract removes and returns the oldest element.
        REQUIRE(ring_buffer.popFrontAndExtract() == 1);
        REQUIRE(ring_buffer.size() == 6);
        REQUIRE(ring_buffer.front() == 2);
        REQUIRE(ring_buffer.capacity() >= 6);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // Plain popFront discards from the front.
        ring_buffer.popFront();
        ring_buffer.popFront();
        REQUIRE(ring_buffer.size() == 4);
        REQUIRE(ring_buffer.front() == 4);
        REQUIRE(ring_buffer.capacity() >= 4);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // Mixed pops and pushes exercise wrap-around of the internal storage.
        ring_buffer.pushBack(8);
        ring_buffer.pushBack(9);
        ring_buffer.pushBack(10);
        ring_buffer.pushBack(11);
        ring_buffer.pushBack(12);
        REQUIRE(ring_buffer.size() == 9);
        REQUIRE(ring_buffer.front() == 4);
        REQUIRE(ring_buffer.capacity() >= 9);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));
    }

    SECTION("Copy Constructor tests") {
        nxt::core::RingBuffer<int> ring_buffer;
        ring_buffer.pushBack(1);
        ring_buffer.pushBack(2);
        ring_buffer.pushBack(3);
        ring_buffer.pushBack(4);
        ring_buffer.pushBack(5);
        ring_buffer.pushBack(6);
        ring_buffer.pushBack(7);
        REQUIRE(ring_buffer.size() == 7);
        REQUIRE(ring_buffer.front() == 1);
        REQUIRE(ring_buffer.capacity() >= 7);
        REQUIRE(nxt::core::isPowerOf2(ring_buffer.capacity()));

        // Copy must replicate contents and remain independently mutable.
        nxt::core::RingBuffer copy_buffer = ring_buffer;
        REQUIRE(copy_buffer.size() == 7);
        REQUIRE(copy_buffer.front() == 1);
        REQUIRE(copy_buffer.capacity() >= 7);
        REQUIRE(nxt::core::isPowerOf2(copy_buffer.capacity()));

        copy_buffer.popFront();
        copy_buffer.popFront();
        copy_buffer.popFront();
        copy_buffer.pushBack(8);
        copy_buffer.pushBack(9);
        copy_buffer.pushBack(10);
        copy_buffer.pushBack(11);
        copy_buffer.pushBack(12);
        REQUIRE(copy_buffer.size() == 9);
        REQUIRE(copy_buffer.front() == 4);
        REQUIRE(copy_buffer.capacity() >= 9);
        REQUIRE(nxt::core::isPowerOf2(copy_buffer.capacity()));

        // Copy of a wrapped buffer preserves logical order.
        nxt::core::RingBuffer copy_buffer_2 = copy_buffer;
        REQUIRE(copy_buffer_2.size() == 9);
        REQUIRE(copy_buffer_2.front() == 4);
        REQUIRE(copy_buffer_2.capacity() >= 9);
        REQUIRE(nxt::core::isPowerOf2(copy_buffer_2.capacity()));

        // Move construction transfers contents and empties the source.
        nxt::core::RingBuffer move_buffer = std::move(copy_buffer_2);
        REQUIRE(move_buffer.size() == 9);
        REQUIRE(move_buffer.front() == 4);
        REQUIRE(move_buffer.capacity() >= 9);
        REQUIRE(nxt::core::isPowerOf2(move_buffer.capacity()));
        REQUIRE(copy_buffer_2.size() == 0);
        REQUIRE(copy_buffer_2.capacity() == 0);
    }
}
|
def format_goto_rommon(self, timeout=300):
    """Erase the device via 'format everything' and wait for it to reach ROMMON.

    Navigates to the fxos state, then local-mgmt, sends the destructive
    'format everything' command, auto-confirms the prompt, and blocks until
    the unit drops to ROMMON.

    :param timeout: seconds to wait for the device to reach ROMMON
    """
    self.go_to('fxos_state')
    self.go_to('local_mgmt_state')
    self.spawn_id.sendline('format everything')
    # Auto-answer the confirmation prompt so the format proceeds unattended.
    d = Dialog([
        ['Do you still want to format', 'sendline(yes)', None, False, False],
    ])
    d.process(self.spawn_id, timeout=30)
    self.wait_for_rommon(timeout=timeout)
/**
* Returns an ASCII hex representation of the MD5 of the content of 'file'.
*/
private static String md5(File file) {
byte[] digest = null;
try {
MessageDigest digester = MessageDigest.getInstance("MD5");
byte[] bytes = new byte[8192];
FileInputStream in = new FileInputStream(file);
try {
int byteCount;
while ((byteCount = in.read(bytes)) > 0) {
digester.update(bytes, 0, byteCount);
}
digest = digester.digest();
} finally {
in.close();
}
} catch (Exception cause) {
throw new RuntimeException("Unable to compute MD5 of \"" + file + "\"", cause);
}
return (digest == null) ? null : byteArrayToHexString(digest);
} |
import './capsule-tabs.less'
import { CapsuleTab, CapsuleTabs } from './capsule-tabs'
import { attachPropertiesToComponent } from '../../utils/attach-properties-to-component'

export type { CapsuleTabsProps, CapsuleTabProps } from './capsule-tabs'

// Expose the compound component: <CapsuleTabs> with <CapsuleTabs.Tab> attached.
export default attachPropertiesToComponent(CapsuleTabs, {
  Tab: CapsuleTab,
})
|
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { ChildService } from '../../../../@core/services/child.service';
import { switchMap } from 'rxjs/operators';
import { Child } from '../../../../@core/models/child';
import { UsersService } from '../../../../@core/services/users.service';
import { User, USERROLE } from '../../../../@core/models/user';
import { NbDialogService } from '@nebular/theme';
import { YesNoDialogComponent } from '../../../../components/yes-no-dialog/yes-no-dialog.component';
import { group } from 'console';
import { ToastService } from '../../../../@core/services/toast.service';
@Component({
  selector: 'ngx-child-detail',
  templateUrl: './child-detail.component.html',
  styleUrls: ['./child-detail.component.scss']
})
export class ChildDetailComponent implements OnInit {
  // Route parameter identifying the displayed child.
  childId: number;
  // The signed-in user; drives the role checks (isTeacher/isAdmin) below.
  currentUser: User;
  // The child record loaded for childId.
  child: Child;

  constructor(
    private route: ActivatedRoute,
    private router: Router,
    private userService: UsersService,
    private childService: ChildService,
    private toastService: ToastService,
    private dialogService: NbDialogService
  ) {
    // NOTE(review): these subscriptions are never unsubscribed; confirm the
    // observables complete, or consider cleanup on destroy.
    this.userService.getCurrentUser().subscribe((user: User) => { this.currentUser = user; })
    // Re-load the child whenever the childId route parameter changes.
    this.route.paramMap.pipe(switchMap(
      params => {
        this.childId = Number(params.get('childId'));
        return this.childService.getChildById(this.childId);
      }
    )).subscribe((child: Child) => {
      this.child = child;
    })
  }

  ngOnInit(): void {
  }

  // Navigate to a sibling's detail page (relative to the current route).
  onSiblingClick(sibling: Child) {
    this.router.navigate([`../${sibling.id}`], { relativeTo: this.route });
  }

  // Confirm, then detach this child from its sibling group and clear siblings.
  onRemoveSiblingClick() {
    this.dialogService.open(YesNoDialogComponent, { context: {
      title: 'This child will be removed from sibling. Are you going to continue?'
    }}).onClose.subscribe(ret => {
      if (ret == true)
        this.childService.RemoveChildFromSibling(this.child).subscribe(groupId => {
          this.child.sibling_group = groupId;
          this.child.siblings = [];
        })
    })
  }

  // Open the add-siblings page for this child.
  onAddSiblingClick() {
    this.router.navigate(['addsiblings'], { relativeTo: this.route });
  }

  // Open the set-password page for this child.
  onSetPWDClick() {
    this.router.navigate(['setpwd'], { relativeTo: this.route });
  }

  // Return to the children list.
  back() {
    this.router.navigate(['/children']);
  }

  isTeacher() {
    return this.currentUser.role == USERROLE.Teacher;
  }

  // Photo URL for the template; empty string while the child is still loading.
  getPhotoOfChild() {
    if (this.child)
      return this.child.photo ? this.child.photo : '';
    return '';
  }

  isAdmin(user: User) {
    return user.role == USERROLE.Admin;
  }

  // Confirm, delete the child, notify, then navigate back to the parent route.
  onDelete() {
    this.dialogService.open(YesNoDialogComponent, { context: {
      title: 'Are you sure?'
    }}).onClose.subscribe(ret => {
      if (ret == true)
        this.childService.deleteChild(this.childId).subscribe(_ => {
          this.toastService.warning('Child has been deleted', 'Delete');
          this.router.navigate(['..'], { relativeTo: this.route });
        })
    })
  }
}
|
// SetResponseHeaders sets the response headers being sent back to the client
func SetResponseHeaders(w http.ResponseWriter, authToken string, apiKey string) http.ResponseWriter {
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.Header().Add("Access-Control-Allow-Headers", "Content-Type, Auth-Token, API-Key")
w.Header().Add("Access-Control-Expose-Headers", "Content-Type, Auth-Token, API-Key")
w.Header().Add("Access-Control-Allow-Origin", "*")
w.Header().Add("Access-Control-Allow-Methods", "GET,DELETE,POST,PATCH")
if authToken != "" {
w.Header().Add("Auth-Token", authToken)
}
if apiKey != "" {
w.Header().Add("API-Key", apiKey)
}
return w
} |
/**
* Class with only static methods used for evaluation of expressions. Does:
* - replace ${property.name} with the actual property value (from System.getProperty())
* - replace ${property.name : default.value} with property value or default.value if the property is not defined
* - evaluate infix expressions inside #{ expression } block, available operators are:
* '+' (addition), '-' (subtraction), '*' (multiplying), '/' (division), '%' (modulo operation),
* '..' (range generation), ',' (adding to list), '(' and ')' as usual parentheses.
*
* Examples:
* #{ 1..3,5 } -> 1,2,3,5
* #{ ( ${x} + 5 ) * 6 } with -Dx=2 -> 42
* foo${y}bar with -Dy=goo -> foogoobar
*
* @author Radim Vansa <[email protected]>
*/
public class Evaluator {
private final static Log log = LogFactory.getLog(Evaluator.class);
/**
 * Parse string possibly containing expressions and properties and convert the value to integer.
 *
 * @throws NumberFormatException if the evaluated string is not a valid integer
 */
public static int parseInt(String string) {
    return Integer.parseInt(parseString(string));
}
/**
 * Parse string possibly containing expressions and properties.
 *
 * <p>
 * Scans the input left to right: literal text is copied through, ${...} blocks are resolved via
 * {@code evalProperty}, and #{...} blocks are evaluated as infix expressions using two stacks
 * (shunting-yard style: one stack of operators, one of operand values).
 */
public static String parseString(String string) {
    if (string == null) return null;
    StringBuilder sb = new StringBuilder();
    int currentIndex = 0;
    while (currentIndex < string.length()) {
        // Find the nearest ${ or #{ marker after currentIndex.
        int propertyIndex = string.indexOf("${", currentIndex);
        int expressionIndex = string.indexOf("#{", currentIndex);
        int nextIndex = propertyIndex < 0 ?
            (expressionIndex < 0 ? string.length() : expressionIndex) :
            (expressionIndex < 0 ? propertyIndex : Math.min(expressionIndex, propertyIndex));
        // Copy the literal text before the marker, then skip the two marker chars.
        sb.append(string.substring(currentIndex, nextIndex));
        currentIndex = nextIndex + 2;
        if (nextIndex == propertyIndex) {
            // ${property[:default]} -- resolve up to the closing brace.
            nextIndex = string.indexOf('}', currentIndex);
            if (nextIndex < 0) {
                throw new IllegalArgumentException(string);
            }
            sb.append(evalProperty(string, currentIndex, nextIndex));
            currentIndex = nextIndex + 1;
        } else if (nextIndex == expressionIndex) {
            // #{ expression } -- evaluate with operator/operand stacks.
            Stack<Operator> operators = new Stack<>();
            Stack<Value> operands = new Stack<>();
            Tokenizer tokenizer = new Tokenizer(string, Operator.symbols(), true, false, currentIndex);
            boolean closed = false;
            // we set this to true because if '-' is on the beginning, is interpreted as sign
            boolean lastTokenIsOperator = true;
            boolean negativeSign = false;
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                Operator op = Operator.from(token);
                if (op == null) {
                    // Not an operator: push as an operand, applying a pending unary minus.
                    operands.push(new Value(negativeSign ? "-" + token : token));
                    // FIX: the sign applies to this operand only; previously it was
                    // never reset, negating every subsequent operand as well.
                    negativeSign = false;
                    lastTokenIsOperator = false;
                    continue;
                } else if (op.isWhite()) {
                    // do not set lastTokenIsOperator
                    continue;
                } else if (op == Operator.OPENVAR) {
                    // Nested ${...} inside the expression: collect the name up to '}'.
                    if (!tokenizer.hasMoreTokens()) throw new IllegalArgumentException(string);
                    StringBuilder var = new StringBuilder();
                    while (tokenizer.hasMoreTokens()) {
                        token = tokenizer.nextToken();
                        if ((op = Operator.from(token)) == null || op.isWhite()) {
                            var.append(token);
                        } else {
                            break;
                        }
                    }
                    if (op != Operator.CLOSEVAR) {
                        throw new IllegalArgumentException("Expected '}' but found " + token + " in " + string);
                    }
                    operands.push(evalProperty(var.toString(), 0, var.length()));
                    lastTokenIsOperator = false;
                    continue;
                } else if (op == Operator.CLOSEVAR) {
                    // end of expression to be evaluated
                    closed = true;
                    break;
                } else if (op.isFunction()) {
                    operators.push(op);
                } else if (op == Operator.OPENPAR) {
                    operators.push(op);
                } else if (op == Operator.CLOSEPAR) {
                    // Unwind stacked operators until the matching '('.
                    while ((op = operators.pop()) != Operator.OPENPAR) {
                        op.exec(operands);
                    }
                } else if (op == Operator.MINUS && lastTokenIsOperator) {
                    // '-' directly after an operator is a sign, not subtraction.
                    negativeSign = true;
                } else {
                    // Binary operator: apply stacked operators of higher-or-equal
                    // precedence first, then push this one.
                    while (true) {
                        if (operators.isEmpty() || operators.peek() == Operator.OPENPAR ||
                            operators.peek().precedence() < op.precedence()) {
                            operators.push(op);
                            break;
                        }
                        operators.pop().exec(operands);
                    }
                }
                lastTokenIsOperator = true;
            }
            if (!closed) {
                throw new IllegalArgumentException("Expression is missing closing '}': " + string);
            }
            // Apply any remaining operators; exactly one result must remain.
            while (!operators.empty()) {
                operators.pop().exec(operands);
            }
            sb.append(operands.pop());
            if (!operands.empty()) {
                throw new IllegalArgumentException(operands.size() + " operands not processed: top=" + operands.pop() + " all=" + operands);
            }
            currentIndex = tokenizer.getPosition();
        }
    }
    return sb.toString();
}
/**
 * Resolves the property named in string[startIndex, endIndex), honoring an
 * optional ':default' suffix. Resolution order: system property, then (for
 * "env." names) environment variable, then (for "random." names) a generated
 * value, then the default.
 *
 * @throws IllegalArgumentException when the property cannot be resolved and
 *         no default was given
 */
private static Value evalProperty(String string, int startIndex, int endIndex) {
    int colonIndex = string.indexOf(':', startIndex);
    String property;
    Value value = null, def = null;
    // Split "name : default" if the colon falls inside this property span.
    if (colonIndex < 0 || colonIndex > endIndex) {
        property = string.substring(startIndex, endIndex).trim();
    } else {
        property = string.substring(startIndex, colonIndex).trim();
        def = new Value(string.substring(colonIndex + 1, endIndex).trim());
    }
    String strValue = System.getProperty(property);
    if (strValue != null && !strValue.isEmpty()) {
        value = new Value(strValue.trim());
    } else {
        if (property.startsWith("env.")) {
            String env = System.getenv(property.substring(4));
            if (env != null && !env.isEmpty()) {
                value = new Value(env.trim());
            }
        } else if (property.startsWith("random.")) {
            value = random(property);
        }
    }
    if (value != null) {
        return value;
    } else if (def != null) {
        return def;
    } else {
        // Dump everything that IS defined to help diagnose the failure.
        log.debugf("Failed to resolve property ${%s}, defined properties are: ", property);
        for (Map.Entry<Object, Object> prop : System.getProperties().entrySet()) {
            log.debugf("${%s} -> '%s'", prop.getKey(), prop.getValue());
        }
        for (Map.Entry<String, String> env : System.getenv().entrySet()) {
            log.debugf("${env.%s} -> '%s'", env.getKey(), env.getValue());
        }
        throw new IllegalArgumentException("Property '" + property + "' not defined!");
    }
}
private static Value random(String type) {
Random random = new Random();
if (type.equals("random.int")) {
return new Value(random.nextInt() & Integer.MAX_VALUE);
} else if (type.equals("random.long")) {
return new Value(random.nextLong() & Long.MAX_VALUE);
} else if (type.equals("random.double")) {
return new Value(random.nextDouble());
} else if (type.equals("random.boolean")) {
return new Value(String.valueOf(random.nextBoolean()));
} else {
return null;
}
}
private static Value range(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
long from = first.getLong();
long to = second.getLong();
List<Value> values = new ArrayList((int) Math.abs(from - to));
long inc = from <= to ? 1 : -1;
for (long i = from; from <= to ? i <= to : i >= to; i += inc) values.add(new Value(i));
return new Value(values);
} else {
throw new IllegalArgumentException(first + " .. " + second);
}
}
private static Value multiply(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
return new Value(first.getLong() * second.getLong());
} else if (first.type.canBeDouble() && second.type.canBeDouble()) {
return new Value(first.getDouble() * second.getDouble());
} else {
throw new IllegalArgumentException(first + " * " + second);
}
}
private static Value minus(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
return new Value(first.getLong() - second.getLong());
} else if (first.type.canBeDouble() && second.type.canBeDouble()) {
return new Value(first.getDouble() - second.getDouble());
} else {
throw new IllegalArgumentException(first + " - " + second);
}
}
private static Value plus(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
return new Value(first.getLong() + second.getLong());
} else if (first.type.canBeDouble() && second.type.canBeDouble()) {
return new Value(first.getDouble() + second.getDouble());
} else {
throw new IllegalArgumentException(first + " + " + second);
}
}
private static Value div(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
return new Value(first.getLong() / second.getLong());
} else if (first.type.canBeDouble() && second.type.canBeDouble()) {
return new Value(first.getDouble() / second.getDouble());
} else {
throw new IllegalArgumentException(first + " / " + second);
}
}
private static Value modulo(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
return new Value(first.getLong() % second.getLong());
} else {
throw new IllegalArgumentException(first + " % " + second);
}
}
private static Value power(Value first, Value second) {
if (first.type.canBeLong() && second.type.canBeLong()) {
long base = first.getLong();
long power = second.getLong();
long value = 1;
if (power < 0) {
return new Value(Math.pow(base, power));
}
for (long i = power; i > 0; --i) {
value *= base;
}
return new Value(value);
} else if (first.type.canBeDouble() && second.type.canBeDouble()) {
return new Value(Math.pow(first.getDouble(), second.getDouble()));
} else {
throw new IllegalArgumentException(first + "^" + second);
}
}
private static Value concat(Value first, Value second) {
List<Value> list = new ArrayList();
if (first.type == ValueType.LIST) {
list.addAll(first.getList());
} else {
list.add(first);
}
if (second.type == ValueType.LIST) {
list.addAll(second.getList());
} else {
list.add(second);
}
return new Value(list);
}
private static Value max(Value value) {
if (value.type == ValueType.LIST) {
Value max = null;
for (Value v : value.getList()) {
if (max == null) {
max = v;
} else if (max.type.canBeLong() && v.type.canBeLong()) {
max = max.getLong() >= v.getLong() ? max : v;
} else if (max.type.canBeDouble() && v.type.canBeDouble()) {
max = max.getDouble() >= v.getDouble() ? max : v;
} else {
throw new IllegalArgumentException("max(" + value + ")");
}
}
if (max == null) {
throw new IllegalArgumentException("max of 0 values");
}
return max;
} else {
log.warn("Computing max from single value");
return value;
}
}
private static Value min(Value value) {
if (value.type == ValueType.LIST) {
Value min = null;
for (Value v : value.getList()) {
if (min == null) {
min = v;
} else if (min.type.canBeLong() && v.type.canBeLong()) {
min = min.getLong() <= v.getLong() ? min : v;
} else if (min.type.canBeDouble() && v.type.canBeDouble()) {
min = min.getDouble() <= v.getDouble() ? min : v;
} else {
throw new IllegalArgumentException("min(" + value + ")");
}
}
if (min == null) {
throw new IllegalArgumentException("min of 0 values");
}
return min;
} else {
log.warn("Computing min from single value");
return value;
}
}
private static Value floor(Value value) {
if (value.type.canBeLong()) {
return value;
} else if (value.type.canBeDouble()) {
return new Value((long) Math.floor(value.getDouble()));
} else {
throw new IllegalArgumentException("floor(" + value + ")");
}
}
private static Value ceil(Value value) {
if (value.type.canBeLong()) {
return value;
} else if (value.type.canBeDouble()) {
return new Value((long) Math.ceil(value.getDouble()));
} else {
throw new IllegalArgumentException("abs(" + value + ")");
}
}
private static Value abs(Value value) {
if (value.type.canBeLong()) {
return new Value(Math.abs(value.getLong()));
} else if (value.type.canBeDouble()) {
return new Value(Math.abs(value.getDouble()));
} else {
throw new IllegalArgumentException("abs(" + value + ")");
}
}
    /**
     * Kind of a Value together with its numeric conversion rules: LONG can be
     * read both as long and double, DOUBLE only as double, STRING and LIST as
     * neither (the arithmetic helpers check these flags before getLong()/
     * getDouble()).
     */
    private enum ValueType {
        STRING(false, false),
        LONG(true, true),
        DOUBLE(false, true),
        LIST(false, false);
        // True when values of this type may safely be read via getLong().
        private final boolean canBeLong;
        // True when values of this type may safely be read via getDouble().
        private final boolean canBeDouble;
        ValueType(boolean canBeLong, boolean canBeDouble) {
            this.canBeLong = canBeLong;
            this.canBeDouble = canBeDouble;
        }
        public boolean canBeLong() {
            return canBeLong;
        }
        public boolean canBeDouble() {
            return canBeDouble;
        }
    }
    /**
     * Immutable tagged value used by the expression evaluator. Each instance
     * is one of the ValueType kinds; the numeric accessors are only meaningful
     * for types whose canBeLong()/canBeDouble() flags allow them.
     */
    private static class Value {
        // Tag describing which of the fields below carries the payload.
        public final ValueType type;
        private final double doubleValue;
        private final long longValue;
        // String form for scalar types, or the List<Value> for LIST.
        private final Object objectValue;
        private Value(long longValue) {
            this.type = ValueType.LONG;
            // A long is also usable as a double, so both numeric slots are set.
            this.doubleValue = longValue;
            this.longValue = longValue;
            this.objectValue = String.valueOf(longValue);
        }
        private Value(double doubleValue) {
            this.type = ValueType.DOUBLE;
            this.doubleValue = doubleValue;
            this.longValue = 0;
            this.objectValue = String.valueOf(doubleValue);
        }
        /**
         * Parses the string as a long first, then as a double; when neither
         * parse succeeds the value stays a plain STRING.
         */
        private Value(String string) {
            ValueType t = ValueType.STRING;
            double d = 0;
            long l = 0;
            Object o = string;
            try {
                d = l = Long.parseLong(string);
                o = l;
                t = ValueType.LONG;
            } catch (NumberFormatException e) {
                try {
                    o = d = Double.parseDouble(string);
                    t = ValueType.DOUBLE;
                } catch (NumberFormatException e2) {
                    // Not numeric: keep the STRING type and the raw text.
                }
            }
            type = t;
            doubleValue = d;
            longValue = l;
            objectValue = o;
        }
        public Value(List<Value> values) {
            type = ValueType.LIST;
            doubleValue = 0;
            longValue = 0;
            objectValue = values;
        }
        public long getLong() {
            return longValue;
        }
        public double getDouble() {
            return doubleValue;
        }
        // Only valid when type == LIST; the cast is unchecked by design.
        public List<Value> getList() {
            return (List<Value>) objectValue;
        }
        @Override
        public String toString() {
            if (type == ValueType.LIST) {
                // Comma-joins the elements; nested lists get brackets.
                StringBuilder sb = new StringBuilder();
                for (Value v : (List<Value>) objectValue) {
                    if (sb.length() != 0) sb.append(", ");
                    // inner lists would require special treatment
                    if (v.type == ValueType.LIST) {
                        sb.append("[").append(v).append("]");
                    } else {
                        sb.append(v);
                    }
                }
                return sb.toString();
            } else {
                return String.valueOf(objectValue);
            }
        }
    }
    /** Unary operation over a Value, used by function-style Operators. */
    private static interface OneArgFunctor {
        Value exec(Value value);
    }
    /** Binary operation over two Values, used by infix Operators. */
    private static interface TwoArgFunctor {
        Value exec(Value first, Value second);
    }
    /**
     * Tokens recognized by the expression parser. Each constant carries its
     * textual symbol, an infix precedence (higher binds tighter), whether it
     * is whitespace, whether it is a named function, and the unary or binary
     * functor that evaluates it (null for purely structural tokens such as
     * parentheses).
     */
    private enum Operator {
        // Whitespace tokens: skipped by the parser, never evaluated.
        SPACE(" ", 0, true, false, null, null),
        TAB("\t", 0, true, false, null, null),
        NEWLINE("\n", 0, true, false, null, null),
        CR("\r", 0, true, false, null, null),
        // Infix arithmetic operators, ordered by precedence (see second arg).
        PLUS("+", 100, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return plus(first, second);
            }
        }),
        MINUS("-", 100, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return minus(first, second);
            }
        }),
        MULTIPLY("*", 200, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return multiply(first, second);
            }
        }),
        DIVIDE("/", 200, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return div(first, second);
            }
        }),
        MODULO("%", 200, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return modulo(first, second);
            }
        }),
        POWER("^", 300, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return power(first, second);
            }
        }),
        // List constructors: ".." builds inclusive ranges, "," concatenates.
        RANGE("..", 50, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return range(first, second);
            }
        }),
        COMMA(",", 10, false, false, null, new TwoArgFunctor() {
            @Override
            public Value exec(Value first, Value second) {
                return concat(first, second);
            }
        }),
        // Structural tokens: grouping and ${...} variable delimiters.
        OPENPAR("(", 0, false, false, null, null),
        CLOSEPAR(")", 0, false, false, null, null),
        OPENVAR("${", 0, false, false, null, null),
        CLOSEVAR("}", 0, false, false, null, null),
        // Named unary functions.
        MAX("max", 0, false, true, new OneArgFunctor() {
            @Override
            public Value exec(Value value) {
                return max(value);
            }
        }, null),
        MIN("min", 0, false, true, new OneArgFunctor() {
            @Override
            public Value exec(Value value) {
                return min(value);
            }
        }, null),
        FLOOR("floor", 0, false, true, new OneArgFunctor() {
            @Override
            public Value exec(Value value) {
                return floor(value);
            }
        }, null),
        CEIL("ceil", 0, false, true, new OneArgFunctor() {
            @Override
            public Value exec(Value value) {
                return ceil(value);
            }
        }, null),
        ABS("abs", 0, false, true, new OneArgFunctor() {
            @Override
            public Value exec(Value value) {
                return abs(value);
            }
        }, null),
        ;
        // Symbol -> constant lookup table, populated once in the static block.
        private static Map<String, Operator> symbolMap = new HashMap<String, Operator>();
        private String symbol;
        private int precedence;
        private boolean isWhite;
        private boolean isFunction;
        // Exactly one functor is non-null for evaluable operators; both are
        // null for whitespace and structural tokens.
        private OneArgFunctor functor1;
        private TwoArgFunctor functor2;
        static {
            for (Operator op : values()) {
                symbolMap.put(op.symbol, op);
            }
        }
        Operator(String symbol, int precedence, boolean isWhite, boolean isFunction, OneArgFunctor functor1, TwoArgFunctor functor2) {
            this.symbol = symbol;
            this.precedence = precedence;
            this.isWhite = isWhite;
            this.isFunction = isFunction;
            this.functor1 = functor1;
            this.functor2 = functor2;
        }
        /**
         * @return Symbols that don't belong to functions
         */
        public static String[] symbols() {
            Operator[] values = values();
            ArrayList<String> symbols = new ArrayList<>(values.length);
            for (int i = 0; i < values.length; ++i) {
                if (!values[i].isFunction()) {
                    symbols.add(values[i].symbol);
                }
            }
            return symbols.toArray(new String[symbols.size()]);
        }
        /** @return The Operator for the given symbol, or null if unknown. */
        public static Operator from(String symbol) {
            return symbolMap.get(symbol);
        }
        public int precedence() {
            return precedence;
        }
        public boolean isWhite() {
            return isWhite;
        }
        public boolean isFunction() {
            return isFunction;
        }
        /**
         * Pops this operator's operands off the stack, applies the functor
         * and pushes the result. Throws for non-evaluable (structural) tokens.
         */
        public void exec(Stack<Value> operands) {
            if (functor1 != null) {
                operands.push(functor1.exec(operands.pop()));
            } else if (functor2 != null) {
                // Operands come off the stack in reverse order.
                Value second = operands.pop();
                Value first = operands.pop();
                operands.push(functor2.exec(first, second));
            } else {
                throw new IllegalStateException("This operator cannot be executed.");
            }
        }
    }
} |
// InsertDBRepository inserts a new repository into repository table.
func (pR *PostgresRequests) InsertDBRepository(repository types.Repository) error {
if repository.URL == "" || time.Time.IsZero(repository.CreatedAt) {
return errors.New("Empty repository data")
}
repositoryMap := map[string]interface{}{
"repositoryURL": repository.URL,
"createdAt": repository.CreatedAt,
}
finalQuery, values := ConfigureInsertQuery(
`INSERT into repository`, repositoryMap)
rowsAff, err := pR.DataRetriever.WriteInDB(finalQuery, values...)
if err != nil {
return err
}
if rowsAff == int64(0) {
return errors.New("No data was inserted")
}
return nil
} |
// node_modules/ng-packagr/lib/ngc/compile-source-files.d.ts
import * as ng from '@angular/compiler-cli';
import * as ts from 'typescript';
import { StylesheetProcessor } from '../ng-v5/entry-point/resources/stylesheet-processor';
import { BuildGraph } from '../brocc/build-graph';
import { EntryPointNode } from '../ng-v5/nodes';
/**
 * Compiles the TypeScript sources of one entry point.
 *
 * NOTE(review): this is a declaration file; runtime behaviour is inferred from
 * the signature only — confirm details against the implementation.
 *
 * @param graph Build graph the entry point belongs to.
 * @param entryPoint Entry point whose sources are compiled.
 * @param tsConfig Parsed Angular/TypeScript compiler configuration.
 * @param moduleResolutionCache Shared TypeScript module resolution cache.
 * @param stylesheetProcessor Processor applied to component stylesheets.
 * @param extraOptions Optional compiler option overrides.
 * @param declarationDir Optional output directory for declaration files.
 * @returns Promise resolving when compilation has finished.
 */
export declare function compileSourceFiles(graph: BuildGraph, entryPoint: EntryPointNode, tsConfig: ng.ParsedConfiguration, moduleResolutionCache: ts.ModuleResolutionCache, stylesheetProcessor: StylesheetProcessor, extraOptions?: Partial<ng.CompilerOptions>, declarationDir?: string): Promise<void>;
|
/*
3 6 9
4 4 4
0 0 0
5 7 11
8 11 9
2 8 9
*/
#include<bits/stdc++.h>
using namespace std;

// For each input triple (r, g, b), maximizes the number of groups of three:
// reserve i items from each of the three counts (i = 0..2, limited by the
// smallest count) to form i mixed groups, then form same-kind groups of three
// from the remainders, and keep the best total over all i.
int main()
{
    int r, g, b;
    while (scanf("%d %d %d", &r, &g, &b) == 3) {
        int smallest = min(r, min(g, b));
        int best = 0;
        for (int i = 0; i <= 2; ++i) {
            if (smallest >= i) {
                best = max(best, i + (r - i) / 3 + (g - i) / 3 + (b - i) / 3);
            }
        }
        printf("%d\n", best);
    }
    return 0;
}
|
# Source: flyinactor91/METAR-RasPi
"""
<NAME> - <EMAIL>
config.py - Shared METAR display settings
"""
import json
import logging
from os import path
from pathlib import Path
# Seconds between server pings
update_interval = 600
# Seconds between connection retries
timeout_interval = 60
# Set log level - CRITICAL, ERROR, WARNING, INFO, DEBUG
log_level = logging.DEBUG
# Send METAR Pi logs to a file. Ex: "output.log"
log_file = None
# Set to True to shutdown the Pi when exiting the program
shutdown_on_exit = False
# ------- Plate Settings ------- #
# Seconds between plate button reads
button_interval = 0.2
# Seconds between row 2 char scroll
scroll_interval = 0.2
# Remarks section in scroll line
include_remarks = False
# ------- Screen Settings ------ #
# Size of the screen. Loads the layout from "./screen_settings"
layout = "320x240"
LOC = Path(path.abspath(path.dirname(__file__)))
layout = LOC / "screen_settings" / f"{layout}.json"
layout = json.load(layout.open())
# Run the program fullscreen or windowed
fullscreen = True
# Hide the mouse on a touchscreen
hide_mouse = True
# Clock displays UTC or local time
clock_utc = True
# Clock strftime format string
clock_format = r"%H:%M" # 24-hour
# clock_format = r"%#I:%M" # 12-hour
# Report timestamp strftime format string
timestamp_format = r"%d-%H:%M" # 24-hour
# timestamp_format = r"%d-%#I:%M" # 12-hour
|
/**
 * Helper class for the parser: pairs a base character with the transforms
 * that may be applied to it.
 */
class SpecialCharacter {
    // The character the transforms below apply to.
    final char baseChar;
    // Available transformations for baseChar (CharacterTransform is declared
    // elsewhere in this project).
    final CharacterTransform[] transforms;
    SpecialCharacter(char baseChar, CharacterTransform[] transforms) {
        this.baseChar = baseChar;
        this.transforms = transforms;
    }
}
Making Inferences: The Case of Scalar Implicature Processing
Making Inferences: The Case of Scalar Implicature Processing Judith Degen ([email protected]) Michael K. Tanenhaus ([email protected]) Department of Brain and Cognitive Sciences, University of Rochester Rochester, NY 14627-0268, USA Abstract Scalar implicature has served as a test case for investigating the nature of inference processes in language comprehension. Specifically, the question of whether or not scalar implicatures are computed by default has been extensively investigated in recent years. We argue that the question of default is overly simplistic and propose instead to think of scalar implicature computation as a problem of optimal cue combination within a constraint-based framework. We provide evidence from three experiments supporting the view that multiple constraints of differing strength operate in parallel to provide probabilistic support for or against an implicature. Keywords: experimental pragmatics; scalar implicature; eye-tracking; subitizing Introduction Successful communication requires comprehenders to infer a speaker's intended meaning from an underspecified utterance. While some information is transmitted via an utterance's semantic content, additional meaning is computed by taking into account pragmatic information about the discourse context, as in the sample discourse in (1). (1) Alex: Did you submit your paper? Thomas: Some of the sections are written. Inference (a): Thomas didn't submit his paper. Inference (b): Some, but not all, of the sections are written. Here, Alex might infer that Thomas, being a cooperative speaker, intends to convey both that he hasn't yet submitted his paper and that some, but not all, of the sections are written. The former of these inferences is what Grice (1975) termed a particularized conversational implicature (PCI), while the latter is a prototypical example of a generalized conversational implicature (GCI).
According to Grice, both of these inferences arise because comprehenders assume that speakers conform to certain conventions of rational communicative behavior. The crucial difference between GCIs and PCIs lies in the role that context plays: PCIs are assumed to arise in virtue of special features of the context, while GCIs are assumed to arise unless blocked by context. Applied to (1): the inference that Thomas did not submit his paper is tied tightly to Alex's question and would not have arisen if Thomas's utterance was, for example, an answer to the question whether Thomas had written the introduction yet. However, the inference that some but not all of the sections are written is taken to arise independently of the context. This particular kind of inference, that arises in virtue of a speaker not uttering a relevant stronger alternative, is called a scalar implicature. In the example there is a stronger statement the speaker could have made but didn't (e.g. All of the sections are written). Under the assumption that a speaker is being as informative as possible, a weak implicature, that the speaker doesn't know whether all of the sections are written, is licensed. If the hearer further assumes that the speaker is competent with respect to the truth of the stronger statement, the implicature that the speaker believes that some, but not all of the sections are written, is licensed. In recent years the representation and processing of scalar implicatures has emerged as perhaps the defining problem in experimental pragmatics – a subfield of cognitive science that seeks to combine theoretical proposals from linguistics, computer science and philosophy with state-of-the-art experimental methods. Importantly for the experimental investigation of the phenomenon, scalar implicatures are cancelable, that is, they are defeasible inferences.
There are cases in which the scalar implicature does not contribute to achieving the discourse goal (Horn, 1984, Sperber & Wilson, 1995, Levinson, 2000). In such cases, where all that is relevant or can be known, is the lower bound (in our example that at least some of the sections are written), the implicature does not arise. Following Katsos, Breheny, and Williams (2005), we will call these contexts lower-bound contexts, and contexts in which pragmatic inference is required to achieve the discourse goal upper-bound contexts. Emphasising the GCI-PCI distinction, Levinson (2000) argues that GCIs are pragmatic default inferences that have evolved to maximize the speed of communication. It is only in special contexts that the inference has to be cancelled, where cancellation proceeds in a second, effortful step. In contrast to this default approach, a variety of approaches have viewed scalar implicature as a context-driven process (e.g. Hirschberg, 1991, Sperber & Wilson, 1995). Under these accounts, scalar implicatures are generated as part of the same process as PCIs. The strongest formulation of such accounts is a modular one, whereby pragmatic processing begins only once semantic processing is under way (Huang & Snedeker, 2009). That is, generating the implicature requires computing the literal meaning first. A notion of implicit cancellation is not necessary under the context-driven account since the implicature does not arise in lower-bound contexts in the first place. The default and context-driven approach make different empirical predictions. Under the default model, generating a scalar implicature should be very rapid, indeed as rapid as computing an expression's literal meaning. An increase in processing effort is predicted only for cases where the implicature is cancelled. In contrast, the modular context-driven
#include <iostream>
#include <vector>
#include <algorithm>

// Number of unordered pairs among n members: n * (n - 1) / 2.
// Extracted because the original repeated this formula three times.
static long long pairsWithin(long long n) {
    return n * (n - 1) / 2;
}

// Largest possible pair count: one peer in each of the first (teams - 1)
// teams, everyone else in the last team.
static long long maxPairs(long long peers, long long teams) {
    long long largestTeam = peers - (teams - 1);
    return pairsWithin(largestTeam);
}

// Smallest possible pair count: spread peers as evenly as possible, so
// (peers % teams) teams get one extra member.
static long long minPairs(long long peers, long long teams) {
    long long base = peers / teams;
    long long bigTeams = peers % teams;
    long long smallTeams = teams - bigTeams;
    return bigTeams * pairsWithin(base + 1) + smallTeams * pairsWithin(base);
}

int main() {
    long long kpeers = 0;
    long long kcommands = 0;
    std::cin >> kpeers >> kcommands;
    if (kpeers < kcommands) {
        // Not enough peers to form the requested number of teams.
        std::cout << "0 0" << std::endl;
    } else {
        std::cout << minPairs(kpeers, kcommands) << ' '
                  << maxPairs(kpeers, kcommands) << std::endl;
    }
    return 0;
}
|
Walking a Tightrope: Using Financial Diaries to Investigate Day-to-Day Financial Decisions and the Social Safety Net of the Financially Excluded
Financially vulnerable, low-income individuals are more likely to experience financial exclusion as they are unable to access financial services that meet their needs. How do they cope with economic instability, and what is the role of social networks in their coping strategies? Using financial diaries, we explore the day-to-day monetary transactions (n = 16,889) of forty-five low-to-moderate income individuals with restricted access to mainstream lending in Glasgow, UK, over a six-month period. Our sample includes users of microcredit and financial advice, as well as nonusers of these services. Findings reveal that informal lending to avoid the pernicious effects of short-term illiquidity was pervasive among these individuals. However, taking informal loans often strains valuable social capital and keeps people from building up a formal credit footprint. Our findings suggest that financially vulnerable populations would benefit from policies that focus on alternative financial mechanisms to help stabilize income-insecure individuals in the short-term.
1. To generate detailed financial data using diaries over a six-month period with individuals with low-to-moderate income to observe if and how they cope with financial instability. 2. To examine the extent to which formal and informal social capital-based financial instruments are used, how these financial networks are mobilized and operationalized, and how their use is managed and negotiated in the context of everyday lives of the financially excluded.
In the next section, we discuss the relationship between income volatility, financial exclusion, and the use of social capital to smooth consumption. We then describe our data and the financial diary methodology. Our results show the major role that social networks play in the lives of our diarists and offer insights about how these are used to deal with life events such as job loss, bereavement, or divorce. Finally, the discussion and conclusion section outlines the need to provide individuals living on the verge of financial exclusion with more effective and affordable services to prevent the damage of individuals' already limited social networks. Our findings suggest that financially vulnerable individuals would benefit from anti-poverty policies that focus on helping them to better cushion for life events.
Background

Recent studies have highlighted the importance of (increasing) short-term income volatility on the lives of the low-to-moderate income groups of the population. While high month-to-month income fluctuations have been observed in both the UK (Hills, McKnight, and Smithies 2006; Tomlinson 2018) and the United States (Farrell and Greig 2016; Hannagan and Morduch 2016), short-term stability is essential for financial security and intergenerational economic mobility (Morduch and Schneider 2017; Siwicki 2019). Individuals can achieve short-term financial stability, defined as having the means to cope with everyday shocks, while still progressing toward financial goals (Morduch and Siwicki 2017), mainly through: (a) income regularly exceeding expenses, (b) savings, (c) credit, and (d) social networks (Siwicki 2019).
In 2017, half of the Uk adult population showed characteristics of potential financial vulnerability, such as limited financial resilience; low financial capability; suffering a health-related problem that affects a person's day-to-day activity; or a recent life event, such as redundancy or job loss, unexpected reduction in working hours, bankruptcy, relationship breakdown, serious accident or illness, bereavement, or becoming the main carer of a close family member (Financial conduct Authority 2018). This vulnerability is more acute for the low-to-moderate income group, aggravated by low income and irregular unreliable pay combined with frequent expenditure peaks (Financial conduct Authority 2018; Tomlinson 2018). For this poorer population group, high-quality, flexible, and affordable financial services, such as saving or credit products, are essential to manage uncertainty and better cope with both fairly predictable and unexpected everyday challenges. however, an estimated 1.3 million Uk adults did not have a bank account in 2017 (Financial conduct Authority 2018), and around half the population experience more nuanced forms of financial exclusion in that they are unable to access or use mainstream financial services that are appropriate to their needs (e.g., a lack of home contents insurance or savings accounts, which in turn limits access to other products such as mainstream credit) (Bunyan, collins, and Torrisi 2016). Financial exclusion is a dimension of social exclusion (Wilson 2012), and unequal access and use of financial services have been linked to income and socioeconomic inequality and poverty (Affleck and Mellor 2006;Beck and Demirgüç-kunt 2008). 
Economic theory and empirical evidence indicate that inclusion in financial systems can enhance individual welfare through: (a) improved risk-management, consumption smoothing, and cushioning against "asset-depleting" strategies after an economic shock; and (b) efficient allocation of capital, partly by allowing access to riskier, potentially highly profitable, investments (honohan 2008).
Poorer communities in the Uk are historically more likely to experience financial exclusion with limited options (particularly of saving and credit products) available to them. To help fill this void, Uk and Scottish governments have actively encouraged the development of credit unions and community development finance institutions (cDFIs)-which have emerged as an affordable alternative to high-cost, subprime lending and mainstream banks-to help tackle financial exclusion (Lenton and Mosley 2013;Mchugh, Baker, and Donaldson 2019;Mchugh et al. 2014;Mckillop, Ward, and Wilson 2007). credit unions have traditionally focused on the provision of savings products while some cDFIs offer microcredit: small, fast, affordable loans that do not require collateral or credit history. however, these organizations do not exist at the scale needed to address exclusion, and pockets of the population remain underserved (Bunyan, collins, and Torrisi 2016). In the absence of providers that can help individuals to smooth their income and prevent illiquidity, the poor rely primarily on a host of informal credit providers (collins et al. 2009;Siwicki 2019).
The economic development literature has traditionally argued that social capital, in terms of social networks, positively impacts consumption smoothing (Townsend 1994;de la rocha 2001, 2006, and the same argument has recently been made for the Uk (Pericoli, Pierucci, and ventura 2015). Individuals with higher social capital are likely to be able to smooth their consumption by drawing on cash transfers from relatives and friends or in-kind exchanges. At the same time, experimental evidence has found that risk-pooling in groups of microcredit borrowers increases with higher social interaction (Feigenberg, Field, and Pande 2013). however, income and asset-poor individuals are also usually network poor-their personal networks include mostly resource-poor people in terms of knowledge, wealth, skills, power, and information-so the extent to which they can benefit from social networks is limited (van Eijk 2010; de la rocha 2006).
Our study analyzes high-frequency financial transaction data of forty-five financial diaries, including subjective rationales and field notes, which were collected monthly over six months from financially excluded individuals in glasgow. The most populous city in Scotland was considered ideal to undertake this study as it has traditionally been one of the most socioeconomically deprived in the Uk; home to the ten most deprived neighborhoods in Britain (Stewart et al. 2018) with extreme health inequalities between the richest and poorest being well documented (Marmot 2007). glasgow also has more varied and complex financial products and services for low-income groups when compared to other Uk cities. The high frequency of financial diaries, repeated interaction with diarists, and the consequent building of rapport and trust between the research team and the participants enabled us to develop a detailed understanding of individual coping strategies, including the financial role of social networks in the lives of the financially excluded.
Sampling
To gain access to individuals on the verge of exclusion, sampling was focused on users of nonmainstream financial products (Wilson 2012), such as microcredit and financial advice, as well as on a group of individuals with similar socioeconomic characteristics who were nonusers of these financial products. The sample was recruited through client referrals of a number of service providers working with our target population in glasgow: (1) grameen in the Uk-a microfinance institution (MFI) that supplies microloans to individuals to undertake productive ventures; loans are distributed via group lending, which involves borrowers organizing themselves into groups of five; (2) Scotcash-a not-for-profit social enterprise offering personal microloans and inclusive services such as assistance to open basic and savings bank accounts and financial advice; (3) glasgow central citizens Advice Bureau (cAB)-a publicly funded charity offering financial advice; (4) Money Advice Scotland (MAS)-an umbrella organization that promotes the development of free, independent, impartial, and confidential debt advice and financial inclusion; and (5) glasgow housing Association (ghA)-a not-for-profit organization and Scotland's largest social housing and property management group. ghA works in close partnership with both Scotcash and cAB, and its clients are in a similar financial situation as cDFI clients and financial advice user groups.
A qualitative sampling frame was applied for referral organizations and a purposeful sampling strategy was adopted to maximize variation in terms of individuals': (a) participation in financial inclusion programs-business microcredit (n = 16), personal microcredit (n = 10), money advice (n = 9), and nonusers of such initiatives (n = 10); (b) sociodemographic characteristics such as age, gender, ethnicity, and household composition; (c) disability and health status; and (d) neighborhoods in and around glasgow. Despite our offering incentive payments that paid the highest on completion of all six diaries, due to the sensitivity of the data collected, recruitment and retention were challenging. Snowball sampling, i.e. referrals from study participants, was also used to complement referrals from organizations. Attrition was 21 percent and 45 diarists (out of 57) were included in the analysis. The study was approved by the Ethics committee of glasgow School for Business and Society, glasgow caledonian University. Table 1 presents descriptive statistics of sociodemographic characteristics of the participants who completed four or more financial diaries (n = 45). All research participants were on low-to-moderate incomes, with the majority (82 percent) receiving means-tested welfare benefits. All participants were financially vulnerable so they had at least one of the following characteristics: limited financial resilience; low financial capability; a long-term health condition or disability; or experienced a recent serious life event, such as job loss, relationship breakdown, illness, bereavement, or becoming the main caretaker for a family member (Financial conduct Authority 2018).
Diaries and interviews
Monthly financial diaries were administered with forty-five diarists living in and around the city of glasgow. Financial diaries are systematic records of all daily income and expenditure transactions, as well as gifts, assets, and liabilities, aimed at understanding the money management strategies of low-income populations (Mchugh, Biosca, and Donaldson 2017). Similar to the U.S. Financial Diaries (Morduch and Schneider 2017) and Portfolios of the Poor (collins et al. 2009), in glasgow the term diaries is used to reflect the high-frequency of data collection and not the diarists logging transactions themselves. Through diary data, we explore the financial lives of low-to-moderate income, financially vulnerable individuals, including unique information on behavior and use of financial products. Additionally, information about individuals' financial transactions were used as prompts to generate qualitative data in relation to participants' lives, social networks, life events, and periods of difficulty. Phased data collection took place from February 2016 until March 2017. Diaries were constructed through 306 diary-interviews that took place in participants' homes or workplaces or universities every month. The aim was to collect diaries over a six-month period; however, the duration of data collection varied across participants: four-month diaries (n = 3), six-month diaries (n = 39), and seven-month diaries (n = 3). A baseline questionnaire was administered to collect information on demographic and socioeconomic characteristics of the participants, including information on social capital, financial knowledge and behavior, level and sources of income, and coping strategies, among others. This initial information enabled the construction of a profile for each participant. A similar questionnaire was administered at the end to assess changes in financial lives. 
The financial transactions (n = 16,889) were recorded and captured in a database adapted from an instrument developed by Microfinance Opportunities. 1 The predefined variables captured for each transaction were: purpose, amount (in £), direction of transaction (outflow/inflow), method of payment (cash, card, financial transfer, etc.), and channel (in person/online/phone). The database also had an open "additional comments" section. For financial transactions, we coded predefined details of the organization or individual involved in the transaction. For example, for informal finance exchanges, we coded the relationship between the individual and the diarist (relative, friend, etc.) and their gender. The data were then exported into Stata software for quantitative descriptive analysis.
Several mechanisms were put in place to control the quality of the diary data. To minimize recall bias, participants were sent weekly reminders in addition to being visited monthly to collect diaries. Diarists' bank statements and receipts were also frequently provided and cross-checked with reported transactions. Inconsistencies in income and savings against expenditure, as well as other misreporting errors, were tracked and addressed on subsequent visits to the diarist. Data on cash-in-hand and savings were used to assess any margins of error between sources and uses of funds, with their causes explored with diarists.
To systematically collect information with a high level of detail and consistency across diarists, and also due to the limited English language and literacy skills of some diarists, three skilled researchers were responsible for recording every income and expenditure transaction that was annotated by the diarists or appeared in bank statements during the preceding month, as well as assets, liabilities, and life events. Subjective comments on each transaction were recorded in the database, such as, for example, the motivation behind asking for a loan.
Our research team also used the diaries to inform qualitative questions based on financial transactions that were recorded in the form of field notes and then collated in "life event sheets" for each diarist (life events occurring in between data collection points). This information was used to understand the reasons behind participants' financial behavior that could not be observed by analyzing income and expenditure patterns alone-for example, why and how they used informal lending. One of the main advantages of the financial diary method was the high levels of trust that developed between researchers and participants through ongoing engagement. In these diary-interviews, diarists shared perceptions and personal details of their lives that were often crucial for the interpretation of the quantitative data.
Data analysis
A mixed-methods approach was used to analyze diary data. First, the descriptive statistics of monthly income and expenditure transactions were examined for each diarist; transactions capturing the use of different financial productsregulated and nonregulated by the Financial conduct Authority (FcA)-and their association with participant's characteristics and key events and shocks (identified through the qualitative data) were explored. Second, we purposely sampled individual cases based on intensity and diversity of financial products used and analyzed the selected cases using corporate finance tools (cash flow statements and monthly balance sheets). Finally, the quantitative interpretation of the results was combined with the qualitative individual stories collected in life event sheets, researcher field notes, and diarist notes on financial transactions, to provide context into diarists' (financial) lives and rationales for their financial behaviors (collins et al. 2009). Pseudonyms are used to maintain diarists' anonymity.
results
The financial diaries of our forty-five participants reveal the intricate financial management strategies used either to cope with periods of cash illiquidity or for investment purposes. This complexity is better understood through the analysis of participants' perceptions of their financial lives and accounts of their decisionmaking processes. Diarists needed to use sophisticated money management strategies mainly due to their general economic insecurity, precarious employment, and frequent month-to-month income and expenditure swings.
Managing day-to-day finances
In the context of low, but also unstable income streams, finance was crucial for diarists to smooth consumption patterns. As shown in Table 2, finance-related transactions-those including credit, savings, insurance, and other financial services-were the third most frequent overall (10 percent, n = 1,758), after groceries (26 percent, n = 4,359) and entertainment (13 percent, n = 2,141). In terms of total value, finance-related transactions were also third (£129,741), after benefits (£225,001), and employment income (£133,504). On average, diarists were making decisions related to financial services such as buying insurance or taking out or paying back a loan approximately every other day, with most of these transactions being related to credit (72 percent), followed by insurance (12 percent), and savings (8 percent). Only four diarists in our sample did not have a current loan (9 percent). Furthermore, a majority of participants (80 percent, n = 36) were simultaneously using at least two types of financial providers during the study, both FcA-regulated and nonregulated. Over the data collection period, study participants were using three credit products on average and one diarist was found to be managing eight loans simultaneously.
In our sample, managing multiple loans was associated with a lack of access to mainstream financial products; even if all diarists had a bank account, they were experiencing nuanced financial exclusion. Most of our participants could not borrow from mainstream financial institutions because of low salaries, unstable employment, part-time or self-employment, having a poor credit history or being "credit invisible," i.e., not having a credit history. Those diarists who did not have a bank loan or a low-cost mortgage were more likely to be managing a portfolio of regulated subprime loans (rent-to-own, car loans, pawn brokers, catalogue, doorstep, etc.), coupled with informal loans from relatives and friends. The wide range of financial providers used as well as the percentage of diarists using them at baseline is shown in Table 3. Throughout the diaries, the role of relatives and friends in assisting with financial instability and helping prevent illiquidity among our sample of individuals was central (see Table 3). This is corroborated by baseline data: when participants were asked what they would do if they had a financial emergency and needed £1000 in a hurry, nearly half of the sample (n = 18) replied they would turn to a relative or friend. The next most common response was "I wouldn't be able to cope," selected by one third (n = 15) of diarists. Only 9 percent (n = 4) of diarists would be able to use savings to cover the emergency expenditure. Instead we observed how diarists mobilized their social networks not only as a coping strategy after a life event or shock but also, more generally, during relatively longer periods of difficulty and financial instability. The six months of coded financial data show, for most diarists, extensive and strategic use of social capital as a safety net. 
Personal and organizational social networks were mobilized and created to access loans through three main financial mechanisms: (i) nonregulated informal loans, (ii) rotating credit and savings associations (roScAs), and (iii) FcAregulated group microfinance.
Informal mechanisms for managing short-term illiquidity: Rich in friends, poor in nothing?
From all credit transactions recorded, those related to loans with family and friends were the most frequent (34 percent), followed by those with rent-to-own organizations (14 percent). When possible, illiquidity situations were managed through family and friends because participants valued that informal loans were relatively easy to access, fast, small, and repayment was flexible. Table 4 shows that 34 diarists (76 percent) reported at least one informal transaction during the study period (4 diarists did not use any form of credit and 7 used other forms of credit but not informal). The average number of informal transactions during the study period was 12, with one diarist reporting 80 informal transactions during a six-month period. Table 4 also shows that nearly half (47 percent) of the credit transactions reported during the study were informal, with the importance of informal finance depending on access to other more formal forms of credit. For example, microcredit users rely on other credit forms, while recipients of financial advice and nonusers rely mainly on informal sources.
During the six months of data collection, transactions reveal that most diarists repeatedly relied on a person or a small group of very close people of similar socioeconomic characteristics who usually lived nearby. Examples of this are Sabrina and Uma, who were both immigrant single mothers with young families, in their forties, combining part-time employment and self-employment. They were also neighbors and, like many of our diarists, they relied on each other frequently for financial support. Financial transactions between them are an example of how our diarists borrowed or lent money to peers; most of their transactions were small (sometimes only £5) and via electronic transfer as the money needed to reach the bank account fast as it was usually to pay a direct debit or standing order of a bill coming through the following day. Overall, informal loan transactions were slightly more than £70 on average (see Table 4), frequently repaid in less than a week and used to cover shortterm cash shortages. Table 4 shows that, on average, £870 was exchanged informally by each participant using informal finance during the six months (n = 34), with some reporting figures as low as £20 and others close to £4,000. Informal loans were also interest free, but most diarists perceived costs in terms of having to reciprocate to ensure the link remained a reliable source for future needs, even when their own economic situation was insecure. One of the consequences of this tension was that 73 percent of diarists (33 out of 45) used others' credit cards and catalogue subscriptions as a form of payment or to lend to others at least on one occasion during the study. For example, rebecca, a 21-year-old single mother from glasgow used her mothers' credit card repeatedly during the study period, paying back the outstanding balance (around £100) every month. 
This was effectively used as a credit line depending on the needs: urgent household repairs, furniture for a new flat, social activities, soft-play sessions for her toddler and other general (but essential) day-to-day expenses. This behavior poses important risks to an already vulnerable group in two ways. First, it might affect their/others' credit scores and contribute to deepening financial exclusion in poorer communities. Second, if the borrower is not able to pay back on time to the person who has taken debt on their behalf, they risk losing their already limited social support network. We found that the repayment schedules of informal financial arrangements were frequently not discussed between lender and borrower at the time that the exchange took place. While for repeated transactions between friends, distinguishing between a loan and a gift was straightforward, some diarists found it difficult, in particular if it was a new lender/borrower or in situations where the borrower was perceived to be struggling.
Not all of the diarists in the study had a core network to support them financially. This is in line with the argument in the literature that people in poverty have smaller support networks (van Eijk 2010). Additionally, even when available, support networks were frequently income and asset poor, which posed additional constraints. Throughout his financial diaries, Paul-a 53-year-old Scottish male from Easterhouse, one of the most deprived areas in glasgow-and his sister transferred money back and forth to cushion against illiquidity. Paul's sister was the only person who provided a safety net for him, but Paul was aware that his sister, a pensioner, could not always help as she had her own financial problems. For most research participants, borrowing from family and friends was perceived as a last resort and they were reluctant to do so: they knew it was a lot to ask from people also living in difficult circumstances but, lacking access to alternative forms of finance, they perceived they had no choice.
Parties are not just about fun: Amina's safety net
To show the complexity of financial management strategies used by our diarists, we present the case of Amina. Amina is a single mother of a toddler and a baby born during the study (month 4). She is originally from West Africa but has been living in the Uk for almost 20 years. She is self-employed and runs a clothes shop in central glasgow. When she became a study participant, Amina was five months pregnant. She had been unable to work since the start of the pregnancy and this was severely affecting her life. Income from the shop she owned had lowered substantially during her pregnancy months. Suffering from pains and discomfort, she was not physically fit to work the hours required to make the shop profitable. however, she did not want to close the shop down as she felt she could make it profitable again after delivering her second child and going back to work. This was her only source of income and she wanted to be self-employed. given that business income kept decreasing until after she delivered her baby in month four of the diaries, Amina was constantly thinking about strategies to cope with the decrease in her income and the expenditure associated with a new baby. When we met her, she had just taken a business microloan to buy stock for her shop for when she could return to work. however, the productive nature of this loan did not help her with managing illiquidity. Figure 1 includes Amina's total personal and business income and expenditure every month during the study. During these months, she had been carefully pondering which expenses were necessary and which could be postponed and matched to her income. To cope, she had already taken small loans from subprime lenders as well as her core informal network. Lacking access to other credit options, she thought of a plan. Even though she was not feeling physically up to it, Amina organized a big party for her newborn. 
She knew that, for cultural reasons, relatives and friends would help with food, drinks, favors, and organizing the party. Amina's monthly budget for month four, shown in Table 5, reveals that for this party she not only managed to persuade friends and relatives to lend her money (£600) to stabilize her situation (diversifying sources of flexible free loans), but she also spent less (£224) than she received as gifts (£700), making a £476 profit. In the end, this strategy of organizing a party, which may seem irrational for an outsider given Amina's economic situation, was more effective than any of her alternatives: it was faster, she made a profit, and had a flexible repayment schedule adapted to her needs and health problems. Amina found a strategy to capitalize on her extended network when she had no other options left to stabilize her economic situation.
Savings for a purpose: join a menage
Another relationship-based financial instrument, used by seven of our forty-five diarists, were roScAs, popularly known as "moneyrounds" or, in Scottish gaelic, "menage." This is a traditional unregulated financial mechanism of saving and credit, which, in its simplest form, consists of a group of individuals who come together and make regular cyclical contributions to a common fund, which is then given as a lump sum to one member in each cycle. These are prevalent in developing country contexts (Ambec and Treich 2007) and have been observed in immigrant populations in advanced countries such as the United States and the Uk (Light and Pham 1998). In our study, three out of seven of the diarists who used roScAs at some point during the study were Scottish. The majority of users were single mothers; all menage participants were female, except for one single young man who was introduced to menage by his mother as a means of saving to pay for accumulated arrears. roScAs were mainly used as a means to save lump sums of money that users perceived they could not have saved otherwise. Menage users had clear objectives for their participation (for example, paying car-related expenses-insurance, MOT, road tax-traveling to visit family, or buying christmas presents). The menages we encountered in the study were not continuously operating; some were only mobilized when an individual of the core network (group of close friends) or members of the church congregation experienced a life event that had financial implications, such as weddings, bereavement, partnership breakups, or an emergency. In this case, the groups were mobilized as a safety net to quickly raise a lump sum of money that diarists would not be able to borrow from individuals in their network. Menage users perceived additional advantages over more "structured" savings products that they could access in the market; like with informal loans, the trust between members allowed for additional flexibility. 
On emergency occasions when money was tight, individuals could negotiate with the member getting the lump sum and defer payment or even default if it had not been their turn yet; the only consequence of this being the defaulted money would be deducted when their payment was due. The amount paid into menages by Scottish participants was modest (around £10/month), while the more sophisticated ones, run electronically by immigrants living across the country, were up to £200/month (which were doubled-up to £400 by taking two rounds). These differences are likely due to the reasons behind financial exclusion for the two populations. While Scots lacked access to mainstream financial services because of their low incomes and poor credit records, immigrants frequently did not have credit histories, which made them "credit invisible." The duration of the menages varied depending on the number of members (up to fifteen members). The highest amount of money paid in a moneyround to a participant was £3,000. These financial mechanisms operated on the basis of trust and participants were selected carefully: all menage participants could veto a potential new entrant and, to minimize risk, new entrants' turn to receive the money came last or close to last. gender, marital status, having children, race, and religion were all mentioned as selection criteria. For example, one menage was exclusively for single mothers and another only for families with children. In the words of one of the Southeast Asian menage participants: "You don't handle money with white people, you rather keep your business with your own people." The definition of "own people" was broad and the menage included African and Asian first-and second-generation immigrants.
Capitalizing on social networks: Microcredit for business Self-employed participants were able to capitalize on and expand their core social networks by accessing business microcredit. The financial product offered by grameen in the Uk required that prospective borrowers formed groups of five people to meet every week for loan repayment. If someone in the group defaulted on their loan, the group would be dissolved and no further loans provided. The selection of the group members was based on trust so all group members needed to be in the core network of another group member. The group meetings and trust relationships were generally associated with higher flexibility and pooling of resources. Frequently, diarists who were members of one of these grameen in the Uk groups would pay for each other's installments if one of them could not afford to pay that week. In turn, they received (and expected) reciprocal behavior from group members. This additional flexibility provided by the social network cushioned participants against defaulting on their loans and compensated for a relatively rigid microcredit product design.
Discussion and conclusion: Until Debt Do Us Part?
This study has shown the crucial role that relatives and friends play in the everyday finances of relatively poor, financially excluded individuals. Social networks are mobilized in different ways to manage cash flows and avoid the pernicious effects of short-term illiquidity. however, this can result in the networks of the more vulnerable, frequently small and also resource-poor, being damaged because of financial matters. While the importance of social exchange between neighbors, relatives, and friends as a survival strategy for the low-income population resonates well with the development literature, some authors have highlighted that being excluded from, for example, the labor market will affect the financial and social resources of individuals and, subsequently, their ability to be part of a relationship of reciprocity (de la rocha 2006). This article builds on this by exploring the association between financial exclusion and social networks. The alternatives for the financially excluded are few or nonexistent. Our results confirm the importance in the Uk of credit over savings to cope with financial insecurity (hood, Joyce, and Sturrock 2018), with only 22 percent of Uk adults not holding credit of any kind in 2017 (Financial conduct Authority 2018). These results also align well, given the sociodemographic characteristics of our participants, with the fact that 24 percent of Uk adults had less than £1,000 as a savings buffer in 2017 (Financial conduct Authority 2018). Other forms of finance, mainstream and subprime, are found to be complementary to those provided by social networks. Individuals end up managing a portfolio of loans and making continuous financial decisions, which are particularly complex in terms of their relatively large amounts, their future implications, and because they cannot be easily reversed. These findings have implications for the nature of social networks, policy, and practice.
Social networks can provide credit in times of need and lead to increased participation in risk-pooling within formal and informal institutions, such as microcredit group lending and ROSCAs. Repeated interactions between members, through use of these financial products, create more space for solidarity and flexible arrangements. However, this sophisticated use of social networks can have detrimental effects on already low levels of social capital in these communities and increase individuals' exclusion from mainstream finance.
The pervasiveness of social networks for financial stability has the potential to alter the qualitative nature of the individuals' relationships. Instead of drawing on friends and family occasionally or in an emergency, our data suggest that informal lending is common and well-established. If money comes to define relationships, the relationship can be put under pressure. A further unintended consequence is that individuals do not have the opportunity to build up their formal credit footprint and will more likely remain "credit invisible." Thus, despite having sophisticated financial management skills, these individuals will continue to be excluded from the mainstream and unable to access financial products and services, such as mortgages and insurance products, which could enhance their life.
Policy and practice: What should be done?
Our financial diary data highlight that low-to-moderate income individuals living on the verge of financial exclusion do not always have enough cash to cover their basic needs, and they turn to (in)formal lending to manage their financial (in)stability. Indeed, individuals' financial lives are so complex that diarists are making financial-related decisions approximately every other day, which represents a significant cognitive burden.
This article reveals that social networks, instead of the state, operate as the main safety net of financially excluded individuals. The importance of social networks for financial stability, even in advanced economies such as the Uk, indicates that (a) the design of the current welfare system does not cope well with growing income volatility and financial insecurity, and (b) there is a gap in the provision of credit. Policy can respond to this by tackling the bigger, systemic issue of meeting basic needs and providing better cushioning for financial instability or responding to the gap in the provision of finance. Our focus here is on ways to address the latter issue.
One way to reduce the reliance on social networks is to promote "alternative" economic spaces that prioritize the interests and well-being of their users, such as cDFIs (Mchugh, Baker, and Donaldson 2019). however, the government needs to support these institutions, as it is difficult to sustainably offer financial products to low-to-moderate income individuals (Wilson 2012). To succeed, these financial inclusion policies need to be part of a more general policy agenda to address growing levels of financial insecurity in different income levels of the population. The reliance on social networks also conditions how relationships are understood in poorer communities and incentivizes individual behaviors that can harm social networks and promote financial exclusion even further. Alternative high-quality, affordable, fast, safe, and flexible financial products that allow individuals to build up credit histories are required to help the financially insecure. These should be combined with anti-poverty policies that are better adapted to the needs of those who do not have enough slack to cope with the ups and downs of everyday life. |
package net.minecraft.client.gui.screen;
import com.mojang.blaze3d.matrix.MatrixStack;
import net.minecraft.client.AbstractOption;
import net.minecraft.client.GameSettings;
import net.minecraft.client.gui.DialogTexts;
import net.minecraft.client.gui.widget.button.Button;
import net.minecraft.client.gui.widget.button.OptionButton;
import net.minecraft.entity.player.PlayerModelPart;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TranslationTextComponent;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
@OnlyIn(Dist.CLIENT)
public class CustomizeSkinScreen extends SettingsScreen {
   public CustomizeSkinScreen(Screen parentScreenIn, GameSettings gameSettingsIn) {
      // Screen title uses the vanilla translation key for skin customisation.
      super(parentScreenIn, gameSettingsIn, new TranslationTextComponent("options.skinCustomisation.title"));
   }

   // NOTE(review): the func_/field_ names below are obfuscated SRG mappings; the
   // comments describe apparent intent — confirm against current mapping tables.
   // func_231160_c_ appears to be the init() override: it builds one toggle button
   // per player model part, a main-hand option button, and a done button.
   protected void func_231160_c_() {
      int i = 0;
      // Two-column layout: even i -> left column (x offset -155), odd i -> right
      // column (+160); every two buttons advance one 24-pixel row (i >> 1).
      for(PlayerModelPart playermodelpart : PlayerModelPart.values()) {
         this.func_230480_a_(new Button(this.field_230708_k_ / 2 - 155 + i % 2 * 160, this.field_230709_l_ / 6 + 24 * (i >> 1), 150, 20, this.func_238655_a_(playermodelpart), (p_213080_2_) -> {
            // Toggle the model part, then refresh the button label to reflect it.
            this.gameSettings.switchModelPartEnabled(playermodelpart);
            p_213080_2_.func_238482_a_(this.func_238655_a_(playermodelpart));
         }));
         ++i;
      }
      // Main-hand selector placed in the same grid after the model-part toggles.
      this.func_230480_a_(new OptionButton(this.field_230708_k_ / 2 - 155 + i % 2 * 160, this.field_230709_l_ / 6 + 24 * (i >> 1), 150, 20, AbstractOption.MAIN_HAND, AbstractOption.MAIN_HAND.func_238157_c_(this.gameSettings), (p_213081_1_) -> {
         // NOTE(review): the constant 1 is either an absolute value index or a
         // cycle step — confirm against AbstractOption.setValueIndex semantics.
         AbstractOption.MAIN_HAND.setValueIndex(this.gameSettings, 1);
         this.gameSettings.saveOptions();
         p_213081_1_.func_238482_a_(AbstractOption.MAIN_HAND.func_238157_c_(this.gameSettings));
         this.gameSettings.sendSettingsToServer();
      }));
      ++i;
      // If the last row is half filled, skip to a fresh row for the wide button.
      if (i % 2 == 1) {
         ++i;
      }
      // DialogTexts.field_240632_c_ is presumably the "Done" label; clicking it
      // returns to the parent screen.
      this.func_230480_a_(new Button(this.field_230708_k_ / 2 - 100, this.field_230709_l_ / 6 + 24 * (i >> 1), 200, 20, DialogTexts.field_240632_c_, (p_213079_1_) -> {
         this.field_230706_i_.displayGuiScreen(this.parentScreen);
      }));
   }

   // func_230430_a_ appears to be the render override: background, centered
   // title, then the widgets via super.
   public void func_230430_a_(MatrixStack p_230430_1_, int p_230430_2_, int p_230430_3_, float p_230430_4_) {
      this.func_230446_a_(p_230430_1_);
      // 16777215 == 0xFFFFFF (white); title drawn horizontally centered at y = 20.
      func_238472_a_(p_230430_1_, this.field_230712_o_, this.field_230704_d_, this.field_230708_k_ / 2, 20, 16777215);
      super.func_230430_a_(p_230430_1_, p_230430_2_, p_230430_3_, p_230430_4_);
   }

   // Builds the labelled text component for a model-part toggle, based on whether
   // the part is currently contained in the enabled model-part set.
   private ITextComponent func_238655_a_(PlayerModelPart p_238655_1_) {
      return DialogTexts.func_244281_a(p_238655_1_.getName(), this.gameSettings.getModelParts().contains(p_238655_1_));
   }
}
/**
* @param url
* @return the imgur type and hash, or null if the URL was too tricky
*/
public static ImgurTypeHash imgurURLtoHash(URL url) {
if (!url.getHost().contains("imgur"))
return null;
final String path = url.getPath();
final String[] split = path.split("[/]+");
if (split.length == 0)
return null;
else if (split.length == 2) {
if (split[1].equals("gallery"))
return new ImgurTypeHash(ImgurType.GALLERY, null);
else {
final Matcher matcher = hashPattern.matcher(split[1]);
if (matcher.find())
{
final String hash = split[1].substring(0, matcher.end());
return new ImgurTypeHash(ImgurType.IMAGE, hash);
}
return null;
}
}
else {
final String hashPart = split[split.length - 1];
final String typePart = split[split.length - 2];
ImgurType type = ImgurType.IMAGE;
if (typePart.equals("a"))
type = ImgurType.ALBUM;
final Matcher matcher = hashPattern.matcher(hashPart);
matcher.find();
final String hash = hashPart.substring(0, matcher.end());
return new ImgurTypeHash(type, hash);
}
} |
Obama bound the speech together with the theme of Romney’s dishonesty. | John Shinkle/POLITICO Obama returns to hard tack on Mitt
FAIRFAX, Va. — President Barack Obama seems to have remembered the things he forgot to say in the debate.
The president was in an aggressive mood here Friday as he addressed a women’s rally at George Mason University in suburban Virginia. Unlike on stage in Denver on Wednesday, he referenced many of what have proven the most potent attacks of his campaign, including knocking Mitt Romney’s “47 percent” comments and unloading on the Republican nominee’s positions on women’s health. And he talked at greater length and in more depth than at any time recently about his health care reforms.
Story Continued Below
Obama bound the speech together with the theme of Romney’s dishonesty and what’s become the most memorable single line out of the first debate — Romney’s pledge to cut funding for PBS by defunding Big Bird.
( Also on POLITICO: Obama touts low unemployment rate)
“Gov. Romney plans to let Wall Street run wild again but he’s going to bring down the hammer on Sesame Street,” Obama said.
Later, Obama tied it to a broader Romney critique that he is not being forthright about his economic plans.
“Gov. Romney said he’d get rid of Planned Parenthood funding,” Obama said. “Apparently this, along with Big Bird, is driving the deficit.”
The crowd briefly chanted “P-B-S.”
But the biggest change from recent Obama speeches was an extended boasting about the health care reforms. He tipped into this through a reference to Romney’s claim at the debate that his health care plan covers pre-existing conditions. Obama said that was phony — and cited Romney aide Eric Fehrnstrom’s debate spin room walk-back of Romney’s debate comment as evidence.
“Gov. Romney was fact-checked by his own campaign,” Obama said. “That is rough.”
The Romney campaign disagreed with Obama’s assessment.
( Also on POLITICO: Inside the campaign: Reinventing Romney)
“The attempts to confuse and misstate Gov. Romney’s position on protecting those with pre-existing conditions simply will not work,” Romney campaign policy director Lanhee Chen told POLITICO in a statement, calling the policy “consistent and clear.”
“Insurance companies will be prohibited from denying coverage to those with pre-existing conditions who have maintained continuous coverage. This protects individuals and families who might lose their job, or who simply want to own their own insurance instead of receiving it from an employer,” Chen said. |
package it.unica.ro.cvrpb.model;
/**
 * The BackhaulCustomer class represents a backhaul customer in a Vehicle Routing Problem.
 * A backhaul customer is a customer requiring a given amount of product to be picked up
 * (as opposed to a linehaul customer, to whom product is delivered).
 */
public class BackhaulCustomer extends Customer {
    /**
     * Constructs a new Backhaul customer, given its coordinates and the required amount of product.
     * @param v a vertex representing the coordinates of the customer
     * @param load the amount of product to be picked up
     */
    public BackhaulCustomer(Vertex v, int load) {
        super(v, load);
    }
    /**
     * Returns the amount of product to be picked up at this customer.
     * This is simply the customer's load, interpreted as a pickup quantity.
     * @return the amount of product to be picked up
     */
    public int getPickupLoad() {
        return getLoad();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isLinehaul() {
        return false;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isBackhaul() {
        return true;
    }
}
|
<filename>src/EditorTextView.cpp
#include "EditorTextView.h"
// Constructs the editor view. The status message template and messenger are
// copied and kept for the lifetime of the view; they are reused by the input
// hooks (MouseDown/KeyDown) to notify the owner to clear its status display.
EditorTextView::EditorTextView(BRect frame, const char* name, BRect textRect, uint32 resizingMode, uint32 flags, const BMessage &msg, const BMessenger &msgr)
	: BTextView(frame, name, textRect, resizingMode, flags), statusMessage(msg), statusMessenger(msgr)
{
}
void EditorTextView::MouseDown(BPoint point)
{
statusMessage.MakeEmpty();
statusMessage.AddInt64("clearStatus", 1);
statusMessenger.SendMessage(&statusMessage);
BTextView::MouseDown(point);
}
void EditorTextView::KeyDown(const char* bytes, int32 numBytes)
{
statusMessage.MakeEmpty();
statusMessage.AddInt64("clearStatus", 1);
statusMessenger.SendMessage(&statusMessage);
BTextView::KeyDown(bytes, numBytes);
}
|
<reponame>Thanduriel/StableNN
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import math
def isFloat(s):
    """Return True if `s` can be parsed as a float, False otherwise."""
    try:
        float(s)
    except ValueError:
        return False
    return True
# When True, points sharing a learning-rate id are connected with lines;
# otherwise all runs are drawn as individual markers.
withLines = True
# pointSets[k] collects [time step, frequency] pairs for nets with k attractors.
pointSets = [[],[],[],[],[]]
# Learning-rate id parsed from each network's name (first '_'-separated field).
names = []
# learningRates[k] mirrors pointSets[k]: the learning-rate id of each point.
learningRates = [[],[],[],[],[]]
# parse file
with open('freqsSingle.txt') as f:
    nets = []
    for line in f:
        # Trailing comma in each row yields an empty final field; drop it.
        nums = line.split(',')[:-1]
        if len(nums) > 0:
            # first row is network name
            if not isFloat(nums[0]):
                #first part of the name is the learning rate
                names.append(int(nums[0].split('_')[0]))
                nums = nums[1:]
            nums = [float(x) for x in nums]
            nets.append(nums)
for j in range(0,len(nets)):
    freqs = nets[j]
    # group by number of attractors
    for i in range(1, len(freqs)):
        if freqs[i] > 0.0:
            idx = len(freqs)-1 # num attractors
            # freqs[0] is the time step
            pointSets[idx].append([freqs[0], freqs[i]])
            if withLines:
                learningRates[idx].append(names[j])
if withLines:
    # NOTE(review): only groups 1 and 2 are converted; pointSets[3]/[4] are not
    # used in the line-plot branch below — confirm this is intended.
    learningRates = [[], np.array(learningRates[1]),np.array(learningRates[2])]
array1 = np.array(pointSets[1]);
array2 = np.array(pointSets[2]);
array3 = np.array(pointSets[3]);
fig1, ax1 = plt.subplots()
# Horizontal shift factor used in the marker-only branch to separate groups.
shift = 0.05
if withLines:
    maxLr = np.max(learningRates[1])
    # NOTE(review): range(0, maxLr) excludes the run with the highest
    # learning-rate id — verify whether range(0, maxLr + 1) was intended.
    for i in range(0, maxLr):
        # One line (period = 1/frequency vs. time step) per learning-rate id.
        arr = np.sort(array1[learningRates[1]==i], axis=0)
        plt.plot( arr[:,0], 1.0 / arr[:,1], color="b", marker = 'o')
        if len(array2) > 0:
            # Two-attractor runs contribute two interleaved branches (even/odd rows).
            arr = np.sort(array2[learningRates[2]==i], axis=0)
            plt.plot( arr[0::2,0], 1.0 / arr[0::2,1], color="r", marker = 'o')
            plt.plot( arr[1::2,0], 1.0 / arr[1::2,1], color="r", marker = 'o')
    ax1.legend(["1 attractor", "2 attractor"])
else:
    # Marker-only mode: shift groups slightly left/right so they don't overlap.
    plt.plot( array1[:,0]*(1.0-shift), 1.0 / array1[:,1], "b+")
    if array2.size > 0:
        plt.plot( array2[:,0], 1.0 / array2[:,1], "r+")
    if array3.size > 0:
        plt.plot( array3[:,0]*(1.0+shift), 1.0 / array3[:,1], "y+")
    ax1.legend(["1", "2", "3"])
ticks = np.unique(array1[:,0])#np.unique(np.concatenate((array1[:,0], array2[:,0])))
#if ticks[-1] / ticks[0] > 10:
#    ax1.set_xscale('log')
ax1.set_xticks(ticks)
ax1.get_xaxis().set_major_formatter(matplotlib.ticker.ScalarFormatter())
plt.xlabel('time step')
plt.ylabel('frequency')
plt.show()
|
<reponame>eyalbetzalel/sparse_attention<filename>janky_stuff.py<gh_stars>10-100
import numpy as np
class JankySampler:
    """Samples items from an array without replacement, reshuffling on demand.

    Each pass visits every element exactly once in a random order; when a draw
    would overrun the remaining elements, the order is reshuffled and the pass
    restarts from the beginning.
    """

    def __init__(self, arr, seed=None):
        self.arr = arr
        self.nprng = np.random.RandomState(seed)
        self.reset()

    def reset(self):
        """Begin a fresh pass: rewind the cursor and reshuffle the index order."""
        self.drawn = 0
        self.idx = self.nprng.permutation(len(self.arr))

    def draw(self, n):
        '''
        Shuffle the array if it's exhausted and draw `n` samples without
        replacement.
        '''
        if n > len(self.arr):
            raise ValueError("It looks like you tried to draw more than there are in the list")
        if self.drawn + n > len(self.arr):
            self.reset()
        start, end = self.drawn, self.drawn + n
        self.drawn = end
        return self.arr[self.idx[start:end]]
class JankySubsampler:
    '''
    To be used with iter_data_mpi. Reports the same shape as `arrays[0]`, but
    indexing returns a mixed batch: each slot is assigned to one of `arrays`
    according to `pmf`, and the items are drawn without replacement from the
    corresponding JankySampler.
    '''

    def __init__(self, arrays, pmf, seed=None):
        assert len(pmf) == len(arrays)
        self.pmf = pmf
        self.arrays = arrays
        # One shuffled no-replacement sampler per source array, all seeded alike.
        self.samplers = []
        for arr in arrays:
            self.samplers.append(JankySampler(arr, seed=seed))
        self.idxs = np.arange(len(self.pmf))
        self.nprng = np.random.RandomState(seed)
        # All sources must agree on per-example shape (first axis may differ).
        for arr in arrays[1:]:
            assert arrays[0].shape[1:] == arr.shape[1:]

    @property
    def shape(self):
        return self.arrays[0].shape

    def __getitem__(self, key):
        # Assign each of the len(key) slots to a source array according to pmf,
        # then draw that many items from each source and stack the results.
        picks = self.nprng.choice(self.idxs, size=len(key), p=self.pmf)
        chunks = []
        for idx, sampler in zip(self.idxs, self.samplers):
            chunks.append(sampler.draw((picks == idx).sum()))
        return np.concatenate(chunks, axis=0)
if __name__ == "__main__":
    # Test 1: repeated draws of 8 from 9 elements force a reshuffle each time.
    arr = np.arange(9)
    sampler = JankySampler(arr, seed=42)
    for _ in range(4):
        print(sampler.draw(8))
    # Test 2: mixed batches drawn 50/50 from two differently-sized sources;
    # the aux subsampler shares the seed, so its 1s/0s mark which source
    # each row of the main subsampler's batch came from.
    pmf = [0.5, 0.5]
    arr1 = np.arange(4 * 2).reshape(4, 2) + 1
    arr2 = -np.arange(8 * 2).reshape(8, 2)
    subsampler = JankySubsampler((arr1, arr2), pmf, seed=42)
    aux_arr1 = np.ones((4,))
    aux_arr2 = np.zeros((8,))
    aux_subsampler = JankySubsampler((aux_arr1, aux_arr2), pmf, seed=42)
    dummy_indices = np.arange(4) # Draw 4 arrays at a time
    for _ in range(10):
        print(subsampler[dummy_indices])
        print(aux_subsampler[dummy_indices])
|
// GetEnvStruct fetch env from tag struct
func GetEnvStruct(b []byte) (records []string) {
results := goEnvStructRgx.FindAllSubmatch(b, -1)
for _, r := range results {
if len(r) > 1 {
cleaned := strings.Split(strings.Trim(string(r[1]), " "), ",")
records = append(records, cleaned[0])
}
}
return unique(records)
} |
<gh_stars>0
/**
 * Prints a greeting for the single name given on the command line.
 * With any other argument count, reports an error (in Hungarian) and exits
 * with status 1.
 */
class Greeting2 {
    public static void main( String[] args ){
        if (args.length == 1) {
            System.out.println("Hello " + args[0] + "!");
        } else {
            System.err.println("Nem megfelelő számú parancssori argumentum.");
            System.exit(1);
        }
    }
}
|
import cancelAndSaveJob from 'util/job';
import { WorkflowListener, WorkflowEvent, WorkflowListenerConfig, EventType } from './workflow-listener';
import { getWorkflowByName } from '../util/workflows';
/**
 * Listens for Argo workflow/node failure events whose message indicates a
 * termination request and cancels the corresponding job.
 */
export default class WorkflowTerminationListener extends WorkflowListener {
  constructor(config: WorkflowListenerConfig) {
    // Fix the event filter: ADDED events in the argo namespace whose reason is
    // a (node) failure and whose message mentions termination.
    super({
      ...config,
      eventType: EventType.ADDED,
      reasonRegex: '(Workflow|WorkflowNode)Failed',
      messageRegex: '.*Terminate.*',
      namespace: 'argo',
    });
  }

  async handleEvent(event: WorkflowEvent): Promise<void> {
    // Look up the workflow named in the event to recover its request id.
    const workflow = await getWorkflowByName(event.involvedObject.name, this.logger);
    const requestId = workflow.metadata.labels.request_id;
    this.logger.info(`Received termination request for job ${requestId}`);
    // Cancel the job (without triggering an argo workflow termination).
    await cancelAndSaveJob(requestId, 'Canceled by admin', this.logger, true, null, true);
  }
}
|
Supporting change in adoption it's only a stepfamily adoption
Abstract Stepfamily Scotland was established six years ago as an autonomous organisation in Scotland to complement the work of the National Stepfamily Association, which was then based in London. The NSA has now merged into an organisation called Parentline Plus (about 18 months ago) leaving SFS as the only stepfamily specific support service in the U.K. |
<filename>public/app-thingspin-fms/grafana_custom/core.ts
import {
profiler,
registerAngularDirectives,
arrayJoin,
searchDirective,
liveSrv,
layoutSelector,
switchDirective,
infoPopover,
Emitter,
appEvents,
dashboardSelector,
queryPartEditorDirective,
sqlPartEditorDirective,
colors,
formDropdownDirective,
assignModelProperties,
KeybindingSrv,
JsonExplorer,
NavModelSrv,
NavModel,
geminiScrollbar,
orgSwitcher,
manageDashboardsDirective,
TimeSeries,
updateLegendValues,
searchResultsDirective,
} from 'app/core/core';
import { contextSrv } from 'app-thingspin-fms/angular-modules/core/services/tsContextSrv';
import coreModule from './core_module';
export {
profiler,
registerAngularDirectives,
arrayJoin,
coreModule,
searchDirective,
liveSrv,
layoutSelector,
switchDirective,
infoPopover,
Emitter,
appEvents,
dashboardSelector,
queryPartEditorDirective,
sqlPartEditorDirective,
colors,
formDropdownDirective,
assignModelProperties,
contextSrv,
KeybindingSrv,
JsonExplorer,
NavModelSrv,
NavModel,
geminiScrollbar,
orgSwitcher,
manageDashboardsDirective,
TimeSeries,
updateLegendValues,
searchResultsDirective,
};
|
import blessed, { Widgets } from 'blessed';
import { Element, ElementConfig } from './Element';
import { WorktreeElement } from './WorktreeElement';
import { BranchesElement } from './BranchesElement';
import { StashElement } from './StashElement';
import { Git } from '../services/git';
import ListbarOptions = Widgets.ListbarOptions;
import { ChangelogElement } from './ChangelogElement';
import { DefaultTheme } from '../themes/default';
type MenuSelectCallback = (selectedElement: Element) => Promise<void>;

/**
 * Top-level menu bar. Owns one content Element per tab (worktree, branches,
 * stash, changelog) and invokes the supplied callback when a tab is selected.
 */
export class MainMenuElement extends Element {
  readonly #listbar: Widgets.ListbarElement;

  readonly #git: Git;

  get instance() {
    return this.#listbar;
  }

  constructor({ git, ...config }: ElementConfig, onMenuSelect: MenuSelectCallback) {
    super();
    this.#git = git;
    this.#listbar = blessed.listbar({
      ...config as ListbarOptions,
      keys: true,
      mouse: true,
      autoCommandKeys: true,
      style: {
        ...DefaultTheme.listStyle,
        focus: {
          bg: 'red',
        },
      } as any,
      items: [],
      commands: [],
    });
    this.#createMenuItems(onMenuSelect);
  }

  #createMenuItems(onMenuSelect: MenuSelectCallback) {
    // All tab content elements share the same layout: full width, below the bar.
    const elementsConfig: ElementConfig = {
      git: this.#git,
      left: 0,
      right: 0,
      top: 1,
      bottom: 0,
    };
    const items = [
      { name: 'Worktree', element: new WorktreeElement(elementsConfig) },
      { name: 'Branches', element: new BranchesElement(elementsConfig) },
      { name: 'Stash', element: new StashElement(elementsConfig) },
      { name: 'Changelog', element: new ChangelogElement(elementsConfig) },
    ];
    for (const item of items) {
      // Kick off async init for each element, re-rendering once it settles.
      item.element.init().then(() => {
        item.element.instance.screen.render();
      });
      this.#listbar.addItem(item.name as any, async () => {
        await onMenuSelect(item.element);
      });
    }
  }

  override async init(): Promise<void> {
    this.#listbar.selectTab(0);
  }
}
|
Colorectal carcinoma in Hong Kong: epidemiology and genetic mutations.
The incidence of colorectal carcinoma is rising at an alarming pace in Asian urban societies such as Hong Kong. Detailed examination of the epidemiological pattern and genetic mutation of colorectal cancer in the Hong Kong Chinese population is overdue. We compared the reported age incidence of colorectal carcinoma in Hong Kong with that of Scotland and other countries. Hong Kong showed a much higher incidence of colorectal carcinoma among the young age groups. By comparison with other countries, this raised incidence among the young appeared to be related to southern Chinese societies. The recent dramatic rise in colorectal cancer in Hong Kong was largely attributable to an increase in the over 50 years age group, while the young incidence remained unchanged. We also defined the mutation spectrum of p53 and Ki-ras in 67 unselected cases by direct DNA sequencing. Interestingly, insertion/deletion mutations in p53 from colorectal carcinoma in Hong Kong showed a significantly higher frequency (17.2%) than the Scottish data (0%) and the world database (6.6%), although the overall frequency of p53 mutation (43%) in Hong Kong was similar to others. The high incidence of colorectal carcinoma in young people and the raised proportion of frameshift mutations in p53 encourage further search for a genetic basis for susceptibility to this disease in the Hong Kong Chinese population.
The age standardized incidence rate in 1991 was 35.4:100 000 for men and 28.5:100 000 for women (Hong Kong Cancer Registry, 1995). More than 95% of the population are ethnic Chinese and, although there have doubtless been many changes in the past few decades, the majority have a life style and diet still greatly different to those of the West.
Genetic changes are also very significant in colorectal carcinogenesis. Most tumours have acquired (non-germline) inactivating mutations in adenomatous polyposis coli (APC) (Ashton Rickardt et al, 1989;Nishisho et al, 1991;Powell et al, 1992); activating mutations in the Ki-ras oncogene are present in about 40% of carcinomas (Bos et al, 1987;Forrester et al, 1987); and mutations in the p53 and 'deleted in colon carcinoma' (DCC) oncosuppressor genes are found in 70-80% of carcinomas (Vogelstein et al, 1988;Baker et al, 1989;Vogelstein et al, 1989;Baker et al, 1990;Fearon et al, 1990;Hollstein et al, 1991). Moreover, although most cancers arise in patients over the age of 50 years, some are diagnosed substantially earlier, and in such younger patients there is often evidence for inherited susceptibility to the disease. One example of this is the hereditary non-polyposis colon cancer (HNPCC) syndrome, in which there is inherited deficiency of one of the nucleotide mismatch repair genes, and this deficiency is expressed with high penetrance in kindreds showing a Mendelian dominant pattern of inherited susceptibility to colorectal cancer. However, recent examination of atypically young patients with colorectal cancer, even those with no known family history, has revealed a proportion of individuals with germline mutation in mismatch repair genes that may be as high as 42% (Liu et al, 1995). Mutations apparently driven by the defective mismatch repair are found in classical colorectal cancer genes, such as APC, and have a characteristic signature, with a predominance of nucleotide deletions or insertions within simple repeat sequences — unlike the transversion type of point mutations that characterize the interactions of many environmental carcinogens with DNA (Huang et al, 1996). 
Hence, detailed study of the age incidence and the mutational spectra in colorectal cancer may shed light not only on differences in environmental carcinogens but also on the relative importance of genetic susceptibility in different populations.
Here, we report the unusual age-incidence pattern of colorectal cancer in Hong Kong. As expected, we show the dramatic rise in incidence of disease in the over 50 years age group within the past 10-20 years. However, we also demonstrate features suggestive of the presence of a susceptibility gene within the Hong Kong population — a relatively high incidence in young people and a raised proportion of frameshift mutations in the p53 oncosuppressor gene in comparison with Caucasian populations. Evidence is also presented that this may be a southern Chinese characteristic, distinct from the factors responsible for the rising overall incidence of colorectal cancer in urbanized south east Asian populations.
MATERIALS AND METHODS Analysis of epidemiology data
Data on cancers of colon and rectum from the cancer registries from Hong Kong and Scotland recorded in the WHO Cancer Incidence in Five Continents 1978-82 (Muir et al, 1987) and 1983-87 (Parkin et al, 1992 were analysed. Data from Hong Kong Govemment statistics (Hong Kong Government, 1982-95;Hong Kong Cancer Registry, 1995) and Scottish health statistics for 1989 were also analysed and compared in detail. Finally, we also sought pathological data from the Hong Kong Cancer Registry concerning the nature of the cancers of colon and rectum. For all the cases in the Registry, a diagnosis of colorectal cancer was given and, for 80% of these cases, a histopathological report was available for confirmation.
The population-based Cancer Registry in Hong Kong has been operating since 1963. The majority of the data were from oncology and pathology departments of all government-funded and private hospitals, and the rest came from discharge summaries of all public and private hospitals and death certificates, in which the cause of death was a compulsory recording by accredited medical practitioners. Duplicate registration was eliminated by checking the demographic data in which the Identity Card number is unique for every individual in Hong Kong. The registration data in Scotland were mainly derived from hospital in-patient sources and a small proportion came from out-patient departments, and death certificates.
Analysis of Ki-ras and p53 mutations Tissue and DNA extraction Sixty-seven unselected colectomy specimens with diagnosis of adenocarcinoma received in Queen Mary Hospital in the year 1990-91 were studied. Thirty-six were from male and 31 from female Chinese patients. The patients' age ranged from 24-88 years, with 13% aged below 40 years. The specimens were received unfixed on ice from the operating theatre, and representative blocks were taken from both the tumour and the normal mucosa, snap frozen in liquid nitrogen and stored at -70°C. The rest of the specimens were fixed in 10% buffered formalin and processed through paraffin for histology.
Frozen sections, prepared from the stored frozen blocks, were assessed under light microscope. DNA was extracted by proteinase K digestion, phenol-chloroform extraction and ethanol precipitation. Only blocks with tumour occupying more than 70% of section area were used. At the same time, DNA was also extracted from normal mucosa.
p53 mutations
Immunohistochemical studies were performed using monoclonal antibodies PAb 1801 and PAb 240 (Oncogene Science) and polyclonal antibody CM1 (Novocastra), using either frozen (PAb 240 only) or paraffin sections, the latter fixed in formalin or PLPD, using a standard ABC technique with and without microwave pretreatment (Purdie et al, 1991;Cripps et al, 1994).
The polymerase chain reactions (PCR) were performed on 0.5 ,ug of DNA samples, in a 50-,ul reaction containing 200 gM of each deoxynucleotide, 0.5 jiCi dCTP, 0.33 gM of each primer and 1 unit of Taq polymerase in appropriate buffer. PCRs were performed in a DNA thermocycler (Perkin Elmer) with the following temperature profile: 94°C for 4 min then 30 cycles of 94°C for 40 s, 58-63'C (depending on primers) for 40 s, and 72°C for 1 min, then 72°C for 10 min. Of the PCR products, 7 gl was mixed with 5 ,l of sequencing stop solution, heated to 80°C for 2 min and 5 p1 was loaded onto a 5% glycerol-6% polyacrylamide gel. The gels were run at room temperature, 3 W for 12-15 h in vertical polyacrylamide gel apparatus (Hoefer). The gel was fixed, dried and then exposed to radiographic films. Three of the mutation 'hot spots' in p53 form part of the recognition sequence of known restriction endonucleases: codon 248 (exon 7) and 282 (exon 8) form part of Msp I site and codon 175 (exon 5) form part of Hae II site. Hence, we used a rapid nonradioactive PCR-based method to screen for mutations in these codons. Two PCR fragments were used: one spanning exon 7-8, while the other covered only exon 5. These were digested using the appropriate restriction enzymes, and the resulting fragments were analysed by ethidium bromide-stained 2% agarose gel.
Direct DNA sequencing was performed on single-stranded DNA templates generated by asymmetric PCR using either excess 5' or 3' primers (Gyllensten and Erlich, 1988). Both DNA strands of the PCR products were sequenced using the chain termination method with dATP following the manufacturer's protocols (Pharmacia). The samples were denatured at 80°C for 5 min and electrophoresed through a 6% polyacrylamide-urea gel. After electrophoresis, the gel was fixed, dried and exposed to autoradiographic film.
Ki-ras mutations
For detection of mutations in Ki-ras codons 12 and 13, similar methods were used as described above for the detection of p53 mutations. The primers used were as follows: 5'-ACTGA-ATATAAACTTGTGGTAGTTGGACCT-3' and 5'-TCAAAGA-ATGGTCCTGGACC-3'. The PCRs were performed on 0.25 jig of DNA, in a 50-jil solution containing 0.2 gM dNTPs, 0.3 jM of each primer and one unit of Taq DNA polymerase. The reactions were performed in a DNA thermocycler (Perkin Elmer) with the following temperature profile: 940C for 3 min, then 30 cycles of 94°C for 1 min, 55°C for 1.5 min and 72°C for 2 min, followed by British Journal of Cancer (1997) 76(12) . In this second period, however, Hong Kong demonstrated a substantial increase in the overall incidence of colorectal cancers, attributable almost entirely to increased incidence in the population aged 50 years or above. In Scotland, there was little change in the age-incidence pattern between the two study periods. A similar pattern was also observed in analysing the most recent available data for 1990 and 1991. The exact pathology or ICD coding of these patients in the second study period, i.e. 1983-87, were sought from the Hong Kong Cancer Registry. There were 573 cases with a diagnosis of colorectal cancer in those under the age of 40 years. Only 16 cases (2.78%) had diagnoses of malignancies not related to adenocarcinoma. These included malignant melanoma, malignant teratoma, leiomyosarcoma, squamous carcinoma and basaloid carcinoma. The remainder were listed as either adenocarcinoma or related malignancies, such as mucinous adenocarcinoma or signet ring cell carcinoma.
We next investigated whether this higher incidence of colorectal cancer in the young Hong Kong population was a feature of other eastern Asian populations showing rising overall incidence of the disease. Accordingly, we compared the incidence of colorectal cancer in the younger age groups of several populations, some of southern Chinese extraction, some showing the Eastern Asian recent rise in incidence and others with different features (Table 1). Hong Kong, Singapore (Chinese) and Shanghai -all with populations of predominantly southern Chinese ethnic backgroundall shared the relatively high incidence of colorectal cancer in younger age groups, although only in Hong Kong and Singapore was there a large rise in incidence in older age groups between 1978-82 and 1983-87. In contrast, Tianjin, a northern Chinese city, had low incidence in young people. Japan (Osaka) also had low incidence in young people, although it shared the recent rising British Journal of Cancer (1997) 76(12), 1610-1616 9fw.: Aw, Table 1 Comparison for different countries/cities of the incidence of colorectal carcinoma (CRC) in young age groups (20-40 years) and the overall world agestandardized rate (ASR) in the period 1983-87, and the percentage increase in overall world ASR in this period compared with 1978-82 (Muir et al, 1987;Parkin et al, 1992) trend in older ages with Hong Kong and Singapore. USA (white) had a similar pattern to that of Scotland: a low incidence in young people and a static high incidence in the older age groups. These differences in the incidence of colorectal cancer in various age groups between Hong Kong, Japan (Osaka), Scotland and USA (white) in the period 1983-87 are shown in Figure lB.
Ki-ras mutations
Analysis of Ki-ras mutations using PCR and DNA sequencing identified 21 mutations (29.4%) in the 67 cases studied. Of these, 20 were at codon 12 and only one at codon 13. Twelve were G→A transitions, eight G→T transversions and one was a G→C transversion. The incidence and spectrum of the mutations are not significantly different from those obtained by the same methods in Scotland. The nature of the DNA mutations and the corresponding amino acid changes are indicated in Table 2.

p53 mutations

With immunohistochemical studies, there were 32 (47.1%) cases stained positive for p53 protein, a figure almost identical to that obtained by similar methods in Scotland (Purdie et al, 1991). All these cases showed strong nuclear staining in the majority of the tumour cells. The use of microwave for antigen retrieval would add a further three positive cases that would otherwise be negative.
The various p53 antibodies, namely PAb2 and PAb3 and the polyclonal CM1, showed similar staining patterns. Analysis by polymerase chain reaction-restriction fragment length polymorphism (PCR-RFLP), SSCP and direct DNA sequencing identified 29 mutations in the 67 cases studied. Details of the p53 mutations are summarized in Table 3. All 29 mutations were identified within exons 5-8. No mutation was found in exon 4, 9 and 10. Of the 29 mutations, 19 were either C-*T or GeA transitions and 16 of these occurred at CpG dinucleotides. Frequent mutations were found in four (codons 175, 245, 248 and 282) of the five mutation hotspots of p53, but none in the remaining hot spot at codon 273. In addition, a total of five deletions/insertions were identified. SSCP identified 28 of the 29 mutations in the present series. The remaining case had a missense mutation at codon 245 (GGC->GAC) identified by PCR-RFLP.
All cases with missense mutations showed positive immunohistochemical staining, while all those cases with truncated proteins as a result of frameshift or non-sense mutations showed a negative immunostaining result. Without microwave pretreatment, seven cases had stabilized nuclear p53 protein by positive immunostaining but no p53 mutations. In addition, p53 mutation was not found in the three cases that showed positive p53 staining only after microwave treatment.
Using the same methods, very similar results were obtained in the Scottish population, except that this contained no insertion or deletion mutation (Cripps et al, 1994).
We further compared our results with a large international database of p53 mutations (a total of 3720) from various tumours ; 376 were p53 mutations in colorectal tumours and, of these, 25 tumours had deletion/insertion mutations constituting 6.6%. This is lower than the 17.2% (5/29) deletion/ insertion mutations in our present Chinese series (P = 0.036 using Chi-squared test).
DISCUSSION
The data reveal two outstanding features of the epidemiology of colorectal cancer in Hong Kong. First, there are striking differences between the two study periods 1978-82 and 1983-87; the age-standardized incidence rate rose nearly 20% in women and over 22% in men. This rate of increase, approximately 4% per year, is entirely attributable to classical, late-onset (>50 years old) patients. The age-incidence data for Scotland, in contrast, show no change over the same period, although demonstrating a higher overall incidence. Secondly, there is an excess of patients, by up to fourfold, in the younger age groups in Hong Kong as compared with Scotland. As colorectal cancer in this young age group in Scotland has been shown to be associated with constitutional defects in DNA repair (Liu et al, 1995), this observation prompted British Journal of Cancer (1997) 76(12), 1610-1616 (Cripps et al, 1994) and the world database Hollstein et al, 1994) Database Hong Hot spots 175,245,248,273,282 175,245,248,282 175,245,248 consideration of the possibility that the Hong Kong Chinese population gene pool may be enriched in genes conferring susceptibility to colorectal cancer. Some support for this proposition comes from the similar high incidence of cancer in young people in other southern Chinese communities (Singapore and Shanghai) but not in Japanese, Northern Chinese (Tianjin) or the Caucasian populations of the USA, despite the overall high incidence of colorectal cancer in Some of these countries.
We considered the possibility that either of these observations might merely represent artefacts of changes in reporting practice. Health care that is almost free of charge has been universally available in Hong Kong and in Scotland. The Cancer Registries had been in full operation in Hong Kong since 1963 and in Scotland since 1959. The data were largely gathered from in-patient sources, and duplicated entries were prevented by checking the demographic data. Full population census was carried out in Hong Kong every 10 years with a by-census in betwen two full censuses. We have also shown that the component of inappropriate diagnoses included in the same code as colorectal cancer is trivial. Thus, the formal basis of reporting colorectal cancer diagnosis and of conducting population census is very similar in Scotland and in Hong Kong. Moreover, the unusual nature of this cancer in persons less than 35 years of age militates strongly against inaccurate reporting in either country.
A second possible explanation of the high incidence in young Hong Kong Chinese is a 'cohort effect', signalling the arrival within the population of a factor preferentially affecting a younger age group. A clear example of this would be dietary change favoured by younger people but declined by their elders. With the passage of time, the effects of the hypothetical factor might appear in progressively older people, as the population-at-risk ages. This explanation is superficially attractive, as it might encompass both the raised incidence in young people and the rising incidence in their elders. It is insufficient to explain the difference in age-incidence patterns between Hong Kong and Scotland, however, unless it is accepted that a new cohort, with an incidence of colorectal cancer far exceeding that of Scotland, appeared in Hong Kong at or before 1978. Under these circumstances, it would not be anticipated that the rise in incidence after 1982 should be restricted to persons aged over 50 years. Moreover, Japan, which does show the recent rise in overall incidence of the disease, does not share this elevated incidence in young people, while Shanghai, which has only a slightly rising trend, does show high incidence in the young.
Detailed analysis of mutation types in oncogenes and oncosuppressor genes in cancer has sometimes revealed clues to the nature of both environmental carcinogens and constitutional susceptibility (Shields and Harris, 1991). Thus, characteristic patterns in the relative incidence of transversion and transition mutations have been recorded for Ki-ras and p53 in lung cancer in smokers vs non-smokers (Suzuki et al, 1992;Husgafvel Pursiainen et al, 1993;Takeshima et al, 1993;Yang et al, 1995). A predilection for small insertions and deletions in simple nucleotide repeat sequences in APC has been recorded in patients with evidence of defects in mismatch repair (Huang et al, 1996). We therefore searched for characteristic 'signatures' in the mutations in two genes-Ki-ras and p53 -which we have shown to be involved in colorectal cancer in Hong Kong with similar frequency compared with elsewhere. No differences were observed in the mutations in Ki-ras, but the options in this gene are restricted to the small number of nucleotide loci involved in oncogenic transformation. Mutations in p53 allow more variety, and here we demonstrated an unusually high frequency of small insertion and deletion mutation more than twice that of data combined from all currently reported series of colorectal cancers (Table 4) Hollstein et al, 1994). In contrast no mutations of this type were detected in a series of 21 cases gathered by exactly analogous methods from a Scottish population (Cripps et al, 1994). Of the five frameshift type mutations detected in Hong Kong tumours, two are in recognizable target sequences for mismatch repair gene activity. The age spectrum of the Hong Kong and Scottish tumours analysed for p53 mutation was identical, hence these unusual mutations are not included merely because of a bias towards younger patients in the Hong Kong sample.
Although these data must be regarded as suggestive only, they encourage further search for a genetic basis for increased colorectal cancer susceptibility in the Hong Kong Chinese population. |
/*
 * Older CPUs require the MMCRA sample bit to be always set, but newer
 * CPUs only want it set for some groups. Eventually we will remove all
 * knowledge of this bit in the kernel, oprofile userspace should be
 * setting it when required.
 *
 * In order to keep current installations working we force the bit for
 * those older CPUs. Once everyone has updated their oprofile userspace we
 * can remove this hack.
 */
static inline int mmcra_must_set_sample(void)
{
	/* POWER4/POWER4+ and the PPC970 family are the affected older CPUs. */
	return pvr_version_is(PVR_POWER4) || pvr_version_is(PVR_POWER4p) ||
	       pvr_version_is(PVR_970) || pvr_version_is(PVR_970FX) ||
	       pvr_version_is(PVR_970MP) || pvr_version_is(PVR_970GX);
}
def SHOPIT_PRODUCT_DETAIL_SERIALIZER_FIELDS(self):
    """Fields used by the product *detail* serializer.

    Defaults to the base product serializer fields plus ``'variants'`` and
    ``'attributes'``, unless overridden by the
    ``SHOPIT_PRODUCT_DETAIL_SERIALIZER_FIELDS`` setting.

    Bug fix: the previous implementation deduplicated with ``list(set(...))``,
    which yields a nondeterministic field order (string hashing is randomized
    per process). ``dict.fromkeys`` deduplicates while preserving the
    declaration order, keeping the first occurrence of each field.
    """
    default = list(dict.fromkeys(self.PRODUCT_SERIALIZER_FIELDS + ['variants', 'attributes']))
    return self._setting('SHOPIT_PRODUCT_DETAIL_SERIALIZER_FIELDS', default)
Precision Firearms LLC has gotten a lot of attention recently for its new “Nintendo Glock.”
The firearm design was inspired by the Nintendo Zapper, a toy gun that was used in the retro Nintendo game “Duck Hunt” in which players would shoot at their television screen with a laser toy gun. Proud of its accomplishment, the custom gun maker posted a picture to its Facebook page. It has since been shared nearly 150,000 times and attracted over 8,000 comments.
Many of the commenters have criticized the gun maker for creating a potentially dangerous weapon that bears an uncanny resemblance to a well-known toy—”Nintendo Zapper” is even written on its side. Some were worried that, if mass produced, the gun could lead to disastrous confusion, especially among children who wouldn’t be able to differentiate the gun from a simple plaything. One Facebook user even said that he thought it was for a Nintendo console.
For comparison, here’s a picture of the Nintendo Glock followed by a picture of the Nintendo Zapper:
After receiving thousands of messages within a matter of days, Precision Firearms decided to respond to critics in a Facebook post. It stated that the Nintendo Glock is a “one off” that was custom made for a friend, and it will never be mass produced. |
<filename>src/test/java/com/nachc/accestocsvtool/util/connection/DatabaseUrlFactoryIntegrationTest.java
package com.nachc.accestocsvtool.util.connection;
import static org.junit.Assert.assertTrue;
import java.io.File;
import org.junit.Test;
import com.nachc.accestocsvtool.aaa.params.TestParams;
import com.nachc.accestocsvtool.util.file.FileUtil;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class DatabaseUrlFactoryIntegrationTest {

    /**
     * Resolves the test database file from the test parameters and verifies
     * that a JDBC connection string can be built for it.
     */
    @Test
    public void shouldGetUrl() {
        String testFileName = TestParams.getTestFileResourcePath();
        File file = FileUtil.getFile(testFileName);
        boolean fileExists = file.exists();
        log.info("File exists: " + fileExists);
        // The file must exist before a connection string can be built for it.
        // (Fixed the redundant "== true" boolean comparison.)
        assertTrue(fileExists);
        String url = DatabaseUrlFactory.getJdbcConnectionString(file);
        log.info("Got url: " + url);
        log.info("Done.");
    }

}
|
<reponame>dgageot/hc<filename>tlv8/writer.go
package tlv8
import (
"bytes"
"encoding/binary"
"io"
"math"
)
// writer accumulates TLV8-encoded items in an in-memory buffer.
type writer struct {
	buf bytes.Buffer
}

// newWriter returns an empty writer ready for encoding.
func newWriter() *writer {
	return &writer{}
}

// bytes returns the TLV8 byte stream written so far.
func (wr *writer) bytes() []byte {
	return wr.buf.Bytes()
}
// writeBytes encodes value as one or more TLV items carrying the given tag.
//
// A single TLV8 item's payload is limited to 255 bytes, so longer values
// are split into consecutive fragments that share the same tag.
//
// NOTE(review): an empty value writes no bytes at all (io.ReadFull returns
// io.EOF immediately) -- confirm that zero-length TLV items are never needed.
func (wr *writer) writeBytes(tag uint8, value []byte) {
	buff := bytes.NewBuffer(value)
	for {
		// Read up to one 255-byte fragment; this local shadows the
		// "bytes" package inside the loop body.
		var bytes = make([]byte, 255)
		n, err := io.ReadFull(buff, bytes)
		if err == nil || err == io.ErrUnexpectedEOF {
			v := bytes[:n]
			// Write tag, length, value
			b := append([]byte{tag, uint8(n)}, v[:]...)
			wr.write(b)
			if err == io.ErrUnexpectedEOF { // Fewer than 255 bytes read
				break
			}
			// err == nil: a full fragment was written; loop again for the
			// remainder (the next pass ends with io.EOF if nothing is left).
		} else {
			// io.EOF: the value was empty or an exact multiple of 255
			// bytes and has been fully consumed.
			break
		}
	}
}
// writeUint16 encodes v as a little-endian 2-byte TLV item with the given tag.
func (wr *writer) writeUint16(tag uint8, v uint16) {
	buf := make([]byte, 2)
	binary.LittleEndian.PutUint16(buf, v)
	wr.writeBytes(tag, buf)
}
// writeUint32 encodes v as a little-endian 4-byte TLV item with the given tag.
func (wr *writer) writeUint32(tag uint8, v uint32) {
	buf := make([]byte, 4)
	binary.LittleEndian.PutUint32(buf, v)
	wr.writeBytes(tag, buf)
}
// writeInt16 encodes v as a little-endian 2-byte TLV item with the given tag.
// The two's-complement bit pattern is written, identical to the previous
// manual byte-shifting implementation.
func (wr *writer) writeInt16(tag uint8, v int16) {
	var b [2]byte
	binary.LittleEndian.PutUint16(b[:], uint16(v))
	wr.writeBytes(tag, b[:])
}
// writeInt32 encodes v as a little-endian 4-byte TLV item with the given tag.
// The two's-complement bit pattern is written, identical to the previous
// manual byte-shifting implementation.
func (wr *writer) writeInt32(tag uint8, v int32) {
	var b [4]byte
	binary.LittleEndian.PutUint32(b[:], uint32(v))
	wr.writeBytes(tag, b[:])
}
// writeFloat32 encodes v as a little-endian IEEE-754 4-byte TLV item with
// the given tag.
//
// Bug fix: the previous implementation discarded the result of
// math.Float32bits(v) and always wrote four zero bytes regardless of v.
func (wr *writer) writeFloat32(tag uint8, v float32) {
	var b [4]byte
	binary.LittleEndian.PutUint32(b[:], math.Float32bits(v))
	wr.writeBytes(tag, b[:])
}
// writeBool encodes b as a single-byte TLV item: 1 for true, 0 for false.
func (wr *writer) writeBool(tag uint8, b bool) {
	value := byte(0)
	if b {
		value = 1
	}
	wr.write([]byte{tag, 1, value})
}
// writeString encodes s as a TLV item containing its UTF-8 bytes,
// fragmenting via writeBytes when longer than 255 bytes.
func (wr *writer) writeString(tag uint8, s string) {
	wr.writeBytes(tag, []byte(s))
}

// writeByte encodes a single-byte TLV item: tag, length 1, value b.
func (wr *writer) writeByte(tag uint8, b byte) {
	wr.write([]byte{tag, 1, b})
}

// write appends raw bytes to the underlying buffer.
func (wr *writer) write(b []byte) (int, error) {
	return wr.buf.Write(b)
}
|
Observations of applying DFM(A) in MW mechanics and sheet metal work
In this paper, approaches to applying DFM(A) (Design for Manufacturing and Assembly) in practice are reviewed, based on research results in the areas of MW mechanics and sheet metal work achieved at Lappeenranta University of Technology. These results are supported by a literature review. We present five viewpoints on how to apply DFM(A) in practice in the previously mentioned research areas. The viewpoints are as follows: applied DFM(A) rules for sheet metal work, utilization of lists and forms to analyze MW constructions, development of traditional design methodologies, development of manufacturing technologies for easy production, and integrated DFM(A) approaches which aim to control and manage both the product design process and its costs.
//these tests can be fuzzy around min and max taus but still have to
//satisfy the strict limits of these parameters
// Returns true when ltau lies within [_minLTau, _maxLTau]. The approximate
// comparators (AisGEq/AisLEq) tolerate floating-point noise at the interval
// endpoints while still enforcing the min/max limits.
bool dbsk2d_ishock_edge::isLTauValid_MinMax (double ltau)
{
return AisGEq(ltau, _minLTau) && AisLEq(ltau,_maxLTau);
}
/**
* Implementation of MutableProperties interface using an in-memory HashMap to store the properties.
*/
public class MapMutableProperties implements MutableProperties {
private final Map<String, Object> properties = new HashMap<>();
/** Create a new instance with no key-values */
public MapMutableProperties() {
// Do nothing
}
/**
* Create a new instance with key-values from an existing Properties object *
*
* @param properties properties to copy
*/
public MapMutableProperties(Properties properties) {
properties.getAll().forEach(this.properties::put);
}
/**
* Create a new instance with key-values from an existing Map
*
* @param properties properties to copy
*/
public MapMutableProperties(Map<String, Object> properties) {
properties.forEach(this.properties::put);
}
@Override
public Map<String, Object> getAll() {
return properties;
}
@Override
public void set(String key, Object value) {
properties.put(key, value);
}
@Override
public Optional<Object> remove(String key) {
if (properties.containsKey(key)) {
return Optional.of(properties.remove(key));
} else {
return Optional.empty();
}
}
@Override
public String toString() {
return this.getClass().getName()
+ " ["
+ properties.entrySet().stream()
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.joining(", "))
+ "]";
}
@Override
public int hashCode() {
return Objects.hash(properties);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Properties)) {
return false;
}
Properties p = (Properties) o;
return Objects.equals(properties, p.getAll());
}
} |
// before merging [clazz] into its subtype.
// Re-checks, at merge time, that sourceClass can still be merged into
// targetClass; earlier candidate checks may have been invalidated by merges
// performed in the meantime. Returns false (and logs the abort reason when
// logging is enabled) if any disqualifying condition now holds.
private boolean isStillMergeCandidate(DexProgramClass sourceClass, DexProgramClass targetClass) {
  assert isMergeCandidate(sourceClass, targetClass, pinnedTypes);
  // sourceClass already appears as the target of an earlier merge in this
  // round; it must not be merged again.
  if (mergedClasses.containsValue(sourceClass.getType())) {
    if (Log.ENABLED) {
      AbortReason.ALREADY_MERGED.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  // Merging must not combine or reorder class-initializer side effects:
  // abort when both classes define <clinit>, when initializing the target may
  // have side effects (the predicate excludes sourceClass.type itself --
  // presumably already accounted for; confirm), or when the source is an
  // interface whose initialization has side effects.
  if ((sourceClass.hasClassInitializer() && targetClass.hasClassInitializer())
      || targetClass.classInitializationMayHaveSideEffects(
          appView, type -> type == sourceClass.type)
      || (sourceClass.isInterface()
          && sourceClass.classInitializationMayHaveSideEffects(appView))) {
    if (Log.ENABLED) {
      AbortReason.STATIC_INITIALIZERS.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  // A class can be locked either explicitly (lock candidate) or implicitly
  // through static synchronized methods. If both classes can be locked,
  // merging them would collapse two distinct monitors into one, so abort.
  boolean sourceCanBeSynchronizedOn =
      appView.appInfo().isLockCandidate(sourceClass.type)
          || sourceClass.hasStaticSynchronizedMethods();
  boolean targetCanBeSynchronizedOn =
      appView.appInfo().isLockCandidate(targetClass.type)
          || targetClass.hasStaticSynchronizedMethods();
  if (sourceCanBeSynchronizedOn && targetCanBeSynchronizedOn) {
    if (Log.ENABLED) {
      AbortReason.SOURCE_AND_TARGET_LOCK_CANDIDATES.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  // EnclosingMethod/InnerClasses attributes on the target are not supported
  // by this merge.
  if (targetClass.getEnclosingMethodAttribute() != null
      || !targetClass.getInnerClasses().isEmpty()) {
    if (Log.ENABLED) {
      AbortReason.UNSUPPORTED_ATTRIBUTES.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  // The merge must not change which method an existing invoke resolves to.
  if (methodResolutionMayChange(sourceClass, targetClass)) {
    if (Log.ENABLED) {
      AbortReason.RESOLUTION_FOR_METHODS_MAY_CHANGE.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  // Likewise for field resolution.
  if (fieldResolutionMayChange(sourceClass, targetClass)) {
    if (Log.ENABLED) {
      AbortReason.RESOLUTION_FOR_FIELDS_MAY_CHANGE.printLogMessageForClass(sourceClass);
    }
    return false;
  }
  return true;
}
/**
 * T: O(logN) S: O(1)
 *
 * <p>Binary search over candidate roots in [2, num/2]. Squares are computed
 * in 64-bit arithmetic so that candidate * candidate cannot overflow for any
 * int input.
 */
class Solution {
    public boolean isPerfectSquare(int num) {
        // 0 and 1 are perfect squares; for num >= 2 the root is at most num / 2.
        if (num < 2) {
            return true;
        }
        long low = 2;
        long high = num / 2;
        while (low <= high) {
            long candidate = low + (high - low) / 2;
            long square = candidate * candidate;
            if (square == num) {
                return true;
            } else if (square < num) {
                low = candidate + 1;
            } else {
                high = candidate - 1;
            }
        }
        return false;
    }
}
May 17, 2017
WE ARE Seneca Lake campaigners won an important victory in their years-long fight against fossil fuel expansion in New York state, pushing back plans to store dangerous methane in caverns under Seneca Lake.
Since 2014, activists have organized an innovative campaign of civil disobedience. They won huge support from activists who traveled to the area to protest, and faced police and judges who joined in support of the gas company, jailing peaceful protesters who are trying to protect their health and environment.
Campaigners demonstrated in the worst of icy winter conditions, and over the years, more than 600 people of all ages--some of them in their 90s--went to jail for the cause.
Activists were also successful in mobilizing support from all sectors of the community, including local business owners--the Finger Lakes wineries and restaurants--who are opposed to the gas storage because local jobs and businesses would be threatened by the proposed gas storage and pipelines, with the accompanying inevitable leaks, and water and air pollution.
Climate activists march against fossil fuel development in Seneca Lake, New York (We Are Seneca Lake | Twitter)
The Texas-based company Crestwood had planned to increase the storage of methane in unlined, abandoned salt caverns along the shoreline of the lake, but announced that it was withdrawing these plans last week, despite receiving approval from the Federal Energy Regulatory Commission.
However, even more threatening to the local economy, which is based on agriculture, wineries and tourism, Crestwood has an absurd plan to store the extremely dangerous liquid propane gas (LPG) in lakeside salt caverns. This plan has not yet received a permit from New York state regulators and has been the focus of protest by activists across the state.
THESE PROPOSALS are all part of the relentless push to develop the fracking infrastructure in New York state. While we watch the impact of pipeline leaks in California, South Dakota and Ohio, with explosions, water and air pollution endangering communities, gas companies and local towns continue the push to use New York for the storage and movement of fracked gas.
Frack-friendly Pennsylvania continues to pump gas through New York. More and bigger pipelines are being planned to transport Pennsylvania's gas through New York. A second 16-inch Millennium Pipeline has been built, going through the Southern Tier area.
Work is ongoing on the compressor station in West Windsor, New York, to increase its capacity for gas. Windsor town council members did all they could to support the gas company, over the objections of people who live near the compressor.
Local activist Scott Clarke, who lives near the station, explained that the council changed the town's noise ordinance to 85 decibels from 40 decibels to accommodate the enlargement of the compressor. Clarke and others living near compressors have reported the health impacts suffered by local people, such as rashes, cramps and nosebleeds.
Anti-pipeline activists continue to fight these expansions and were successful in stopping the large Constitution and Northern Access pipelines, which were denied permits because of their threat to the environment.
But there is concern that the pro-fracking Trump government might intervene on the gas companies side, as they did with the Dakota Access Pipeline and Keystone XL. If so, they will be facing well-seasoned campaigners. |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
class WolframFacesTest {
// Use the same config for unit testing as when running as a webservice
@BeforeAll
static void init() throws IOException {
// System.out.println("--------- " + System.getProperty(("com.wolfram.jlink.libdir")));
// System.setProperty("com.wolfram.jlink.libdir", "/usr/local/Wolfram/WolframEngine/12.2/SystemFiles/Links/JLink/");
// System.out.println("--------- " + System.getProperty(("com.wolfram.jlink.libdir")));
Path knownFile = Path.of(Resolver.resolveURL("logback-test.xml").getPath());
String projectRoot = knownFile.getParent().getParent().getParent().toString();
ServiceConfig.initialize(projectRoot + File.separator + "conf" + File.separator + "daner-face-search*.yaml");
}
@Test
void basicTest() throws MalformedURLException, FileNotFoundException {
// Fails with cannot open PNG!?
//String image = Resolver.resolveURL("thispersondoesnotexist.com.jpg").toString();
//assertNotNull(WolframFaces.getSimilarFaces(image, image.endsWith("jpg") ? "JPEG" : "PNG", 2));
// Two calls to test if the state is restored after a call
assertNotNull(WolframFaces.getSimilarFaces("http://17053.dk/pmd.png", "PNG", 2));
SimilarResponseDto response = WolframFaces.getSimilarFaces("http://17053.dk/pmd.png", "PNG", 2);
assertNotNull(response, "Second call should yield a response too");
assertEquals(2, response.getFaces().get(0).getSimilars().size(),
"The number of returnes similars shold be as expected");
}
@Test
void jpegTest() throws MalformedURLException, FileNotFoundException {
String image = "https://thispersondoesnotexist.com/image";
//String image = Resolver.resolveURL("thispersondoesnotexist.com.jpg").toString();
assertNotNull(WolframFaces.getSimilarFaces(image, "auto", 2));
}
@Test
void pngTest() throws MalformedURLException, FileNotFoundException {
String image = "http://17053.dk/pmd.png";
assertNotNull(WolframFaces.getSimilarFaces(image, "auto", 2));
}
} |
def step(self):
        """Perform one entropy-minimising search step.

        Evaluates each cell in the 3x3 neighbourhood of the current position
        (including staying put), picks the move with the smallest expected
        change in posterior entropy, moves there, draws a simulated detection
        and updates the posterior.

        Returns -1 when the searcher is within ``_src_radius`` of the source,
        otherwise ``(xy_position, hit)``.

        NOTE(review): the return type is inconsistent (-1 vs. a tuple);
        callers must handle both cases.
        """
        # Already at the source: nothing to do.
        if np.linalg.norm(self._pos - self._src_pos) < self._src_radius:
            return -1
        self._delta_s_e[:, :] = 0
        self._hit_rates[:, :, :, :] = np.inf
        # Candidate moves: the 3x3 neighbourhood around the current cell.
        xs, ys = self.adjacent(self._pos[0]), self.adjacent(self._pos[1])
        for i, j in product(xs, ys):
            # Distances from the candidate cell (offset i-1, j-1) to every
            # grid cell (assumes _ticks spans the grid axes -- TODO confirm).
            self._diffi = self._pos[0] + (i - 1) - self._ticks
            self._diffj = self._pos[1] + (j - 1) - self._ticks
            self._dists = np.add.outer(self._diffi ** 2, self._diffj ** 2)
            self._dists = np.sqrt(self._dists)
            concentrations = self.concentration_map()
            self._hit_rates[i, j] = self.rate_map(concentrations)
            # Probability mass of the posterior lying within the source
            # radius of the candidate cell, i.e. P(source found there).
            p_found = self._prob_map[self._dists < self._src_radius].sum()
            p_not_found = 1 - p_found
            p_miss = self.P_miss(self._hit_rates[i, j])
            p_hit = 1 - p_miss
            # Posterior entropy after a hypothetical hit (1) or miss (0).
            s_hit = self.entropy(self.update_posterior(self._hit_rates[i, j], 1))
            s_miss = self.entropy(self.update_posterior(self._hit_rates[i, j], 0))
            delta_s_not_found = p_hit * (s_hit - self._s) + p_miss * (s_miss - self._s)
            # Expected entropy change: finding the source drops entropy to 0.
            self._delta_s_e[i, j] = p_found * -self._s + p_not_found * delta_s_not_found
        # Pick the move with the smallest expected entropy change; the 3x3
        # grid is flattened row-major, hence the //3 and %3 decomposition.
        argmin_1d = np.argmin(self._delta_s_e)
        argmin = np.zeros(2, dtype=int)
        argmin[0] = argmin_1d // 3
        argmin[1] = argmin_1d % 3
        self._pos[0] += argmin[0] - 1
        self._pos[1] += argmin[1] - 1
        if np.linalg.norm(self._pos - self._src_pos) < self._src_radius:
            # Source found: collapse the posterior.
            self._prob_map[:, :] = 0
            return -1
        # Simulate a detection at the new position and update the posterior
        # with the actual (simulated) observation.
        hit = self.poisson(self._hit_rates[argmin[0], argmin[1],
                                           self._src_pos[0], self._src_pos[1]])
        self.update_posterior(self._hit_rates[argmin[0], argmin[1]], hit, inplace=True)
        return self.ij2xy(self._pos), bool(hit)
<filename>lm_eval/tasks/piqa.py<gh_stars>1-10
import numpy as np
from lm_eval.base import MultipleChoiceTask, rf
from ..metrics import mean
from .common import HFTask
class PiQA(HFTask, MultipleChoiceTask):
    """PIQA (Physical Interaction QA) multiple-choice task."""

    DATASET_PATH = "piqa"
    DATASET_NAME = None

    def has_training_docs(self):
        return True

    def has_validation_docs(self):
        return True

    def has_test_docs(self):
        return False

    def fewshot_description(self):
        # TODO: figure out fewshot description
        return ""

    def _convert_standard(self, doc):
        """Map a raw HF example onto the multiple-choice schema."""
        return {
            "goal": doc["goal"],
            "choices": [doc["sol1"], doc["sol2"]],
            "gold": doc["label"],
        }

    def doc_to_text(self, doc):
        return "Question: {}\nAnswer:".format(doc["goal"])
class PiQACls(PiQA):
    """PIQA variant where the answer choices are the letters "A" and "B"."""

    def _convert_standard(self, doc):
        return {
            "goal": doc["goal"],
            "choices": ["A", "B"],
            "gold": doc["label"],
        }

    def doc_to_text(self, doc):
        first, second = doc["choices"]
        return "A: {}\nB: {}\nQuestion: {}\nAnswer:".format(first, second, doc["goal"])
class PiQAExtractive(PiQA):
    """PIQA variant that lists both candidates in the prompt and scores only
    the suffix of each choice after their longest shared word prefix."""

    def _convert_standard(self, doc):
        # Standard multiple-choice fields: prompt goal, the two solution
        # texts as choices, and the gold label index.
        out_doc = {
            "goal": doc["goal"],
            "choices": [doc["sol1"], doc["sol2"]],
            "gold": doc["label"],
        }
        return out_doc

    def doc_to_text(self, doc):
        text = f"Question: {doc['goal']}\nCandidates:\nA: {doc['choices'][0]}\nB: {doc['choices'][1]}\nAnswer:"
        return text

    def construct_requests(self, doc, ctx):
        # Accumulate words shared by both choices; w1/w2 deliberately leak out
        # of the loop, holding the first *differing* word pair when `break` fires.
        choices_prefix = ""
        for w1, w2 in zip(doc["choices"][0].split(), doc["choices"][1].split()):
            if w1 == w2:
                choices_prefix += f" {w1}"
                continue
            break
        # NOTE(review): if one choice is a word-prefix of the other, the loop
        # never breaks and w1/w2 end up as the last *equal* pair, making both
        # continuations identical; if either choice is empty, w1/w2 are
        # undefined and this raises NameError — confirm the dataset guarantees
        # the choices differ within the zipped range.
        lls = [
            rf.loglikelihood(ctx, choices_prefix + f" {w1}")[0],
            rf.loglikelihood(ctx, choices_prefix + f" {w2}")[0],
        ]
        return lls
<filename>exercises/014-convert-and-fix-4/main.go
package main
import "fmt"
func main() {
	// Exercise: convert the int so the sum prints the expected 9.5.
	var age int = 2
	sum := 7.5 + float64(age)
	fmt.Println(sum)
}
|
<reponame>jgwkmoon/VIM<filename>VIMNavi/app/src/main/java/pnu/stemlab/vimnavi/VIMPoint.java
package pnu.stemlab.vimnavi;
import org.json.JSONObject;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
/**
 * A point in 2D/3D space used by the VIM navigation module.
 * Extends {@link VIMPosition} with distance, direction, interpolation and
 * JTS-geometry conversion helpers.
 */
public class VIMPoint extends VIMPosition {
    /** Creates a 2D point (z is left at the VIMPosition default). */
    public VIMPoint(double x, double y) {
        super(x, y);
    }
    /** Creates a 3D point. */
    public VIMPoint(double x, double y, double z) {
        super(x, y, z);
    }
    /** Copy-constructs from a generic position. */
    public VIMPoint(VIMPosition pos) {
        super(pos);
    }
    /** Copy constructor. */
    public VIMPoint(VIMPoint pt) {
        super((VIMPosition) pt);
    }
    /** Builds a point from its JSON representation (delegated to VIMPosition). */
    public VIMPoint(JSONObject jsonPoint) {
        super(jsonPoint);
    }

    /**
     * Converts this point to a 2D JTS {@link Point}; the z coordinate is dropped.
     *
     * @return a new JTS point located at (x, y)
     */
    public Point toJTSPoint() {
        GeometryFactory gfactory = new GeometryFactory();
        Point jtsPoint = gfactory.createPoint(
            new Coordinate(x, y)
        );
        return jtsPoint;
    }

    /**
     * Tests exact 2D coordinate overlap.
     *
     * @param pt the point to compare against
     * @return true when both x and y match exactly
     */
    public boolean isPointOverlap(VIMPoint pt) {
        // Delegate to isEqual2D so the two equality checks stay consistent.
        return isEqual2D(pt);
    }

    /**
     * Euclidean distance in the XY plane.
     *
     * @param pt the other point
     * @return sqrt(dx^2 + dy^2)
     */
    public double getDist2(VIMPoint pt) {
        double dx = this.x - pt.x;
        double dy = this.y - pt.y;
        return Math.sqrt(dx*dx + dy*dy);
    }

    /**
     * Euclidean distance in 3D.
     *
     * @param pt the other point
     * @return sqrt(dx^2 + dy^2 + dz^2)
     */
    public double getDist3(VIMPoint pt) {
        double dx = this.x - pt.x;
        double dy = this.y - pt.y;
        double dz = this.z - pt.z;
        return Math.sqrt(dx*dx + dy*dy + dz*dz);
    }

    /**
     * Direction in degrees from this point towards {@code pt}, measured
     * counter-clockwise from the positive x axis, in [0, 360).
     *
     * NOTE(review): when {@code pt} coincides with this point the distance is
     * zero and the result is NaN — confirm callers never pass coincident points.
     *
     * @param pt the target point
     * @return direction angle in degrees
     */
    public double getDirection(VIMPoint pt) {
        double dx = pt.x - this.x;
        double dist = getDist2(pt);
        double cosineValue = dx / dist;
        // acos only covers [0, 180]; the sign of dy picks the half-plane below.
        double radianAngle = Math.acos( cosineValue ) ;
        double degreeAngle = Math.toDegrees( radianAngle );
        double side = pt.y - this.y;
        return side>=0.0 ? degreeAngle : 360.0-degreeAngle;
    }

    /** Exact 2D coordinate equality. */
    public boolean isEqual2D(VIMPoint pt) {
        return this.x == pt.x && this.y == pt.y;
    }

    /** Exact 3D coordinate equality. */
    public boolean isEqual3D(VIMPoint pt) {
        return this.x == pt.x && this.y == pt.y && this.z == pt.z;
    }

    /**
     * Linear interpolation in 2D: weight 0 returns this point, weight 1 the
     * target, values in between a proportional position along the segment.
     *
     * @param target the segment end point
     * @param weight interpolation factor
     * @return the interpolated point
     */
    public VIMPoint getMovingPos2DByWeight(VIMPoint target, double weight) {
        if(weight==0.0)
            return this;
        if(weight==1.0)
            return target;
        double x = this.x + weight*(target.x-this.x);
        double y = this.y + weight*(target.y-this.y);
        return new VIMPoint(x,y);
    }

    /** Linear interpolation in 3D (see {@link #getMovingPos2DByWeight}). */
    public VIMPoint getMovingPos3DByWeight(VIMPoint target, double weight) {
        if(weight==0.0)
            return this;
        if(weight==1.0)
            return target;
        double x = this.x + weight*(target.x-this.x);
        double y = this.y + weight*(target.y-this.y);
        double z = this.z + weight*(target.z-this.z);
        return new VIMPoint(x,y,z);
    }

    /**
     * Position reached by moving {@code dist} (2D units) from this point
     * towards {@code target}.
     *
     * @param target the destination point
     * @param dist the 2D distance to travel
     * @return the intermediate point
     */
    public VIMPoint getMovingPos2DByDist(VIMPoint target, double dist) {
        double targetDist = getDist2(target);
        if(dist==0.0)
            return this;
        if(dist==targetDist)
            return target;
        double weight = dist / targetDist;
        return getMovingPos2DByWeight(target, weight);
    }

    /**
     * Position reached by moving {@code dist} (3D units) from this point
     * towards {@code target}.
     *
     * @param target the destination point
     * @param dist the 3D distance to travel
     * @return the intermediate point
     */
    public VIMPoint getMovingPos3DByDist(VIMPoint target, double dist) {
        // BUG FIX: the weight must be derived from the 3D distance; the
        // original called getDist2, mis-scaling the step whenever z differs.
        double targetDist = getDist3(target);
        if(dist==0.0)
            return this;
        if(dist==targetDist)
            return target;
        double weight = dist / targetDist;
        return getMovingPos3DByWeight(target, weight);
    }

    /** The floor value is stored in the inherited z coordinate. */
    public double getFloor() {
        return super.z;
    }
}
|
The army has firmed up plans to retire its oldest light-utility helicopters, pushing the panic button, as a string of recent crashes have blemished the safety record of the machines.
A South Block source said that the early variants of the Cheetah and Chetak fleet will be replaced by the locally-made advanced light helicopter (ALH), also known as Dhruv.
The source said the army’s aviation wing would replace 41 Cheetahs and Chetaks with new helicopters from the “ALH reserve stock” and the proposal would be sent for Cabinet committee on security approval soon.
The army and air force grounded their fleets of 280 light-utility helicopters last December, concerned if the machines were fit to fly, after three aviators were killed in a crash in West Bengal.
Chequered Past November 30, 2016: A Cheetah helicopter crashes at Sukna in West Bengal, killing three officers
March 11, 2016: A Cheetah helicopter crash lands in Hoshiarpur
February 3, 2015: Army chief Gen Bipin Rawat (then a Lt Gen) survives Cheetah crash in Dimapur
October 2, 2014: Three army aviators killed in Cheetah crash near Bareilly
May 23, 2012: Pilot killed in Cheetah crash at Siachen
August 18, 2010: 3 officers die in Cheetah crash in Nagaland
The Cheetah and Chetak helicopters, lifeline of troops in high-altitude areas, including the Siachen glacier, are being cleared for flying in batches by Hindustan Aeronautics Limited (HAL) after carrying out a comprehensive safety check.
The Kamov-226T light utility choppers, to be built with Russia, are to replace these helicopters. However, the $1-billion programme is yet to kick off and the military may have to wait several years for the new machines. Russia will supply 60 helicopters in flyaway condition while the remaining 140 will be manufactured in India.
“The manufacturing line of the ALH is quite stable and the machines are coming out at a pretty steady rate. The Kamovs will take time to come,” a senior officer said. The army operates a mix of 150 Cheetahs and Chetaks.
An IAF Cheetah helicopter that crash landed in Maili village in Hoshiarpur district in March 2016. ( ANI File )
The design of these helicopters is more than 50 years old and their airworthiness is being questioned after a string of mishaps. Nine personnel were killed in six accidents involving these machines during 2012-15. The Cheetahs play a crucial role in supporting the army on the Siachen glacier, one of the world’s highest battlefields, flying at more than 20,000 feet.
A group of army wives met defence minister Manohar Parrikar in 2015, demanding the helicopters be retired. HAL has licence-produced 625 Cheetah and Chetak helicopters. It no longer builds them but is responsible for their maintenance and repair, a cause for concern.
First Published: Jan 07, 2017 08:31 IST |
/**
 * Clone
 * Allocate a copy of this ByteBuffer's data on the heap and return a pointer
 * to it. The clone's read and write positions are reset to 0, so only the
 * buffer contents — not the positional state — are duplicated.
 *
 * NOTE(review): with plain `new`, allocation failure throws std::bad_alloc
 * rather than returning NULL; the previous "NULL if no more memory available"
 * claim did not match the code.
 *
 * @return A pointer to the newly cloned ByteBuffer (caller takes ownership)
 */
ByteBuffer* ByteBuffer::clone() {
	ByteBuffer* ret = new ByteBuffer(buf.size());
	// Copy byte-by-byte through the indexed accessors so the source's
	// read/write cursors are left untouched.
	for(unsigned int i = 0; i < buf.size(); i++) {
		ret->put(i, (byte)get(i));
	}
	// Start the clone at the beginning of the buffer.
	ret->setReadPos(0);
	ret->setWritePos(0);
	return ret;
}
// True when `name` (case-insensitive) is one of the form-control tag names.
export function isInput(name: string): boolean {
    const tag = name.toLowerCase()
    return tag === "input" || tag === "textarea" || tag === "select"
}
// Trigger the browser vibration API; `pattern` is a duration in ms or an
// alternating vibrate/pause pattern. Returns whatever navigator.vibrate
// reports (false when the request was rejected).
export function vibrate(pattern: number | number[]): boolean {
    return window.navigator.vibrate(pattern)
}
|
// NewRepair creates a controller that periodically ensures that there is no stale data in OVN
func NewRepair(interval time.Duration,
serviceLister corelisters.ServiceLister,
) *Repair {
return &Repair{
interval: interval,
serviceLister: serviceLister,
}
} |
from collections import deque
import sys
sys.setrecursionlimit(10**7)
input = sys.stdin.readline
def solve():
    """Read an h x w grid of distinct values plus q queries from stdin.

    For each query (l, r) print the total Manhattan distance accumulated by
    hopping between the cells holding l, l+d, l+2d, ..., r.
    """
    h, w, d = map(int, input().split())
    grid = [list(map(int, input().split())) for _ in range(h)]

    # position[v] = (row, col) of the cell containing value v + 1.
    position = [None] * (h * w)
    for r in range(h):
        for c in range(w):
            position[grid[r][c] - 1] = (r, c)

    # prefix[v] = total Manhattan distance along the chain v-d -> v (0-based).
    prefix = [0] * (h * w + 1)
    for v in range(d, h * w):
        pr, pc = position[v - d]
        cr, cc = position[v]
        prefix[v] = prefix[v - d] + abs(cr - pr) + abs(cc - pc)

    q = int(input())
    answers = []
    for _ in range(q):
        l, r = map(int, input().split())
        answers.append(prefix[r - 1] - prefix[l - 1])
    print("\n".join(map(str, answers)))


solve()
SCALING LIMIT OF THE PRUDENT WALK
We describe the scaling limit of the nearest neighbour prudent walk on $\mathbb{Z}^2$, which performs steps uniformly in directions in which it does not see sites already visited. We show that the scaling limit is given by the process $Z_u = \int_0^{3u/7} \left( \sigma_1 \mathbf{1}_{\{W(s) \ge 0\}}\, \vec{e}_1 + \sigma_2 \mathbf{1}_{\{W(s) < 0\}}\, \vec{e}_2 \right) ds$, $u \in [0,1]$, where $W$ is the one-dimensional Brownian motion and $\sigma_1, \sigma_2$ two random signs. In particular, the asymptotic speed of the walk is well-defined in the $L^1$-norm and equals $\frac{3}{7}$.
<reponame>AlSpinks/deephaven-core
package io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb;
import elemental2.core.Uint8Array;
import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.Ticket;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
import jsinterop.base.Js;
import jsinterop.base.JsPropertyMap;
/**
 * JsInterop binding for the Deephaven gRPC console message
 * {@code CloseDocumentRequest} (generated protobuf JS bindings; do not edit
 * by hand). Carries the console {@link Ticket} and the versioned text
 * document to close.
 */
@JsType(
        isNative = true,
        name = "dhinternal.io.deephaven.proto.console_pb.CloseDocumentRequest",
        namespace = JsPackage.GLOBAL)
public class CloseDocumentRequest {
    // Plain-JS object shape returned by the static toObject(boolean, msg) overload.
    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
    public interface ToObjectReturnType {
        @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
        public interface ConsoleIdFieldType {
            // The ticket payload may arrive as a base64 String or a raw Uint8Array.
            @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
            public interface GetTicketUnionType {
                @JsOverlay
                static CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of(
                        Object o) {
                    return Js.cast(o);
                }

                @JsOverlay
                default String asString() {
                    return Js.asString(this);
                }

                @JsOverlay
                default Uint8Array asUint8Array() {
                    return Js.cast(this);
                }

                @JsOverlay
                default boolean isString() {
                    return (Object) this instanceof String;
                }

                @JsOverlay
                default boolean isUint8Array() {
                    return (Object) this instanceof Uint8Array;
                }
            }

            @JsOverlay
            static CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType create() {
                return Js.uncheckedCast(JsPropertyMap.of());
            }

            @JsProperty
            CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType getTicket();

            @JsProperty
            void setTicket(
                    CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket);

            @JsOverlay
            default void setTicket(String ticket) {
                setTicket(
                        Js.<CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType>uncheckedCast(
                                ticket));
            }

            @JsOverlay
            default void setTicket(Uint8Array ticket) {
                setTicket(
                        Js.<CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType>uncheckedCast(
                                ticket));
            }
        }

        @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
        public interface TextDocumentFieldType {
            @JsOverlay
            static CloseDocumentRequest.ToObjectReturnType.TextDocumentFieldType create() {
                return Js.uncheckedCast(JsPropertyMap.of());
            }

            @JsProperty
            String getUri();

            @JsProperty
            double getVersion();

            @JsProperty
            void setUri(String uri);

            @JsProperty
            void setVersion(double version);
        }

        @JsOverlay
        static CloseDocumentRequest.ToObjectReturnType create() {
            return Js.uncheckedCast(JsPropertyMap.of());
        }

        @JsProperty
        CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType getConsoleId();

        @JsProperty
        CloseDocumentRequest.ToObjectReturnType.TextDocumentFieldType getTextDocument();

        @JsProperty
        void setConsoleId(CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType consoleId);

        @JsProperty
        void setTextDocument(
                CloseDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument);
    }

    // Second, structurally identical plain-JS shape used by the instance
    // toObject() overloads (generated duplication from the protobuf tooling).
    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
    public interface ToObjectReturnType0 {
        @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
        public interface ConsoleIdFieldType {
            // The ticket payload may arrive as a base64 String or a raw Uint8Array.
            @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
            public interface GetTicketUnionType {
                @JsOverlay
                static CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of(
                        Object o) {
                    return Js.cast(o);
                }

                @JsOverlay
                default String asString() {
                    return Js.asString(this);
                }

                @JsOverlay
                default Uint8Array asUint8Array() {
                    return Js.cast(this);
                }

                @JsOverlay
                default boolean isString() {
                    return (Object) this instanceof String;
                }

                @JsOverlay
                default boolean isUint8Array() {
                    return (Object) this instanceof Uint8Array;
                }
            }

            @JsOverlay
            static CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType create() {
                return Js.uncheckedCast(JsPropertyMap.of());
            }

            @JsProperty
            CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType getTicket();

            @JsProperty
            void setTicket(
                    CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket);

            @JsOverlay
            default void setTicket(String ticket) {
                setTicket(
                        Js.<CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType>uncheckedCast(
                                ticket));
            }

            @JsOverlay
            default void setTicket(Uint8Array ticket) {
                setTicket(
                        Js.<CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType>uncheckedCast(
                                ticket));
            }
        }

        @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
        public interface TextDocumentFieldType {
            @JsOverlay
            static CloseDocumentRequest.ToObjectReturnType0.TextDocumentFieldType create() {
                return Js.uncheckedCast(JsPropertyMap.of());
            }

            @JsProperty
            String getUri();

            @JsProperty
            double getVersion();

            @JsProperty
            void setUri(String uri);

            @JsProperty
            void setVersion(double version);
        }

        @JsOverlay
        static CloseDocumentRequest.ToObjectReturnType0 create() {
            return Js.uncheckedCast(JsPropertyMap.of());
        }

        @JsProperty
        CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType getConsoleId();

        @JsProperty
        CloseDocumentRequest.ToObjectReturnType0.TextDocumentFieldType getTextDocument();

        @JsProperty
        void setConsoleId(CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType consoleId);

        @JsProperty
        void setTextDocument(
                CloseDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument);
    }

    // Native protobuf (de)serialization entry points.
    public static native CloseDocumentRequest deserializeBinary(Uint8Array bytes);

    public static native CloseDocumentRequest deserializeBinaryFromReader(
            CloseDocumentRequest message, Object reader);

    public static native void serializeBinaryToWriter(CloseDocumentRequest message, Object writer);

    public static native CloseDocumentRequest.ToObjectReturnType toObject(
            boolean includeInstance, CloseDocumentRequest msg);

    // Field accessors for consoleId (Ticket) and textDocument
    // (VersionedTextDocumentIdentifier — resolved from the same package).
    public native void clearConsoleId();

    public native void clearTextDocument();

    public native Ticket getConsoleId();

    public native VersionedTextDocumentIdentifier getTextDocument();

    public native boolean hasConsoleId();

    public native boolean hasTextDocument();

    public native Uint8Array serializeBinary();

    public native void setConsoleId();

    public native void setConsoleId(Ticket value);

    public native void setTextDocument();

    public native void setTextDocument(VersionedTextDocumentIdentifier value);

    public native CloseDocumentRequest.ToObjectReturnType0 toObject();

    public native CloseDocumentRequest.ToObjectReturnType0 toObject(boolean includeInstance);
}
|
// IsCanceled returns true if the Move's status is `CANCELED`, false otherwise
func (m Move) IsCanceled() *bool {
if m.Status == MoveStatusCANCELED {
return swag.Bool(true)
}
return swag.Bool(false)
} |
/**
* @file methods/approx_kfn/drusilla_select.hpp
* @author Ryan Curtin
*
* An implementation of the approximate furthest neighbor algorithm specified in
* the following paper:
*
* @code
* @incollection{curtin2016fast,
* title={Fast approximate furthest neighbors with data-dependent candidate
* selection},
* author={Curtin, R.R., and Gardner, A.B.},
* booktitle={Similarity Search and Applications},
* pages={221--235},
* year={2016},
* publisher={Springer}
* }
* @endcode
*
* This algorithm, called DrusillaSelect, constructs a candidate set of points
* to query to find an approximate furthest neighbor. The strange name is a
* result of the algorithm being named after a cat. The cat in question may be
* viewed at http://www.ratml.org/misc_img/drusilla_fence.png.
*
* mlpack is free software; you may redistribute it and/or modify it under the
* terms of the 3-clause BSD license. You should have received a copy of the
* 3-clause BSD license along with mlpack. If not, see
* http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/
#ifndef MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_HPP
#define MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_HPP
#include <mlpack/core.hpp>
namespace mlpack {
/**
 * DrusillaSelect builds a small, data-dependent candidate set (of size l * m)
 * from the reference set; at query time only these candidates are scanned to
 * return approximate furthest neighbors. See the paper cited in the file
 * header for the algorithm's derivation.
 *
 * @tparam MatType Type of matrix holding the data (default arma::mat).
 */
template<typename MatType = arma::mat>
class DrusillaSelect
{
 public:
  /**
   * Construct the DrusillaSelect object with the given reference set (this is
   * the set that will be searched). The resulting set of candidate points that
   * will be searched at query time will have size l*m.
   *
   * @param referenceSet Set of reference data.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  DrusillaSelect(const MatType& referenceSet,
                 const size_t l,
                 const size_t m);

  /**
   * Construct the DrusillaSelect object with no given reference set. Be sure
   * to call Train() before calling Search()!
   *
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  DrusillaSelect(const size_t l, const size_t m);

  /**
   * Build the set of candidate points on the given reference set. If l and m
   * are left unspecified, then the values set in the constructor will be used
   * instead.
   *
   * @param referenceSet Set to extract candidate points from.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  void Train(const MatType& referenceSet,
             const size_t l = 0,
             const size_t m = 0);

  /**
   * Search for the k furthest neighbors of the given query set. (The query set
   * can contain just one point: that is okay.) The results will be stored in
   * the given neighbors and distances matrices, in the same format as the
   * NeighborSearch and LSHSearch classes. That is, each column in the
   * neighbors and distances matrices will refer to a single query point, and
   * the k'th row in that column will refer to the k'th candidate neighbor or
   * distance for that query point.
   *
   * @param querySet Set of query points to search.
   * @param k Number of furthest neighbors to search for.
   * @param neighbors Matrix to store resulting neighbors in.
   * @param distances Matrix to store resulting distances in.
   */
  void Search(const MatType& querySet,
              const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances);

  /**
   * Serialize the model.
   */
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);

  //! Access the candidate set.
  const MatType& CandidateSet() const { return candidateSet; }
  //! Modify the candidate set.  Be careful!
  MatType& CandidateSet() { return candidateSet; }

  //! Access the indices of points in the candidate set.
  const arma::Col<size_t>& CandidateIndices() const { return candidateIndices; }
  //! Modify the indices of points in the candidate set.  Be careful!
  arma::Col<size_t>& CandidateIndices() { return candidateIndices; }

 private:
  //! The reference set (candidate points extracted at Train() time).
  MatType candidateSet;
  //! Indices of each point in the reference set.
  arma::Col<size_t> candidateIndices;

  //! The number of projections.
  size_t l;
  //! The number of points in each projection.
  size_t m;
};
} // namespace mlpack
// Include implementation.
#include "drusilla_select_impl.hpp"
#endif
|
Fiberoptic bronchoscopy as an outpatient procedure.
The advent of flexible fiberoptic bronchoscopy (FOB) has had a dramatic impact on the practice of pulmonary medicine. This procedure is easily performed in widely varied clinical settings [1], provides maximal visualization of the tracheobronchial tree [2], results in an exceedingly low complication rate [3,4], and does not require general anesthesia. See also p 30. Consequently, flexible FOB has emerged as the procedure of choice for the diagnosis and management of most bronchopulmonary disorders requiring bronchoscopic examination and has completely replaced bronchoscopy using the rigid tube, except perhaps in some cases of exceedingly brisk hemoptysis or the aspiration of a large foreign body. Practically all studies of the flexible FOB have thus far been performed in hospitalized patients. Donlan et al, in a preliminary report on 318 flexible FOB procedures in the Archives (138:698-699), first documented the safety and effectiveness of the procedure performed on an outpatient basis.
<reponame>olivier-nouchi/laughing-octo-disco
from flask import Flask
import os
example_app = Flask(__name__)
@example_app.route('/')
def hello_world():
    # Landing page: respond with a fixed greeting string.
    message = 'I promise this is my last change. <NAME>'
    return message
if __name__ == '__main__':
    # Heroku sets the PORT environment variable; when present, listen on all
    # interfaces on that port, otherwise fall back to Flask's defaults
    # (localhost, default port) for local development.
    heroku_port = os.environ.get('PORT')
    if not heroku_port:
        example_app.run()
    else:
        example_app.run(host='0.0.0.0', port=int(heroku_port))
|
def hate_love_phrase(n):
    """Build the alternating "I hate / I love ... it" sentence for n clauses.

    The i-th clause (0-based) is "I hate" for even i and "I love" for odd i;
    clauses are joined with " that " and the sentence ends with " it".
    The original script's table also mapped n == 0 to a single "I hate",
    which max(n, 1) preserves.
    """
    clauses = ["I hate" if i % 2 == 0 else "I love" for i in range(max(n, 1))]
    return " that ".join(clauses) + " it"


if __name__ == "__main__":
    # Read n from stdin and print the sentence, as the original script did.
    # (The original also built two throwaway lists and precomputed all 100
    # entries of a dict regardless of n — that dead code is removed.)
    print(hate_love_phrase(int(input())))
|
def message_view(request, sender, receiver):
    """Render the chat page between `sender` and `receiver` (GET only;
    any other method implicitly returns None, as before)."""
    if request.method == "GET":
        receiver_user = User.objects.get(id=receiver)
        conversation = (
            Message.objects.filter(sender_id=sender, receiver_id=receiver)
            | Message.objects.filter(sender_id=receiver, receiver_id=sender)
        )
        context = {
            'users': User.objects.exclude(username=request.user.username),
            'receiver': receiver_user,
            'messages': conversation,
        }
        return render(request, "chat/messages.html", context)
/**
 * Encrypt or decrypt data with AES using the given raw key bytes.
 *
 * NOTE(review): Cipher.getInstance(AES) with no explicit mode/padding falls
 * back to the provider default (typically "AES/ECB/PKCS5Padding"); ECB leaks
 * plaintext patterns — confirm this is acceptable for the data being
 * protected.
 *
 * @param input plain (or cipher) text byte array to transform
 * @param key raw AES key bytes
 * @param mode Cipher.ENCRYPT_MODE or Cipher.DECRYPT_MODE
 * @return transformed byte array
 */
private static byte[] aes(byte[] input, byte[] key, int mode) {
    try {
        // Wrap the raw bytes as an AES SecretKey and run a one-shot transform.
        SecretKey secretKey = new SecretKeySpec(key, AES);
        Cipher cipher = Cipher.getInstance(AES);
        cipher.init(mode, secretKey);
        return cipher.doFinal(input);
    } catch (GeneralSecurityException e) {
        // Rethrow as unchecked via the project's ExceptionUtils helper.
        throw ExceptionUtils.unchecked(e);
    }
}
Extracting material data for superplastic forming simulations
In subatomic particle physics, unstable particles can be studied with a so-called vertex detector placed inside a particle accelerator. A detecting unit close to the accelerator bunch of charged particles must be separated from the accelerator vacuum. A thin sheet with a complex 3D shape prevents the detector vacuum from polluting the accelerator vacuum. Hence, this sheet should be completely leak tight with respect to gases. To produce such a complex thin sheet, superplastic forming can be very attractive if a small number of products is needed. This is a forming process in which a sheet of superplastic material is pressed into a one-sided die by means of gas pressure.
In order to develop a material model which can be used in superplastic forming simulations, uniaxial and biaxial experiments are necessary. The uniaxial, tensile, experiments provide information about the one-dimensional material data, such as the stress as a function of equivalent plastic strain and strain rate. These data are extracted from the experiments by using inverse modeling, i.e. simulation of the tensile experiment. To fit these curves into a general material model, three parts in the uniaxial mechanical behavior are considered: initial flow stress, strain hardening and strain softening caused by void growth. Since failure in superplastic materials is preceded by the nucleation and growth of cavities inside the material, the void volume fractions of the tested specimens were also observed.
A very important factor in this research is the study of the permeability of the formed sheet with respect to gas. If internal voids start to coalesce, through-thickness channels will start to form, thereby providing a gas leak path. To study the two-dimensional behavior, including the gas leakage, bulge experiments were performed. Within these experiments, circular sheets were pressed into a cylindrically shaped die. From these experiments it followed that the plastic straining is dependent on an applied backpressure during the forming stage. This backpressure can postpone cavity nucleation and growth.
On a level, I really did want to say goodbye to my Mom.
Yeah, she'd put me through hell. She'd nearly killed me, and refused to see that she'd done anything wrong. She'd tried to live through me, she'd expected me to take the place of a father I'd never really known. She'd endangered my life out of sheer delusion.
But that was all it had been. Delusion. After my biodad died, she'd spent most of her life in front of the TV. It'd be easy to pin her down as negligent - and make no mistake, she was - but it was a negligence and dissociation from reality born of grief. She took refuge in the escapism, and over time it became her reality. When she tried to push me into the grass, she didn't realise the danger she was putting me in. She'd really come to believe it was as simple as sending an unprepared ten year-old into the wilderness and expecting him to come back a Champion.
And for all that had happened, she'd have liked to see this day. It was, after all, what she'd wanted in the first place. I could have invited her; could have gotten the Professor to promise he wouldn't report the violation. He would have hated it, but if I asked, he would have done it.
But while he wouldn't stop me from inviting my Mom, by that same token of respect I wouldn't ask it of him. After all he'd done to protect me from her, it would be a slap in the face that he didn't deserve.
I didn't know what sort of state she was in these days - the Professor had wielded his influence as a shield to keep her away from me, and what few meetings we'd had were uncomfortable, harried, and brief. She'd lie in wait for me after school, wearing too much makeup and a smile of equal parts desperation and adulation, pressing some candy bar or Pokémon toy into my hands as she spoke breathlessly and without pause. Asking questions about my schoolwork, how my life was going, was the Professor treating me right, look how you've grown, I miss you I love you my boy my precious boy...
I never said anything. I was too scared. She reeked of anxiety, wine, and cheap perfume. I knew I wasn't supposed to be seen with her; I didn't want to be seen with her. She was embarrassing and desperate - a thin, twisted, ghoulish parody of the mother I'd grown up with. She'd keep her hands on my shoulders for a few minutes while she raced through a one-sided conversation, until inevitably the Professor or a teacher would notice and step in. Every time she'd protest with empty reassurances that no, it's fine, it's okay I'm allowed to see him, and every time she'd be ignored. And I'd be hauled away, mute and shaking.
She spent a few short stints in jail for violating restraining orders, and even underwent a brief committal to a psychiatric facility. As the penalties mounted, she came to accept that she'd lost me. The unexpected visits had slowed, then stopped entirely.
She was frightening, manipulative, and negligent in the extreme. She'd almost killed me, and she'd never admit she'd been wrong. She was the worst parent in Pallet Town - and, for all I knew, the world. She'd threatened to kill herself if she couldn't have me, right in front of me - her only son, a boy of twelve. She was abusive, and getting away from her was the best thing that had ever happened to me.
But for all that, she was still my Mom.
One day, I promised myself, I'd come back and see her.
Blue hadn't made it to the farewell ceremony by the tall grass, either. Daisy had knocked on his bedroom door, and when there'd been no answer she'd ducked her head in to check. Gone. No note, nor had anyone seen him leave. It was strange, given how reconciliatory he'd been last night - but perhaps seeing me go on without him was something he didn't want to be around for. He could be unpleasant to be around when he felt slighted. It was unintentional - there was no malice behind it - but he could, and he knew it. His decision not to attend might have been intended as a courtesy.
Naturally, it was all anyone could talk about. Typical. He wasn't even here, and it was still all about Blue.
I leaned against the masonry of the town wall, the open gate to my left providing shade from the early autumn Sun. The gate was eight feet tall and just as wide, solid oak, and always barred come dusk. It also had the particular distinction of being the only city gate in Kanto that opened inward. Naïveté, a gesture of welcome, or just bad design? Nobody was quite sure, and nobody cared to fix it; the gate's quirk had become emblematic.
That was one of the perks of living in Pallet Town - on a peninsula with a shoreline too steep to be accessed by sea and a land passage only traversable through the Reclamationist fortress of Viridian City, there were few serious threats the outside world could pose. It is in such places that practicality cedes ground to tradition.
Leaving aside the notable absences of Blue and my Mom, my departure ceremony boasted an impressive turnout - Daisy and the Professor, obviously, as well as the Professor's Dragonite, his aides, and a few dozen locals - most of them Trainer retirees I'd seen about the Ranch at one time or another. Between the temperate climate, relative safety, low cost of living, and access to the Professor, Pallet Town was a popular destination for former Trainers.
The convoy had even brought a small news team down from Viridian, presently engaged in an interview with the Professor. He was all smiles and enthusiasm, boisterous laughter when the interviewer made even the most passably amusing remark, constantly gesturing towards me in an effort to turn their attention my way. Having been the face of Pokémon research for some twenty years, he was quite used to dealing with the media.
He was also, as he'd confided in me, fucking sick of it.
The TV crew engaged the Professor for what seemed like far too long. The Sun continued to rise, and the heavy gate ceased to provide shade. I pushed myself from the wall and moved towards the town's exit, hoping that my obvious impatience would stir them on. I was ready to go. My backpack, camping supplies, food for myself and Eevee, Pokéballs, the Pokédex the Professor had gifted me with - I was ready. If I could have, I would have simply set off there and then.
But the Professor had impressed upon me the importance of maintaining a good public image, and it was never wise to disregard his advice lightly. Big fight purses were very rare - much more so than the media made it seem - and for most professional Trainers, the key to financial solvency was endorsements. Private Pokémon battles were a zero-sum game, and for every Arc won, there was an Arc lost. Signing on for even a minor advertising contract was worth more than a decent Trainer would make from actual competing in a year, so establishing a friendly relationship with the press was critical from the outset.
It was the only way the Professor could ever have financed his studies. Maintaining a large ranch with cutting-edge equipment and a permanent research team was tremendously draining, even in a place as rural and cheap to live as Pallet Town. He didn't just contract with Silph Co., he was one of their most prominent faces. The easy banter, the air of casual command, the understated sense of authority - they were honed through decades of practice, and more critical to the continued operation of his lab than any amount of scientific expertise could ever be. A fact he lamented, but couldn't afford to deny.
By contrast, Blue couldn't get enough of the media's gaze. The attention, the opportunities for wit and showboating, the chance to shock and amaze an audience - he loved every second. By mere virtue of being related to the Professor, he'd already gotten a taste of the limelight. By entering the competitive circuit, he could fully expect to be bathed in it...which made his absence all the more perplexing. Paparazzi and a crowd – Blue's favourite breakfast.
Rather than draw attention, I forced myself to stare out beyond the gate. The view was calming - but more importantly, I figured the image of a Trainer gazing into the untamed wilds would look striking.
I rested a hand on Eevee's Pokéball, since that seemed like the dashing thing to do.
Beyond the gate, the long grass started abruptly, going from paved concrete to waist-high stalks in a hard line. The grass was maintained as, again, a matter of tradition. The wall was a tacit acknowledgement that wild Pokémon were dangerous to human settlements, but the tall grass freely bordering the town stood as an olive branch.
We will take this much space for ourselves, and not an inch more. We may mingle, we may meet, but beyond this point all is yours. The line between our worlds.
I assume my pose was suitably impressive, because it wasn't long before the news team finally made their way over to me. The camera operator - a twenty-something guy in a short-sleeved beige shirt, irresponsibly tall - was the first to arrive. His hair was a blond mop, his eyes were lined by thick, black plastic glasses, and his chin sported the stupidest, scruffiest goatee I'd ever seen.
"Hey mate," he said. "You ready?"
The reporter followed behind closely – a short, obese, middle-aged man convinced that there was no male pattern baldness that could not be defeated by a sufficiently determined comb-over. Following up behind him was a taller, gaunter man in his forties - sporting a fuller head of neatly-combed hair, blond and grey indistinguishable. Surprisingly muscular for his age, draped in a plain white t-shirt, and bore a large disc made of some reflective fabric.
He started moving about me, placing the disc at different angles and turning to the cameraman for confirmation. The camera operator glanced at his screen, made hand signals this way and that, until eventually giving a grinning thumbs-up. I didn't dare move.
"Got it?" asked the reporter.
"Yeah, we good," said the cameraman.
"I like the light we've got here. Really has pop, you know?" the other man chimed in.
"Mmm, good pop," the cameraman agreed.
The reporter leaned in to see the screen. "Oh, that's nice pop, there. Good pop, good pop."
I stood there, wondering what in the sweet tits they were talking about. The reporter, apparently satisfied with the ambient pop levels, turned his attention to me and thrust a microphone uncomfortably close to my mouth.
"Here we have him, the protégé of Samuel Oak himself!" the reporter half-bellowed. "The Red Seviper! The scion of Pallet! He Who Would Be Champion, in the flesh! Tell me, young lad, what drove you to become a Pokémon Master? The money? The glory? The women?"
"Uh," I said, fingers drumming anxiously against my leg. "Well, depends what you mean by 'Master', exactly. I keep hearing the term, but it always seems really ill-defined."
The reporter kept his broad smile fixed upon his face, but the eyes grew flinty.
"But. Ah. Well, assuming you mean becoming a really good Trainer and such...well, I guess it just always seemed like the thing to do. You know?"
The microphone fell, and the reporter's overbearing enthusiasm was replaced by a sigh of open exasperation.
"Come on, kid. You need to give us more than that. You're continuing a dynasty, here."
I nodded, cheeks flushing.
"Give us the answer again, eh?"
Another nod. He raised the microphone, practically pressing it against my lips this time, and repeated the question - identical to the very syllable, I noted.
"It's something I've always wanted to do," I said, trying to project more confidence than I felt. "It's in my blood, I guess. My biodad was a great Pokémon Trainer, and my other Dad is one of the greatest who's ever lived. Don't know what else I could do, really."
The reporter nodded, lowering the mic.
"Okay, that's better. This time, though, I want a bit more enthusiasm. Something a bit grander, yeah? 'It's my destiny', something like that. You're gonna shatter the Gyms, you're gonna crush the Elite Four, you're gonna tear Lance to pieces and bathe in dragons' blood."
"Uh..."
"Pump it up, y'know? Show some energy, talk with your hands! Move around a bit!"
"Not too much," the cameraman said.
"Don't wanna lose that pop," added the reflector guy.
The reporter shook his head. "Don't lose the pop. Work the pop."
"Be the pop," the cameraman said.
"You are the pop, mate," said the reflector guy. "Keep that in mind, okay?"
This went on for a few iterations, with the team repeating their encouragements and adjusting my stance slightly for maximum pop. By the end, they had me putting a fist forward as I declared I was going to slay the dragons with which Lance had dominated for so long and line my throne room with their skulls.
The Professor's Dragonite seemed uncomfortable.
In retrospect, it was kinda silly, but they were really good at making me feel like I was being impressive. I was even starting to believe it. They were already calling me the Dragonslayer, though that seemed like a title that would take a bit more earning.
Eventually they called a wrap, once there was unanimous agreement that the pop had passed.
With the Sun rapidly approaching its zenith, and a long journey ahead of me, I finally managed to say my last goodbyes.
Daisy, who'd been preoccupied with trying to keep her laughter at a manageable level during the interview, had a broad smile accompanying the water in her eyes. We went in for a tight hug, and when I tried to pull back, she didn't let go.
"It's okay, sis. I'll be back."
"You'd better, or I'm going to shovel so much Ponyta shit in your face, everyone'll think they call you Red for the pinkeye."
I snorted at that, and slowly prised myself from her grip. She let go, reluctantly. Then I turned to the Professor.
His pose was rigid, his face displaying a practised expression of composure. To a casual observer, he probably looked perfectly calm. You'd have to have lived with him for a decade to notice the way his breath caught as he inhaled.
He offered his hand, which I ignored in favour of a full hug.
"Be careful out there, Red," he whispered. "You can always come back."
"Thanks, Dad."
We broke the embrace, and he patted me - just once - on the shoulder.
I turned to the grass. It was a simple ceremony, this, but ancient. I would step into the tall grass, by myself. I would walk forward, the gates would close, and my journey into the world would begin. If there had been another Trainer starting out with me, we would have gone through separately. We would meet up on the road ahead and travel together, but the first step was always taken alone.
Alone, but for one. There was someone else who would take this step with me.
I plucked the Pokéball from my belt, flicking it open. A jagged arc of pale blue energy streaked out, crashed into the concrete at my feet, and condensed into a small scrap of paper.
what
what
huh
what
...
?
I must have stared at that note for half a minute before picking it up. Looking back, I think I was actually willing it to become Eevee, as if reality had simply made a mistake and would remedy the error upon having it pointed out. You open a Pokéball, Pokémon comes out. That's how it works.
But when the note stubbornly insisted upon not being Eevee, I did, reluctantly, pick it up.
I was still too stunned to take the message in. Several lines of text - far too much information to process right then, but that wouldn't be necessary. All I needed to piece it all together was the familiar, three-word catchphrase resting at the end.
Slowly, I turned to the crowd behind me. Daisy, face in hand, arms tensed, scowling through her fingers. The Professor, stoic and controlled. The news crew, camera still running. The onlooking townspeople, whispering amongst themselves.
Everyone, staring at me.
You bastard.
He'd sent Viridian News a message from his Pokédex, telling them precisely when and where he'd be arriving. He told them where to position the cameras for him. At what angle they should point, laterally and vertically.
Over the years, many people have looked at that video, at the image immortalised on posters and billboards the world over, and remarked on what it represented. Look at the raw talent, they say, the potential.
These people do not know Blue. He was, and is, no mere prodigy. He did not stride into that image half-cocked, grinning at his tremendous good fortune. It was no stroke of luck.
When he walked towards Viridian's southern gate, silhouetted by the glorious lilac glow of an autumn sunset, arms outstretched, a roguish grin on his face - with Eevee at his side, a Pidgey on his shoulder, and a fresh-caught Ekans coiled around his right arm, fangs bared at the camera - he knew exactly what he was doing.
Every little detail, calculated to perfection.
Of course he didn't catch and tame two Pokémon in nine hours, while making the trek from Pallet to Viridian. He's a genius, not a magician. Nobody can do that, least of all on their first day as a Trainer.
But it wasn't his first day as a Trainer. He'd been a Trainer for weeks.
Ever since the spat with the Professor, he'd been spending every spare hour roaming through Route 1, secretly using the Professor's Pokémon to catch and train his own. The Pidgey's outspread wings, the Ekans' iconic pose - these weren't the result of something as pedestrian as mere talent. They were the product of dedication bordering on the fanatical, an analytical intellect tutored by the greatest scientist of the era, a painstaking attention to detail...and, yes, a healthy drop of natural ability.
But at the time, I wasn't admiring the brilliant display of showmanship. I was watching the footage of myself gormlessly turning to the camera, a scrap of paper in hand and a look of stunned, dull incomprehension on my face.
I looked like a sad Slowpoke.
He looked like a Champion.
"I'm going to kill him."
Across the dining-room table, the Professor arched an eyebrow.
He waited.
I sighed.
"Obviously I'm not going to kill him."
The eyebrow remained raised.
"Maybe maim him."
The eyebrow soared higher.
"Punch him."
He shrugged, and the eyebrow finally descended. He uncapped a pair of beers, placing one in front of me before taking a pull of his own.
"Why do you do that?" I asked. "Always have to take things so literally?"
He half-shrugged, absorbed in the bottle. He drank rarely enough that the novelty always seemed to fascinate him.
"Trainers are figures of considerable influence, Red, and a great deal of that influence is derived from reputation. When you make an idle threat that you fail to carry out, you look weak, and people will be less inclined to listen to the next one. When you make a declaration, it must carry the weight of truth behind it."
"Fine. I'm going to punch Blue, in the stomach, fairly hard."
He raised his bottle to that.
"See that you do."
We drank. Setting my bottle down, I gazed over his shoulder and out the window. Night. For perhaps the seventh or eighth time, the thought I could just set off tomorrow flashed through my mind for a second before being quashed by but you don't have a Pokémon. It was incredible how many times the same, obviously flawed idea could occur.
"When I catch him, I'll..."
The Professor's eyes flicked up to meet mine. His eyebrow was poised, twitching, just waiting for an excuse. I dropped my gaze to the bottle, allowing my shoulders to slump.
"...can I, even? Get Eevee back?"
He swished a mouthful as he considered the question, swallowing before answering.
"Strictly? Yes. Eevee is registered to you, you can claim him back. You'd have every law backing you."
He hesitated. I didn't fill it in for him. I just waited for him to say it, and jumped in when he did.
"But-" we said, simultaneously.
He stalled at that for a second, scowling slightly, before continuing.
"But it would ruin the story. Being a Trainer is as much image as anything else, and he pulled off a major coup today. If you demand Eevee back, he looks arrogant and entitled."
I stared pointedly.
"That's not good for you either, Red. As far as the media's concerned, you two are rivals now. It may not seem like it, but your position improved tremendously today. This morning, the pair of you were Professor Oak's protégés. Curiosities, two children to keep an eye on. Tonight, you are Red and Blue. Childhood rivals, jostling for supremacy. You may not see it now, but you only lost today's battle. Play this right, and today's events will be remembered as nothing more than the opening shots of a legendary feud. How you react to this will determine both of your images for years to come."
He raised one hand, palm upturned.
"You can demand Eevee back, and have him returned to you. Blue will look like a spoiled child, and you will look resentful and petty."
He raised his other hand in the same manner.
"Or, you can take it in stride. Make an address to the Viridian crew tomorrow. You can laugh, congratulate him on his clever little prank, and tell him that he can keep the Eevee. Tell him that you'll be right behind him, and that if he wants a war, he's got one. The press will eat it up, and both of you will gain prestige."
A minute passed in silence. I rocked my beer around and around on the wooden table, considering his proposition. Finally, I spoke, unable to keep a note of defeat from my voice.
"How long before I can get a starter?"
"Blue's was due to arrive in a week," he said. "A Squirtle."
I nodded, more to myself than to him. Squirtle was a solid Pokémon - fantastic, really - but...
"A week?"
"Yes." An idea seemed to strike him. "Unless..."
I raised my head.
"Unless...you remember that Pikachu I caught yesterday?"
I frowned. "The violent, angry, feral one?"
He nodded.
"The one that electrocuted the first aide who tried to feed it?"
Another nod.
"Who would have killed Jean if she hadn't been wearing a rubber insulation suit?"
Once more, he nodded thoughtfully.
"You're saying I should take a murderous, wild, untamed, Electric mouse? To battle the Ground and Rock Gyms?"
"High-stress situations bring Trainers and Pokémon together faster than anything, Red," he said. "And every day you spend waiting for a starter is another day that Blue pulls further away. How great a lead are you willing to give him?"
I stared at him, mouth slightly open but without words to speak. He couldn't be serious. The Professor raised his hands, palms open.
"I'm simply proposing an option," he said. "Perhaps Brock's Onix is secretly vulnerable to Th...to Thun...Thundershoaaa—"
He held his breath, face tensing in a frown, but he couldn't help himself. The smile broke through, and his fist pounded the table as he burst out laughing.
I rolled my eyes.
"Dammit, Professor," I said. "I actually thought you were serious for a second."
"I am disappointed," the Professor said between chuckles, "that you would think so little of me." |
def load_trackers_signatures(self):
    """Load tracker signatures from the JSON database at ``self.tracker_db``.

    Populates ``self.signatures`` with one ad-hoc namedtuple per tracker
    entry, compiles the signatures via ``self._compile_signatures()``, and
    records the total count in ``self.nb_trackers_signature``.
    """
    self.signatures = []
    # Decode errors are ignored so a partially corrupt database still loads.
    with io.open(self.tracker_db,
                 mode='r',
                 encoding='utf8',
                 errors='ignore') as flip:
        data = json.load(flip)
    for fields in data['trackers'].values():
        # Each tracker record becomes a namedtuple whose field names are
        # exactly the keys of that JSON object (keys()/values() iterate in
        # matching insertion order).
        tracker_cls = namedtuple('tracker', fields.keys())
        self.signatures.append(tracker_cls(*fields.values()))
    self._compile_signatures()
    self.nb_trackers_signature = len(self.signatures)
<reponame>mdkamrul/Bridge
import { Component } from '@angular/core';
import { NavController, NavParams } from 'ionic-angular';
import { AlertController } from 'ionic-angular';
import { Db } from '../../app/db/Db';
import { LeadPage } from '../lead/lead';

/**
 * Page that shows the leads (rounds) of a single bridge game, split into
 * "We" and "They" sides, and lets the user create a new game or a new lead.
 */
@Component({
    selector: 'page-home',
    templateUrl: 'home.html'
})
export class NewGamePage {
    // Name of the game currently being played (set via the name dialog).
    gameName = '';
    // All leads of the current bridge game.
    items: Array<any>;
    // The bridge game being viewed/played; stays unset until one is
    // passed in via nav params or created through gameNameDialog().
    currentBride : any;
    // Leads whose bid was won by the "We" side.
    weLead = [];
    // Leads whose bid was won by the "They" side.
    theyLead = [];

    constructor(public navCtrl: NavController, private alertCtrl: AlertController, private db: Db, public navParams: NavParams) {
        // An existing game may be handed over from the previous page.
        var listBridge = this.navParams.get('listBridge');
        if (listBridge != null) {
            this.currentBride = listBridge;
        }
        if (this.currentBride == null){
            // No game yet: prompt for a name and start with an empty lead list.
            this.gameNameDialog();
            this.items = [];
        }
        else{
            this.items = this.db.getBridgLeadList(this.currentBride.bridgeId);
        }
        //this.splitLead();
    }

    // Ionic lifecycle hook: refresh the lead list every time the page is
    // (re-)entered, e.g. after returning from LeadPage.
    ionViewDidEnter(){
        if (this.currentBride != null){
            this.items = this.db.getBridgLeadList(this.currentBride.bridgeId);
            this.splitLead();
        }
    }

    // Navigate to the lead-entry page for the current game.
    onNewLeadClick(){
        this.navCtrl.push(LeadPage,{
            currentBride: this.currentBride
        });
    }

    // Show the game-result alert.
    onGameResultBtnClick(){
        this.showResult();
    }

    // Tap handler for a lead list item; currently only logs the item.
    onLeadItemClick($event, item){
        console.log(item);
    }

    // Partition this.items into weLead/theyLead by bid winner.
    splitLead(){
        this.weLead = [];
        this.theyLead = [];
        for(var i = 0; i < this.items.length; i++){
            var lead = this.items[i];
            if (lead.bidWinner == 'We'){
                this.weLead.push(lead);
            }
            else{
                this.theyLead.push(lead);
            }
        }
        console.log(this.weLead);
        console.log(this.theyLead);
    }

    // Present an alert describing the next lead number with inputs for the
    // call and point values. NOTE(review): the Ok button has role 'cancel'
    // and an empty handler, so entered data is discarded -- confirm intended.
    showResult(){
        let leadNumber = this.items.length + 1;
        let alert = this.alertCtrl.create({
            title: 'Lead Number : ' + leadNumber,
            inputs: [
                {
                    name: 'gameName',
                    placeholder: 'Game Name'
                },
                {
                    name : 'call',
                    label : 'Call',
                    type: 'radio',
                    value : '8'
                },
                {
                    name: 'call',
                    label: 'Call',
                    type: 'radio',
                    value : '9'
                },
                {
                    name: 'wePoint',
                    placeholder: "Lead Point"
                }
            ],
            buttons: [
                {
                    text: 'Ok',
                    role: 'cancel',
                    handler: data => {
                    }
                }
            ]
        });
        alert.present();
    }

    // Prompt for a game name; on Ok, create the game record, register it in
    // the shared Db, and make it the current game.
    gameNameDialog() {
        let alert = this.alertCtrl.create({
            title: 'Game Name',
            inputs: [
                {
                    name: 'gameName',
                    placeholder: 'Game Name'
                }
            ],
            buttons: [
                {
                    text: 'Cancel',
                    role: 'cancel',
                    handler: data => {
                        console.log('Cancel clicked');
                    }
                },
                {
                    text: 'Ok',
                    handler: data => {
                        data.startTime = new Date();
                        data.bridgeId = this.db.getNextBridgeId();
                        this.gameName = data.gameName;
                        this.db.bridgeList.push(data);
                        this.currentBride = data;
                        this.db.playingBrige = data;
                        console.log(data);
                    }
                }
            ]
        });
        alert.present();
    }
}
|
import { join } from "path";
import { Connection, createConnection, getConnectionOptions } from "typeorm";
/**
 * Open a TypeORM connection using options discovered from the environment /
 * ormconfig, with a fixed set of runtime overrides applied.
 */
export async function createConnectionPool(): Promise<Connection> {
    const connectionOptions = await getConnectionOptions();

    const overrides = {
        synchronize: false,
        logging: true,
        // Entities are resolved relative to this file's compiled location so
        // the same glob works for both ts-node and compiled output.
        // (Previously experimented with explicit dist/src globs, e.g.
        // "src/secondary-adapters/dal/postgres/**/*.orm-entity{.ts,.js}".)
        entities: [join(__dirname, "**", "*.orm-entity.{ts,js}")],
        migrations: ["dist/src/migrations/*.js"],
        migrationsDir: "dist/src/migrations",
        migrationsRun: true,
    };

    // Mutate the discovered options object in place, as before.
    Object.assign(connectionOptions, overrides);
    return await createConnection(connectionOptions);
}
|
/**
 * Immutable event describing a single file change pushed to a repository.
 * Captures the changed path together with the ref name and change type
 * taken from the originating {@link ReceiveCommand}.
 *
 * @author Oleg Ilyenko
 */
public class FileChangeEventImpl extends BaseServerRepositoryUserEvent implements FileChangeEvent {

    // Path of the changed file within the repository.
    private final String path;
    // Name of the ref (e.g. a branch ref) the change was pushed to.
    private final String refName;
    // Kind of change, converted from the git receive command's type.
    private final Type type;

    public FileChangeEventImpl(User user, ConfigurableGitSshServer server, RepositoryInfo repositoryInfo, String path, ReceiveCommand command) {
        super(user, server, repositoryInfo);
        this.path = path;
        this.refName = command.getRefName();
        // Map the git-level command type onto this API's Type.
        this.type = GitUtil.convert(command.getType());
    }

    public String getPath() {
        return path;
    }

    public String getRefName() {
        return refName;
    }

    public Type getType() {
        return type;
    }
}
import { InteractionEvent } from "pixi.js";
import { BasicCheckButton } from "./BasicCheckButton";
/**
 * A button that can be selected exclusively (radio-button behavior).
 * Radio button groups are configured with BasicRadioButtonManager.
 */
export declare class BasicRadioButton extends BasicCheckButton {
    /**
     * Select this button.
     * @param evt
     */
    selectButton(evt?: InteractionEvent): void;
    /**
     * Get the button's current enabled/disabled state.
     * A radio button does not accept interaction while it is selected.
     * @return whether the button is enabled
     */
    protected checkActivity(): boolean;
}
//# sourceMappingURL=BasicRadioButton.d.ts.map
def parse_messages(messages):
    """Parse every raw message in ``messages`` and return the results as a list."""
    return list(map(parse_message, messages))
Maloney and Smith Method for Modeling Debye-Media Thin Sheets in the FDTD Grid
The Maloney and Smith method used to account for sheets thinner than the cell size of the finite-difference time-domain (FDTD) method is extended to the case where both the sheet and the surrounding media are such dispersive Debye media as the Human Body tissues. The new method is then compared to the simpler Luebbers and Kuntz method that was previously extended to Debye media. The comparison relies on the reflection and transmission of plane waves and on 3-D experiments. It is concluded that the Maloney and Smith method permits a better accuracy to be achieved. It provides accurate reflection and transmission whatever may be the incidence angle of the wave that strikes the sheet. |
// Program Abstract Syntax Tree
/**
 * One node of the program AST, carrying the state used by the
 * worklist-driven Hindley-Milner type-inference pass.
 */
static abstract class Syntax {
    Syntax _par;    // Parent in the AST.
    Syntax[] _kids; // Children in the AST.
    HMType _hm;     // The Hindley-Milner type

    // Find the H-M type for this node, strictly by looking at the children H-M
    // type and adding any constraints.
    abstract HMType hm(Worklist work); // Hindley-Milner effect for this AST node

    // Prep call: gather unique IDs and find/set the non-gen IDs for this AST
    // node and subtree.
    abstract void get_ids(VStack vstk, Worklist work);

    // Add self to the worklist, IFF kids have already computed an initial H-M
    // type (hm() cannot run until every kid has a type).
    protected final void add_work(Worklist work) { if( all_kids_ready() ) work.push(this); }

    // Add neighbors (kids, parent) and NOT self to the worklist; used when this
    // node's type changed and its neighbors may now make progress.
    final void add_neighbors(Worklist work) {
        if( _par!=null ) _par.add_work(work);
        if( _kids!=null )
            for( Syntax kid : _kids )
                kid.add_work(work);
    }

    // True when every kid has an initial H-M type (or there are no kids).
    final boolean all_kids_ready() {
        if( _kids==null ) return true;
        for( Syntax kid : _kids ) if( kid._hm==null ) return false;
        return true;
    }

    // Progress if _hm is not null, and a call to hm() either returns something
    // not 'eq' to _hm or unifies with anything.
    abstract boolean progress();

    // Invariant check: every node which can make progress must be on the
    // worklist, recursively over the subtree; returns false on violation.
    boolean check_progress(Worklist work) {
        if( all_kids_ready() )           // If kids are not ready, then cannot compute hm() so not on worklist
            if( _hm==null || progress() ) // Progress is possible
                if( !work.has(this) )     // Not on worklist?
                    return false;         // Fails check
        if( _kids!=null )                // For all kids
            for( Syntax kid : _kids )
                if( !kid.check_progress(work) ) // Recursively check nodes that can make progress on worklist
                    return false;
        return true;
    }
}
I recently tweaked a bit my PS1. The code as follows:
PS1='$(if [[ $? != 0 ]]; then echo " \e[91m\e[0m"; fi) \u $(assemble_prompt)$ '
The missing char is from nerd-fonts and doesn't matter in my question (as well as assemble_prompt function).
The problem I encountered is a broken PS1 when I shrink the terminal window size down to roughly the size of the prompt. Then it seems to ignore the PS1 setting and sets PS1 to $ characters only (see screenshot). Note that when I start typing, it simply overwrites my custom prompt on this occasion.
I narrowed the problem to an if clause that adds "fail" char if previous command was unsuccessful. If I remove that part everything works as expected.
Is there a way to keep if clause part but fix the issue with PS1 reset when the window size is too small? |
/**
 * Stores inheritance options for {@link OwnershipPlugin}.
 * This section is attached as an advanced section to {@link OwnershipPluginConfiguration}.
 * These options allow tuning (or disabling) the ownership inheritance behavior.
 * @author Oleg Nenashev
 * @since 0.9
 */
public class InheritanceOptions implements Describable<InheritanceOptions> {
    // Default configuration: inheritance from item groups is NOT blocked.
    public static final InheritanceOptions DEFAULT = new InheritanceOptions(false);
    private final boolean blockInheritanceFromItemGroups;
    /**
     * Constructor bound to the Jenkins configuration form.
     * @param blockInheritanceFromItemGroups whether ownership inheritance from
     *        parent item groups (folders) should be disabled
     */
    @DataBoundConstructor
    public InheritanceOptions(boolean blockInheritanceFromItemGroups) {
        this.blockInheritanceFromItemGroups = blockInheritanceFromItemGroups;
    }
    /**
     * Blocks ownership inheritance from {@link ItemGroup}s.
     * This inheritance is used in {@link JobOwnerHelper} and {@link FolderOwnershipHelper}
     * in order to retrieve the info from parent folders.
     * Such inheritance may impact the performance of Jenkins instance, hence it is possible to disable it.
     * @return {@code true} if ownership inheritance should be blocked.
     */
    public boolean isBlockInheritanceFromItemGroups() {
        return blockInheritanceFromItemGroups;
    }
    // Singleton descriptor instance registered with Jenkins via @Extension.
    @Extension
    public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
    @Override
    public DescriptorImpl getDescriptor() {
        return DESCRIPTOR;
    }
    /** Descriptor for the advanced configuration section (no visible name). */
    public static class DescriptorImpl extends Descriptor<InheritanceOptions> {
        @Override
        public String getDisplayName() {
            return "N/A";
        }
    }
}
/*
Copyright © 2021 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package create
import (
"errors"
"os"
"github.com/harshithmullapudi/airbyte/logger"
"github.com/spf13/cobra"
)
// CreateCmd represents the create command: it reads entity definitions from a
// config folder and creates sources, destinations, and connections in order.
var CreateCmd = &cobra.Command{
	Use:   "create",
	Short: "Create Sources/Destinations/Connections",
	Long: `Create Sources/Destinations/Connections from a config folder. You need to have SOURCE_CONNECTION.yaml(sources), DESTINATION_CONNECTION.yaml(destinations), STANDARD_SYNC.yaml(connections) files.
	Note: This will neglect if the entity is already created.
	`,
	Run: func(cmd *cobra.Command, args []string) {
		configFolder, _ := cmd.Flags().GetString("folder")
		create, _ := cmd.Flags().GetBool("create")
		// The folder flag is mandatory; abort with an error when missing.
		if configFolder == "" {
			logger.Error("provide config folder")
			cobra.CheckErr(errors.New("provide config folder"))
		}
		// Check if config folder exist
		_, err := os.Stat(configFolder)
		if os.IsNotExist(err) {
			logger.Error("No config folder found")
			cobra.CheckErr(err)
		}
		// Creation order matters: connections reference sources/destinations.
		// Start with sources
		logger.Debug("Starting sources creation")
		CreateSources(configFolder, create)
		// Create destinations
		logger.Debug("Starting destinations creation")
		CreateDestinations(configFolder, create)
		// Create Connections
		logger.Debug("Starting connections creation")
		CreateConnections(configFolder, create)
	},
}
// init registers the flags shared by the create command and its children.
func init() {
	CreateCmd.PersistentFlags().StringP("folder", "f", "", "Config folder")
	// NOTE(review): the help text describes the false case but the flag also
	// defaults to false — confirm whether the default should be true.
	CreateCmd.PersistentFlags().BoolP("create", "c", false, "Setting this to false will only validate sources and doesn't create")
}
|
<reponame>changmeng72/leecode_python3
class Solution:
    def decode(self, encoded: List[int], first: int) -> List[int]:
        """Recover the original array from its XOR-encoded form.

        ``encoded[i]`` equals ``original[i] ^ original[i + 1]`` and ``first``
        is ``original[0]``, so each decoded element is the XOR of the previous
        decoded value with the next encoded value.

        Unlike the previous implementation, the caller's ``encoded`` list is
        no longer mutated, and the O(n) ``insert(0, ...)`` shift is avoided.
        """
        decoded = [first]
        for value in encoded:
            decoded.append(decoded[-1] ^ value)
        return decoded
import { SetMetadata } from '@nestjs/common';
import { APP_GUARD } from '@nestjs/core';
import { JwtAuthGuard } from './auth/jwt-auth.guard';
// Provider entry installing JwtAuthGuard as an application-wide guard;
// register it in the root module's providers array.
export const GLOBAL_AUTH_CONFIG = {
  provide: APP_GUARD,
  useClass: JwtAuthGuard,
};
// Metadata key that marks a handler/controller as exempt from JWT auth.
export const IS_PUBLIC_KEY = 'isPublic';
// Decorator marking a route or controller as publicly accessible
// (the guard checks for this metadata and skips authentication).
export const Public = () => SetMetadata(IS_PUBLIC_KEY, true);
|
import os
import filecmp
import pytest
from ..resetter import FileResetter
# Paths of the files FileResetter manages; tests recreate them and the
# autouse fixture removes them afterwards.
CONDITIONS_FILE = "tests/conditions.py"
VERIFICATION_FILE = "tests/verification_file.py"
RUNTIME_FILE = "tests/runtime_file.py"
ISOLATED_FNS_FILE_PATH = "tests/isolated_fns.json"
# Golden copies used to verify the reset output byte-for-byte.
EXPECTED_DEFAULT_CONDITIONS_FILE = "tests/expected/expected_default_conditions.py"
EXPECTED_DEFAULT_VERIFICATION_FILE = (
    "tests/expected/expected_default_verification_file.py"
)
EXPECTED_DEFAULT_RUNTIME_FILE = "tests/expected/expected_default_runtime_file.py"
EXPECTED_ISOLATED_FNS_FILE_PATH = "tests/expected/expected_default_isolated_fns.json"
# Single resetter instance shared by all tests.
resetter = FileResetter(
    CONDITIONS_FILE, VERIFICATION_FILE, RUNTIME_FILE, ISOLATED_FNS_FILE_PATH
)
@pytest.fixture(autouse=True)
def run_before_and_after_tests():
    """Fixture to execute setup and cleanup.

    After each test, remove every file the resetter may have created so
    tests stay independent of each other.
    """
    yield  # this is where the testing happens
    # Cleanup: iterate instead of repeating the exists/remove pair per file.
    for path in (
        CONDITIONS_FILE,
        VERIFICATION_FILE,
        RUNTIME_FILE,
        ISOLATED_FNS_FILE_PATH,
    ):
        if os.path.exists(path):
            os.remove(path)
def test_resetter_reset_conditions_file():
    """reset_conditions_file recreates the default conditions file."""
    resetter.reset_conditions_file()
    assert os.path.exists(CONDITIONS_FILE)
    # Byte-for-byte comparison against the golden default file.
    assert filecmp.cmp(CONDITIONS_FILE, EXPECTED_DEFAULT_CONDITIONS_FILE, shallow=False)
def test_resetter_reset_verification_file():
    """reset_verification_file recreates the default verification file."""
    resetter.reset_verification_file()
    assert os.path.exists(VERIFICATION_FILE)
    # Byte-for-byte comparison against the golden default file.
    assert filecmp.cmp(
        VERIFICATION_FILE, EXPECTED_DEFAULT_VERIFICATION_FILE, shallow=False
    )
def test_resetter_reset_runtime_file():
    """reset_runtime_file recreates the default runtime file."""
    resetter.reset_runtime_file()
    assert os.path.exists(RUNTIME_FILE)
    # Byte-for-byte comparison against the golden default file.
    assert filecmp.cmp(RUNTIME_FILE, EXPECTED_DEFAULT_RUNTIME_FILE, shallow=False)
def test_resetter_reset_isolated_fns_file():
    """reset_isolated_fns_file recreates the default isolated-functions JSON."""
    resetter.reset_isolated_fns_file()
    assert os.path.exists(ISOLATED_FNS_FILE_PATH)
    # Byte-for-byte comparison against the golden default file.
    assert filecmp.cmp(
        ISOLATED_FNS_FILE_PATH, EXPECTED_ISOLATED_FNS_FILE_PATH, shallow=False
    )
|
/**
* Panel to configure ResultProcessor rules
*
* @author Leonid Schwenke, DECOIT GmbH
*
*/
@SuppressWarnings("serial")
public class MatcherPanel extends JPanel {
private JTextField textField;
private JTextField attributeValueTextField;
private JTextField typeValueTextfield;
private JPanel valuePanel;
private JButton addButton;
private JButton removeButton;
private JCheckBox chckbxSkip;
private final Logger logger = LoggerFactory.getLogger(this.getClass());
/**
* Create the panel.
*
* @param dialog
*/
public MatcherPanel() {
logger.debug("Creating MatcherPanel");
setLayout(new BorderLayout(0, 0));
JPanel buttonPanel = new JPanel();
add(buttonPanel, BorderLayout.NORTH);
addButton = new JButton("+");
buttonPanel.add(addButton);
removeButton = new JButton(" - ");
removeButton.setEnabled(false);
buttonPanel.add(removeButton);
chckbxSkip = new JCheckBox("skip");
buttonPanel.add(chckbxSkip);
valuePanel = new JPanel();
add(valuePanel);
valuePanel.setLayout(new BoxLayout(valuePanel, BoxLayout.Y_AXIS));
addButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
logger.debug("Adding row!");
valuePanel.add(getNewRow());
removeButton.setEnabled(true);
updateUI();
// pack();
}
});
removeButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
logger.debug("removing row!");
if (valuePanel.getComponentCount() <= 1) {
removeButton.setEnabled(false);
}
valuePanel.remove(valuePanel.getComponentCount() - 1);
updateUI();
// pack();
}
});
chckbxSkip.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
logger.debug("Using Skip!");
addButton.setVisible(!chckbxSkip.isSelected());
removeButton.setVisible(!chckbxSkip.isSelected());
valuePanel.setVisible(!chckbxSkip.isSelected());
updateUI();
// pack();
}
});
logger.debug("Creating MatcherPanel complete!");
}
@SuppressWarnings("unchecked")
public void setValue(String values) {
logger.debug("Saving matcherPanel");
String[] settings = values.split(",");
for (String setting : settings) {
String[] split = setting.split("_", 4);
String name = split[0];
if (name.equals("skip")) {
chckbxSkip.doClick();
} else {
String attribute = null;
String operator = null;
String value = null;
String filter = "matches";
if (split.length == 4) {
attribute = split[1];
operator = split[2];
value = split[3];
switch (operator) {
case "c":
filter = "contains";
break;
case "m":
filter = "matches";
break;
default:
filter = "matches";
}
}
addButton.doClick();
JPanel newPanel = (JPanel) valuePanel.getComponent(valuePanel
.getComponentCount() - 1);
valuePanel.add(newPanel);
((JTextField) newPanel.getComponent(1)).setText(name);
((JTextField) newPanel.getComponent(3)).setText(attribute);
((JComboBox<String>) newPanel.getComponent(4))
.setSelectedItem(filter);
((JTextField) newPanel.getComponent(5)).setText(value);
}
}
}
@SuppressWarnings("unchecked")
public String getValues() {
logger.debug("Loading MatcherPanel values");
String entry = "";
if (chckbxSkip.isSelected()) {
entry = "skip";
} else {
for (int i = 0; i < valuePanel.getComponentCount(); i++) {
if (i != 0) {
entry = entry + ",";
}
JPanel panel = (JPanel) valuePanel.getComponent(i);
String name = ((JTextField) panel.getComponent(1)).getText();
String attribute = ((JTextField) panel.getComponent(3))
.getText();
String filter = (String) ((JComboBox<String>) panel
.getComponent(4)).getSelectedItem();
String value = ((JTextField) panel.getComponent(5)).getText();
switch (filter) {
case "contains":
filter = "c";
break;
case "matches":
filter = "m";
break;
}
entry = entry + name + "_" + attribute + "_" + filter + "_"
+ value;
}
}
return entry;
}
/**
* Create a new configuration row
*
* @return
* new row
*/
private JPanel getNewRow() {
logger.debug("creating new row!");
JPanel rowPanel = new JPanel();
rowPanel.setLayout(new FlowLayout(FlowLayout.CENTER, 5, 5));
JLabel newTypeLabel = new JLabel("Type:");
rowPanel.add(newTypeLabel);
typeValueTextfield = new JTextField();
rowPanel.add(typeValueTextfield);
typeValueTextfield.setColumns(10);
JLabel newAttributeLabel = new JLabel("Attribute:");
rowPanel.add(newAttributeLabel);
attributeValueTextField = new JTextField();
rowPanel.add(attributeValueTextField);
attributeValueTextField.setColumns(10);
JComboBox<String> matchValueCombobox = new JComboBox<String>();
matchValueCombobox.setModel(new DefaultComboBoxModel<String>(new String[] {
"contains", "matches" }));
rowPanel.add(matchValueCombobox);
textField = new JTextField();
textField.setColumns(10);
rowPanel.add(textField);
return rowPanel;
}
} |
def parse(cls, string):
    # Parse `string` against cls._Pattern and build a new instance, or return
    # None when the pattern does not match the whole string.
    #
    # NOTE(review): group meanings below are inferred from usage and must be
    # confirmed against the actual `_Pattern` definition: group(1) looks like
    # a "markers only, no path" prefix, group(2) a leading marker run,
    # group(4) a mode marker ('+' or a run of mode characters), and
    # groups (3) and (5) the two halves of the dotted path.
    if m := cls._Pattern.fullmatch(string):
        m_onlytop = m.group(1)
        m_top = m.group(2)
        m_mode = m.group(4)
        if m_onlytop is not None:
            # Prefix-only form: depth is marker count minus one, mode is
            # forced to -1 and the path is empty.
            top = len(m_onlytop) - 1
            return cls(top, mode=-1, path=())
        else:
            top = 0
            mode = 0
            if m_top:
                # A leading marker run sets the depth and switches mode to -1.
                top = max(0, len(m_top) - 1)
                mode = -1
            if m_mode:
                # '+' selects mode 0; otherwise mode is the marker count.
                mode = 0 if m_mode == '+' else len(m_mode)
            # NOTE(review): assumes groups 3 and 5 are never None when this
            # branch is reached — a TypeError would follow otherwise.
            path = tuple((m.group(3) + m.group(5)).split('.'))
            return cls(top, mode, path)
    else:
        return None
# Read grid size (h rows, w cols) and the m bomb coordinates from stdin.
h, w, m = map(int, input().split())
bombs = [list(map(int, input().split())) for _ in range(m)]

# Count bombs per row and per column.
row_count = [0] * h
col_count = [0] * w
for r, c in bombs:
    row_count[r - 1] += 1
    col_count[c - 1] += 1

best_row = max(row_count)
best_col = max(col_count)

# Flag every row/column that attains the maximum count.
row_is_best = [1 if v == best_row else 0 for v in row_count]
col_is_best = [1 if v == best_col else 0 for v in col_count]

# Number of candidate intersections of a best row with a best column.
candidates = sum(row_is_best) * sum(col_is_best)

# Bombs sitting exactly on a candidate intersection.
occupied = sum(1 for r, c in bombs if row_is_best[r - 1] and col_is_best[c - 1])

# If every candidate intersection already holds a bomb, the bomb at the chosen
# crossing is double-counted; otherwise some crossing avoids the overlap.
base = best_row + best_col - 1
print(base if occupied == candidates else base + 1)
Dynamic Visualization of Stress/Strain Distribution and Fatigue Crack Propagation by an Organic Mechanoresponsive AIE Luminogen
Stress exists ubiquitously and is critically important for the manufacturing industry. Due to the ultrasensitive mechanoresponse of the emission of 1,1,2,2,‐tetrakis(4‐nitrophenyl)ethane (TPE‐4N), a luminogen with aggregation‐induced emission characteristics, the visualization of stress/strain distributions on metal specimens with a pure organic fluorescent material is achieved. Such a fluorescence mapping method enjoys the merits of simple setup, real‐time, full‐field, on‐site, and direct visualization. Surface analysis shows that TPE‐4N can form a nonfluorescent, crystalline uniform film on the metal surface, which cracks into fluorescent amorphous fragments upon mechanical force. Therefore, the invisible information of the stress/strain distribution of the metal specimens are transformed to visible fluorescent signals, which generally matches well but provides more details than software simulation. Remarkably, fatigue crack propagation in stainless steel and aluminum alloy can be observed and predicted clearly, further demonstrating the ultrasensitivity and practicability of TPE‐4N. |
<reponame>JerryYangKai/TeamsFx-Samples
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import axios from "./axios-decorator";
import { getBaseUrl } from '../configVariables';
import { IDiscoverPost } from "../components/card-view/discover-wrapper-page";
import { AxiosResponse } from "axios";
// Base URL for all discover-post API calls.
const baseAxiosUrl = getBaseUrl() + '/api';
/**
* Get discover posts for tab
* @param pageCount Current page count for which posts needs to be fetched
*/
export const getDiscoverPosts = async (pageCount: number): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/posts?pageCount=${pageCount}`;
	return await axios.get(url);
};
/**
* Update post content details
* @param postContent Post details object to be updated
*/
export const updatePostContent = async (postContent: IDiscoverPost): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/posts/${postContent.postId}`;
	return await axios.put(url, postContent as unknown as {[key:string]:unknown});
};
/**
* Add new post
* @param postContent Post details object to be added
*/
export const addNewPostContent = async (postContent: IDiscoverPost): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/posts`;
	return await axios.post(url, postContent as unknown as {[key:string]:unknown});
};
/**
* Delete post from storage
* @param post Post whose postId identifies the record to delete
*/
export const deletePost = async (post: IDiscoverPost): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/posts/${post.postId}`;
	return await axios.delete(url);
};
/**
* Add user vote
* @param postDetails Post to vote
*/
export const addUserVote = async (postDetails: { [key: string]: unknown; }): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/vote/${postDetails.postId}`;
	return await axios.post(url);
};
/**
* Delete user vote
* @param postDetails Post whose vote should be removed from storage
*/
export const deleteUserVote = async (postDetails: IDiscoverPost): Promise<AxiosResponse> => {
	const url = `${baseAxiosUrl}/vote/${postDetails.postId}`;
	return await axios.delete(url);
};
// ******************** User Y-Coordinate Translate Function *******************
// Translate a user-space y-coordinate into a screen (pixel) row.
//
//   y        user y-coordinate (positive = above the screen centre)
//   yborder  user-space value mapped to the screen edge
//
// The negation flips the axis because screen rows grow downward while the
// user's y grows upward. Returns the row truncated toward zero, exactly as
// the original implicit float->int conversion did.
int ytranslate (double y, double yborder)
{
    int maxy = getmaxy();
    // Fraction of the half-screen covered by y. Computed in double now
    // (the old float narrowed the division result for no benefit).
    double percent = -y / yborder;
    // maxy/2 keeps the original integer division; the single explicit cast
    // replaces the old int temporary plus redundant (int) cast.
    return (int)(maxy / 2 + percent * (maxy / 2));
}
def run(self):
    """Run the Fluentd logging configuration check.

    Returns a result dict: ``{"failed": True, "msg": ...}`` when the
    configuration check reports a problem, otherwise an empty dict.
    """
    config_error = self.check_logging_config()
    if not config_error:
        return {}
    msg = ("The following Fluentd logging configuration problem was found:"
           "\n{}".format(config_error))
    return {"failed": True, "msg": msg}
def half_crop_image(self, im, position, label=False):
    """Crop the left or right half of ``im`` plus a fixed overlap band.

    Fixes a Python 3 bug: the original used ``w / 2`` (float) as a slice
    index, which raises ``TypeError``; floor division ``//`` restores the
    Python 2 behaviour.

    The returned crop is a view of ``im``, so the ``label`` writes below
    also modify the input array — preserved intentionally.

    im       2-D (H x W) array-like supporting numpy-style slicing
    position 'left' or 'right'
    label    when True, paint the overlap band with 255 (ignore label)
    """
    overlap = 210
    w = im.shape[1]
    half = w // 2  # floor division: valid slice index on Python 3
    if position == 'left':
        crop = im[:, :half + overlap]
        if label:
            # Mask the borrowed right-side band so it is ignored as label.
            crop[:, half:half + overlap] = 255
    elif position == 'right':
        crop = im[:, half - overlap:]
        if label:
            # Mask the borrowed left-side band.
            crop[:, :overlap] = 255
    else:
        raise Exception("Unsupported crop")
    return crop
// Unfocus the window and any children (eg. when leaving `focus parent`)
// Drops any active pointer constraint and clears the seat's keyboard focus,
// then notifies the affected window(s): every window on the output for an
// N_OUTPUT node, or the single window for an N_WINDOW node.
// NOTE(review): other node types fall through silently — confirm intended.
static void seat_send_unfocus(struct wls_transaction_node *node, struct sway_seat *seat) {
	sway_cursor_constrain(seat->cursor, NULL);
	wlr_seat_keyboard_notify_clear_focus(seat->wlr_seat);
	if (node->type == N_OUTPUT) {
		output_for_each_window(node->sway_output, send_unfocus, seat);
	} else if (node->type == N_WINDOW) {
		send_unfocus(node->wls_window, seat);
	}
}
/**
* Finds the ColumnType enum corresponding to the incoming Java class.
*
* @param cls
* @return
*/
public static ColumnType convertColumnType(Class<?> cls) {
if (cls == null) {
throw new IllegalArgumentException("Class cannot be null");
}
final ColumnType type;
if (cls == String.class) {
type = ColumnType.STRING;
} else if (cls == Boolean.class || cls == boolean.class) {
type = ColumnType.BOOLEAN;
} else if (cls == Character.class || cls == char.class || cls == Character[].class || cls == char[].class) {
type = ColumnType.CHAR;
} else if (cls == Byte.class || cls == byte.class) {
type = ColumnType.TINYINT;
} else if (cls == Short.class || cls == short.class) {
type = ColumnType.SMALLINT;
} else if (cls == Integer.class || cls == int.class) {
type = ColumnType.INTEGER;
} else if (cls == Long.class || cls == long.class || cls == BigInteger.class) {
type = ColumnType.BIGINT;
} else if (cls == Float.class || cls == float.class) {
type = ColumnType.FLOAT;
} else if (cls == Double.class || cls == double.class) {
type = ColumnType.DOUBLE;
} else if (cls == BigDecimal.class) {
type = ColumnType.DECIMAL;
} else if (Number.class.isAssignableFrom(cls)) {
type = ColumnType.NUMBER;
} else if (Map.class.isAssignableFrom(cls)) {
type = ColumnType.MAP;
} else if (List.class.isAssignableFrom(cls)) {
type = ColumnType.LIST;
} else if (Set.class.isAssignableFrom(cls)) {
type = ColumnType.SET;
} else if (cls == java.sql.Date.class) {
type = ColumnType.DATE;
} else if (cls == Timestamp.class) {
type = ColumnType.TIMESTAMP;
} else if (cls == Time.class) {
type = ColumnType.TIME;
} else if (Date.class.isAssignableFrom(cls)) {
type = ColumnType.TIMESTAMP;
} else if (cls == UUID.class) {
type = ColumnType.UUID;
} else if (cls == InetAddress.class) {
type = ColumnType.INET;
} else {
type = ColumnType.OTHER;
}
return type;
} |
/// return the distance to another object
pub fn distance_to(&self, other: &Object) -> f32 {
let dx = other.x - self.x;
let dy = other.y - self.y;
((dx.pow(2) + dy.pow(2)) as f32).sqrt()
} |
// String returns a complete task string in todo.txt format.
//
// Contexts, Projects and additional tags are alphabetically sorted,
// and appended at the end in the following order:
// Contexts, Projects, Tags
//
// For example:
// "(A) 2013-07-23 Call Dad @Home @Phone +Family due:2013-07-31 customTag1:Important!"
func (task Task) String() string {
var sb strings.Builder
if task.Completed {
sb.WriteString("x ")
if task.HasCompletedDate() {
sb.WriteString(fmt.Sprintf("%s ", task.CompletedDate.Format(DateLayout)))
}
}
if task.HasPriority() && (!task.Completed || !RemoveCompletedPriority) {
sb.WriteString(fmt.Sprintf("(%s) ", task.Priority))
}
if task.HasCreatedDate() {
sb.WriteString(fmt.Sprintf("%s ", task.CreatedDate.Format(DateLayout)))
}
sb.WriteString(task.Todo)
if task.HasContexts() {
sort.Strings(task.Contexts)
for _, context := range task.Contexts {
sb.WriteString(fmt.Sprintf(" @%s", context))
}
}
if task.HasProjects() {
sort.Strings(task.Projects)
for _, project := range task.Projects {
sb.WriteString(fmt.Sprintf(" +%s", project))
}
}
if task.HasAdditionalTags() {
keys := make([]string, 0, len(task.AdditionalTags))
for key := range task.AdditionalTags {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
sb.WriteString(fmt.Sprintf(" %s:%s", key, task.AdditionalTags[key]))
}
}
if task.HasDueDate() {
sb.WriteString(fmt.Sprintf(" due:%s", task.DueDate.Format(DateLayout)))
}
return sb.String()
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.