/**
* @param fragment the {@link Fragment} invoking the integration;
* {@link #startActivityForResult(Intent, int)} will be called on that {@link Fragment}
* instead of on an {@link Activity}.
*/
public static IntentIntegrator forSupportFragment(android.support.v4.app.Fragment fragment) {
MyIntentIntegrator integrator = new MyIntentIntegrator(fragment.getActivity());
integrator.supportFragment = fragment;
return integrator;
} |
Low-cost solar water heater
This paper presents an overview of the use of solar energy for domestic water heating. It also includes the results of preliminary research into developing a low-cost system for heating water using solar energy, a green energy source. In the system presented here, the heat radiated by sunlight is absorbed by means of a thermal collector. The heating and recirculation processes are automatically controlled using sensors, actuators, and a Programmable Logic Controller (PLC). The results obtained indicate that the system has the potential to supply enough hot water to meet the requirements of domestic consumption.
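The abstract stops short of the control logic itself; below is a minimal Python sketch of the kind of differential-temperature control step a PLC could implement for the heating and recirculation loop. All names, thresholds, and the sensor/pump callables are illustrative assumptions, not details from the paper.

# Hypothetical differential-temperature control step for the recirculation pump.
ON_DELTA = 6.0        # start recirculating when the collector is 6 C hotter than the tank
OFF_DELTA = 2.0       # stop below a 2 C difference (hysteresis avoids rapid cycling)
MAX_TANK_TEMP = 70.0  # safety cutoff

def control_step(read_collector_temp, read_tank_temp, set_pump, pump_on):
    delta = read_collector_temp() - read_tank_temp()
    if read_tank_temp() >= MAX_TANK_TEMP:
        pump_on = False   # tank hot enough; never exceed the safety limit
    elif delta >= ON_DELTA:
        pump_on = True    # enough solar gain to be worth moving heat
    elif delta <= OFF_DELTA:
        pump_on = False   # too little gain; avoid cycling on sensor noise
    set_pump(pump_on)
    return pump_on
|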
/**
 * Creates the password at random: one uppercase letter, five lowercase
 * letters, one digit, and one symbol.
 */
public void crearPassword() {
    password = "";
    char[] minusculas = "abcdefghijklmnopqrstuvwxyz".toCharArray();
    char[] mayusculas = "abcdefghijklmnopqrstuvwxyz".toUpperCase().toCharArray();
    char[] numeros = "0123456789".toCharArray();
    char[] simbolos = "'¿?*+-$%".toCharArray();
    // One Random instance is enough; re-creating it before every draw is
    // wasteful and, when re-seeded in quick succession, can hurt the distribution.
    Random random = new Random();
    password += mayusculas[random.nextInt(mayusculas.length)];
    for (int i = 0; i < 5; i++) {
        password += minusculas[random.nextInt(minusculas.length)];
    }
    password += numeros[random.nextInt(numeros.length)];
    password += simbolos[random.nextInt(simbolos.length)];
}
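java.util.Random is fine for throwaway strings, but it is predictable; for real credentials a cryptographically strong generator is preferable. A minimal variant with the same fixed layout using java.security.SecureRandom (the method name and the return-a-String design are illustrative, not part of the original class):

import java.security.SecureRandom;

public String crearPasswordSegura() {
    SecureRandom random = new SecureRandom();
    String mayusculas = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    String minusculas = "abcdefghijklmnopqrstuvwxyz";
    String numeros = "0123456789";
    String simbolos = "'¿?*+-$%";
    StringBuilder sb = new StringBuilder();
    sb.append(mayusculas.charAt(random.nextInt(mayusculas.length())));
    for (int i = 0; i < 5; i++) {
        sb.append(minusculas.charAt(random.nextInt(minusculas.length())));
    }
    sb.append(numeros.charAt(random.nextInt(numeros.length())));
    sb.append(simbolos.charAt(random.nextInt(simbolos.length())));
    return sb.toString();
}
|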
/**
* Flatten categories as expected by Hub
*
* @param {any} categoriesAggs categories aggs array as [{ key, docCount }]
* @returns {any} flattened aggs as [{ key, docCount }] with docCount summed per unique category
*
* Input example:
* [{ key: '/categories/economy', docCount: 4 }, { key: 'categories/economy/business', docCount: 5 }]
* Output: [{ key: 'economy', docCount: 9 }, { key: 'business', docCount: 5 }]
*/
export function flattenCategories(categoriesAggs: any = []) {
const set = new Set();
const exclude = ["", "categories"];
// 1. get a flattened unique set of categories
categoriesAggs.forEach((agg: any) => {
const candidates = agg.key
.split("/")
.filter((k: string) => exclude.indexOf(k) === -1);
candidates.forEach((k: string) => {
set.add(k);
});
});
// 2. sum docCount for unique keys
const flattenedCategoriesAggs = Array.from(set).reduce(
(flattenedAggs: any, uniqueKey: any) => {
const docCount = categoriesAggs
// match whole path segments so "economy" does not also match e.g. "economy2"
.filter((agg: any) => agg.key.split("/").includes(uniqueKey))
.map((agg: any) => agg.docCount)
.reduce((x: number, y: number) => x + y, 0);
flattenedAggs.push({
key: uniqueKey,
docCount
});
return flattenedAggs;
},
[]
);
return flattenedCategoriesAggs;
}
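A quick usage sketch with the docblock's own example data (assumes flattenCategories is imported from the module that exports it):

const aggs = [
  { key: "/categories/economy", docCount: 4 },
  { key: "categories/economy/business", docCount: 5 }
];
console.log(flattenCategories(aggs));
// -> [ { key: 'economy', docCount: 9 }, { key: 'business', docCount: 5 } ]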
|
def parse_readout(self, readout_string):
    """Parse a readout string into the logical readout plus X/Z syndrome-change events.

    The first space-separated chunk is taken as the final data readout; the
    remaining chunks are per-round syndrome measurements, consumed in reverse
    order. Within a round, the X syndromes occupy the high `syn_len` bits and
    the Z syndromes the low `syn_len` bits.
    """
    syn_len = (self.d ** 2 - 1) // 2
    chunks = readout_string.split(" ")
    # Reverse the syndrome rounds; chunk 0 (the data readout) is excluded.
    int_syndromes = [int(x, base=2) for x in chunks[-1:0:-1]]
    # XOR consecutive rounds so that only syndrome *changes* survive.
    xor_syndromes = [a ^ b for (a, b) in zip(int_syndromes, int_syndromes[1:])]
    mask_Z = "1" * syn_len
    mask_X = mask_Z + "0" * syn_len
    X_syndromes = [(x & int(mask_X, base=2)) >> syn_len for x in xor_syndromes]
    Z_syndromes = [x & int(mask_Z, base=2) for x in xor_syndromes]
    # Map each set bit to a (time, row, column) event coordinate.
    X = []
    for T, syndrome in enumerate(X_syndromes):
        for loc in range(syn_len):
            if syndrome & 1 << loc:
                X.append((T, -0.5 + loc, 0.5 + loc % 2))
    Z = []
    for T, syndrome in enumerate(Z_syndromes):
        for loc in range(syn_len):
            if syndrome & 1 << loc:
                Z.append((T, 0.5 + loc // 2, 0.5 + loc % 2 * 2 - loc // 2))
    return (
        int(chunks[0]),
        {"X": X, "Z": Z},
    ) |
package logrotate
import (
"bytes"
"fmt"
"io"
"os"
"sync"
"time"
)
var _ io.WriteCloser = (*Logrotate)(nil)
// Logrotate struct
type Logrotate struct {
sync.Mutex
Age time.Duration
Num int
Size int
Timestamp bool
file *os.File
sTime time.Time
size int64
}
// New returns an instance of Logrotate.
// Defaults:
//   age       86400 (rotate every 24h0m0s)
//   num       7 files
//   size      0 (no size limit)
//   timestamp false
func New(logfile string, age, num, size int, timestamp bool) (*Logrotate, error) {
f, err := os.OpenFile(logfile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
if err != nil {
return nil, err
}
Age := time.Duration(0)
if age > 0 {
Age = time.Duration(age) * time.Second
}
num--
if num < 0 {
num = 7
}
Size := 0
if size > 0 {
Size = size * 1048576
}
lg := &Logrotate{
Age: Age,
Num: num,
Size: Size,
Timestamp: timestamp,
file: f,
sTime: time.Now(),
}
// rotate if needed
if i, err := lg.file.Stat(); err == nil {
if lg.Age > 0 && time.Since(i.ModTime()) >= lg.Age {
if err := lg.rotate(); err != nil {
return nil, err
}
} else if lg.Size > 0 && i.Size() > int64(lg.Size) {
if err := lg.rotate(); err != nil {
return nil, err
}
}
}
return lg, nil
}
// Write implements io.Writer
func (l *Logrotate) Write(p []byte) (n int, err error) {
l.Lock()
defer l.Unlock()
var log []byte
if l.Timestamp {
t := []byte(time.Now().UTC().Format(time.RFC3339Nano))
c := [][]byte{t, p}
log = bytes.Join(c, []byte(" "))
} else {
log = p
}
writeLen := int64(len(log))
// rotate when either the age or the size threshold is exceeded
if l.Age > 0 && time.Since(l.sTime) >= l.Age {
l.sTime = time.Now()
if err := l.rotate(); err != nil {
return 0, err
}
} else if l.Size > 0 && l.size+writeLen > int64(l.Size) {
if err := l.rotate(); err != nil {
return 0, err
}
}
n, err = l.file.Write(log)
l.size += int64(n)
return n, err
}
// Close implements io.Closer, and closes the current logfile
func (l *Logrotate) Close() error {
l.Lock()
defer l.Unlock()
return l.close()
}
// close closes the file if it is open
func (l *Logrotate) close() error {
if l.file == nil {
return nil
}
err := l.file.Close()
l.file = nil
return err
}
// Rotate is the exported, locking wrapper around rotate
func (l *Logrotate) Rotate() error {
l.Lock()
defer l.Unlock()
return l.rotate()
}
// rotate closes the existing log file and creates a new one
func (l *Logrotate) rotate() error {
name := l.file.Name()
l.close()
// rotate logs
for i := l.Num; i >= 0; i-- {
logfile := fmt.Sprintf("%s.%d", name, i)
if _, err := os.Stat(logfile); err == nil {
// delete old file
if i == l.Num {
os.Remove(logfile)
} else if err := os.Rename(logfile, fmt.Sprintf("%s.%d", name, i+1)); err != nil {
return err
}
}
}
// move the current log to <name>.0
if err := os.Rename(name, fmt.Sprintf("%s.0", name)); err != nil {
return err
}
// create new log file
f, err := os.OpenFile(name, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
if err != nil {
return err
}
l.file = f
l.size = 0
return nil
}
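A minimal usage sketch (the import path is a placeholder assumption; arguments follow New's age/num/size/timestamp semantics documented above):

package main

import (
	"log"

	"example.com/logrotate" // placeholder import path for this package
)

func main() {
	// rotate daily, keep 7 files, no size limit, prefix each entry with a timestamp
	lr, err := logrotate.New("/tmp/app.log", 86400, 7, 0, true)
	if err != nil {
		log.Fatal(err)
	}
	defer lr.Close()
	log.SetFlags(0)   // Logrotate already prefixes a timestamp
	log.SetOutput(lr) // all standard log output now goes through the rotating writer
	log.Println("hello, rotated world")
}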
|
Facetoplasty Using Radiofrequency Thermocoagulation for Facet Joint Hypertrophy.
Lumbar spinal stenosis is one of the most common pathologic conditions affecting the lumbar spine. Pain and/or disability in the low back and lower extremities, with or without neurogenic claudication, may occur as a result of compression of the dural sac contents or nerve roots in the narrowed space. Bulging and protrusion, facet joint hypertrophy, and disc herniation combined with osteophytes and arthritic changes of the facet joints can be causes of lumbar spinal stenosis. Medical/interventional treatment may be considered as an initial treatment for patients with mild symptoms of lumbar spinal stenosis. Surgery is usually considered when medical/interventional treatment has failed. Even though surgery has conventionally been considered the definitive treatment for spinal stenosis, it has potential problems, including complications related to general anesthesia and failed back surgery syndrome. For that reason, minimally invasive techniques such as percutaneous endoscopic lumbar discectomy (PELD), epiduroscopic laser neural decompression (ELND), and nucleoplasty with radiofrequency have been developed as alternatives to surgery. The authors present a case of treating lumbar spinal stenosis by using radiofrequency thermocoagulation. Radiofrequency therapy is used for spinal pain, usually in the form of neurotomy or nucleoplasty. The patient in this case had leg pain with neurogenic claudication caused by lumbar spinal stenosis from facet joint hypertrophy. His pain did not respond to conservative treatment, including epidural steroid injection, but he declined surgery. As an alternative, we applied radiofrequency thermocoagulation with a high-temperature electrode to the hypertrophied facet joint to decompress the spinal nerve, and the patient's pain improved without any complications after the treatment.
KEY WORDS
Low back pain, neurogenic claudication, lumbar spinal stenosis, facet joint hypertrophy, radiofrequency thermocoagulation, minimally invasive technique. |
/**
* MigrateRepoForm form for migrating repository
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class MigrateRepoForm {
@JSONField(name = "auth_password")
private String authPassword;
@JSONField(name = "auth_username")
private String authUsername;
@JSONField(name = "clone_addr")
private String cloneAddr;
@JSONField(name = "description")
private String description;
@JSONField(name = "issues")
private Boolean issues;
@JSONField(name = "labels")
private Boolean labels;
@JSONField(name = "milestones")
private Boolean milestones;
@JSONField(name = "mirror")
private Boolean mirror;
@JSONField(name = "private")
private Boolean _private;
@JSONField(name = "pull_requests")
private Boolean pullRequests;
@JSONField(name = "releases")
private Boolean releases;
@JSONField(name = "repo_name")
private String repoName;
@JSONField(name = "uid")
private Long uid;
@JSONField(name = "wiki")
private Boolean wiki;
}
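A quick construction sketch using the Lombok-generated builder (all values are hypothetical):

MigrateRepoForm form = MigrateRepoForm.builder()
        .cloneAddr("https://github.com/octocat/Hello-World.git")
        .repoName("hello-world")
        .mirror(false)
        ._private(true)
        .uid(1L)
        .build();
|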
Constitutional promises of indigenous recognition: Canada, Vanuatu and the challenges of pluralism
The Constitutions of Canada and Vanuatu commit to recognition of ‘Aboriginal rights’ and ‘customary laws’, respectively. The translation of these aspirations has led the courts deep into the challenges of pluralism, magnified here by the weight of colonialism and constitutional context. This article explores the progress in these two contrasting countries to provide a broader view of the undertaking. It is argued that the persistence of visible problems reveals more fundamental difficulties and that the collaboration essential to the task of ‘recognition’—and to shoring up Western legal systems in the modern reality—must begin earlier and run deeper. |
/**
* An AudioFiller implementation for feeding data from a PCMFLOAT wavetable.
* We do simple, linear interpolation for values that fall between table entries.
*/
public class WaveTableSource extends AudioSource {
@SuppressWarnings("unused") private static String TAG = WaveTableSource.class.getSimpleName();
/** The samples defining one cycle of the waveform to play */
protected float[] mWaveTbl;
/** The number of samples in the wave table. Note that the wave table is presumed to contain
* an "extra" sample (a copy of the 1st sample) in order to simplify the interpolation
* calculation. Thus, this value will be 1 less than the length of mWaveTbl.
*/
protected int mNumWaveTblSamples;
/** The phase (offset within the wave table) of the next output sample.
* Note that this may (will) be a fractional value. Range 0.0 -> mNumWaveTblSamples.
*/
protected float mSrcPhase;
/** The sample rate at which playback occurs */
protected float mSampleRate = 48000; // This seems likely, but can be changed
/** The frequency of the generated audio signal */
protected float mFreq = 1000; // Some reasonable default frequency
/** The "Nominal" frequency of the wavetable. i.e., the frequency that would be generated if
* each sample in the wave table was sent in turn to the output at the specified sample rate.
*/
protected float mFN;
/** 1 / mFN. Calculated when mFN is set to avoid a division on each call to fill() */
protected float mFNInverse;
/**
* Constructor.
*/
public WaveTableSource() {
}
/**
* Calculates the "Nominal" frequency of the wave table.
*/
private void calcFN() {
mFN = mSampleRate / (float)mNumWaveTblSamples;
mFNInverse = 1.0f / mFN;
}
/**
* Sets up to play samples from the provided wave table.
* @param waveTbl Contains the samples defining a single cycle of the desired waveform.
* This wave table contains a redundant sample in the last slot (== first slot)
* to make the interpolation calculation simpler, so the logical length of
* the wave table is one less than the length of the array.
*/
public void setWaveTable(float[] waveTbl) {
mWaveTbl = waveTbl;
mNumWaveTblSamples = waveTbl != null ? mWaveTbl.length - 1 : 0;
calcFN();
}
/**
* Sets the playback sample rate for which samples will be generated.
* @param sampleRate
*/
public void setSampleRate(float sampleRate) {
mSampleRate = sampleRate;
calcFN();
}
/**
* Set the frequency of the output signal.
* @param freq Signal frequency in Hz.
*/
public void setFreq(float freq) {
mFreq = freq;
}
/**
* Resets the playback position to the 1st sample.
*/
@Override
public void reset() {
mSrcPhase = 0.0f;
}
/**
* Fills the specified buffer with values generated from the wave table which will playback
* at the specified frequency.
*
* @param buffer The buffer to be filled.
* @param numFrames The number of frames of audio to provide.
* @param numChans The number of channels (in the buffer) required by the player.
* @return The number of samples generated. Since we are generating a continuous periodic
* signal, this will always be <code>numFrames</code>.
*/
@Override
public int pull(float[] buffer, int numFrames, int numChans) {
final float phaseIncr = mFreq * mFNInverse;
int outIndex = 0;
for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
// 'mod' back into the waveTable
while (mSrcPhase >= (float)mNumWaveTblSamples) {
mSrcPhase -= (float)mNumWaveTblSamples;
}
// linear-interpolate between the two samples bracketing the phase
int srcIndex = (int)mSrcPhase;
float frac = mSrcPhase - (float)srcIndex;
// weight the left sample by (1 - frac) and the right sample by frac
// (the original weighted the neighbors the wrong way around)
float value = (mWaveTbl[srcIndex] * (1.0f - frac)) + (mWaveTbl[srcIndex + 1] * frac);
// Put the same value in all channels.
// This is inefficient and should be pulled out of this loop
for (int chanIndex = 0; chanIndex < numChans; chanIndex++) {
buffer[outIndex++] = value;
}
mSrcPhase += phaseIncr;
}
return numFrames;
}
/*
* Standard wavetable generators
*/
/**
* Generates a sine waveform wavetable.
* @param buffer The buffer to receive the sample values.
*/
public static void genSinWave(float[] buffer) {
int size = buffer.length;
float incr = ((float)Math.PI * 2.0f) / (float)(size - 1);
for(int index = 0; index < size; index++) {
buffer[index] = (float)Math.sin(index * incr);
}
}
/**
* Generates a triangular waveform
* @param buffer The buffer to receive the sample values.
* @param maxValue The maximum value for the generated wavetable
* @param minValue The minimum value for the generated wavetable.
* @param dutyCycle The fraction of wavetable for the first 1/4 of the triangle wave.
*/
public static void genTriangleWave(
float[] buffer, float maxValue, float minValue, float dutyCycle) {
float range = maxValue - minValue;
int size = buffer.length - 1;
// Make a triangle that goes 0 -> max -> min -> 0.
int index = 0;
int phase0Size = (int) (size / 2 * dutyCycle);
int breakIndex = phase0Size;
float val = 0;
// Phase 0 (0 -> max)
if (phase0Size != 0) {
float phase0Incr = maxValue / (float) phase0Size;
for (; index < breakIndex; ++index) {
buffer[index] = val;
val += phase0Incr;
}
} else {
val = maxValue;
}
// Phase 1 & 2 (max -> min)
breakIndex = size - phase0Size;
float incr = -range / ((float) size * (1.0f - dutyCycle));
for (; index < breakIndex; ++index) {
buffer[index] = val;
val += incr;
}
// Phase 3 (min -> 0)
if (phase0Size != 0) {
float phase0Incr = maxValue / (float) phase0Size;
for (; index < size; ++index) {
buffer[index] = val;
val += phase0Incr;
}
}
buffer[size] = buffer[0];
}
} |
/**
* This method will retry a given function if the resource is still in the "Terminating" phase. We need to catch this exception
* because the parsing of JSON responses is buggy: https://github.com/kubernetes-client/java/issues/86
*
* @param obj The object that will be passed to the function
* @param function the function that will be applied after sleeping
* @param e the caught exception
* @param <T> Any Kubernetes Object
*/
private <T> void retry(T obj, @NotNull Consumer<T> function, @NotNull ApiException e) {
if (e.getMessage().equals("Conflict")) {
var resp = new Gson().fromJson(e.getResponseBody(), KubeStatusResponse.class);
if (resp.getMessage().startsWith("object is being deleted")) {
try {
log.info("Object is still being deleted, retrying...");
Thread.sleep(4000);
function.accept(obj);
return;
} catch (InterruptedException ex) {
Thread.currentThread().interrupt(); // restore the interrupt flag before continuing
ex.printStackTrace();
}
}
if (resp.getMessage().contains("already exists")) {
return;
}
}
log.error("Unknown error", e);
log.info(e.getResponseBody());
}
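For context, a hypothetical call site. createPod here is an assumed helper wrapping the Kubernetes client's create call; the lambda handles the checked ApiException itself because Consumer cannot throw it:

try {
    createPod(pod); // assumed helper that throws ApiException
} catch (ApiException e) {
    retry(pod, p -> {
        try {
            createPod(p);
        } catch (ApiException inner) {
            log.error("retry attempt failed", inner);
        }
    }, e);
}
|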
/**
 * @file AnimalSilvestre.cpp
 * @brief Implementation of the AnimalSilvestre class.
 * @author
 * <NAME>,
 * <NAME>,
 * <NAME>.
 */
#include "AnimalSilvestre.h"
/**
 * @brief Default constructor of the AnimalSilvestre class.
 */
AnimalSilvestre::AnimalSilvestre(){}
/**
 * @brief Parameterized constructor of the AnimalSilvestre class.
 * @param new_autorizacao_ibama Character string representing the Ibama authorization.
 */
AnimalSilvestre::AnimalSilvestre(string new_autorizacao_ibama)
{
m_autorizacao_ibama = new_autorizacao_ibama;
}
/**
 * @brief Destructor of the AnimalSilvestre class.
 */
AnimalSilvestre::~AnimalSilvestre(){}
/**
 * @brief Returns the animal's Ibama authorization.
 * @return The animal's Ibama authorization.
 */
string AnimalSilvestre::get_autorizacao_ibama()
{
return m_autorizacao_ibama;
} |
/**
* To represent a newline value.
*/
protected static class NewlineValue {
protected final String value;
protected NewlineValue(String val) {
value = val;
}
public String getValue() {
return value;
}
} |
// TextToDialogParagraphs converts multi-line text into DialogParagraph instances.
func TextToDialogParagraphs(lines []string) []*DialogParagraph {
var msgs []*DialogParagraph
for _, line := range lines {
msgs = append(msgs, NewDialogParagraph(line))
}
return msgs
} |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# examples/chain_models.py from acumos/python-model-runner
# ===============LICENSE_START=======================================================
# Acumos Apache-2.0
# ===================================================================================
# Copyright (C) 2017-2018 AT&T Intellectual Property & Tech Mahindra. All rights reserved.
# ===================================================================================
# This Acumos software file is distributed by AT&T and Tech Mahindra
# under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============LICENSE_END=========================================================
'''
Provides an example of how to chain Acumos models
'''
from collections import Counter
import requests
import pexpect
from acumos.session import AcumosSession
from acumos.modeling import Model, List, Dict
_JSON = 'application/json'
class Runner(object):
    _HEADERS = {'Accept': _JSON, 'Content-Type': _JSON}

    def __init__(self, model_dir, port, host='localhost'):
        '''Helper class for managing model runners'''
        self.model_dir = model_dir
        self.base_url = "http://{}:{}".format(host, port)
        self.chains = dict()
        cmd = "acumos_model_runner --host {} --port {} {}".format(host, port, model_dir)
        self.proc = pexpect.spawn(cmd)
        self.proc.expect(r'^.*(Booting worker with pid).*$', timeout=5)

    def create_chain(self, chain_name, upstream_method, downstream_runner, downstream_method):
        '''Creates a chain that invokes a downstream runner with the response of the upstream runner'''
        self.chains[chain_name] = (upstream_method, downstream_runner, downstream_method)

    def call(self, method, data):
        '''Calls a model method with JSON data. If `method` is a chain, returns the downstream response'''
        if method in self.chains:
            upstream_method, downstream_runner, downstream_method = self.chains[method]
            resp_up = requests.post(self._full_url(upstream_method), json=data, headers=self._HEADERS).json()
            resp = downstream_runner.call(downstream_method, resp_up)
        else:
            resp = requests.post(self._full_url(method), json=data, headers=self._HEADERS).json()
        return resp

    def _full_url(self, method):
        '''Returns a full url given a method name'''
        return "{}/model/methods/{}".format(self.base_url, method)


if __name__ == '__main__':
    '''Test area'''
    def tokenize(value: str) -> List[str]:
        '''Segments text into tokens'''
        return value.split()

    def count(value: List[str]) -> Dict[str, int]:
        '''Returns a count of tokens'''
        return Counter(value)

    # define models
    tokenizer = Model(tokenize=tokenize)
    counter = Model(count=count)

    # save models
    session = AcumosSession()
    session.dump(tokenizer, 'tokenizer', '.')
    session.dump(counter, 'counter', '.')

    # instantiate runners
    runner1 = Runner('tokenizer', 3330)
    runner2 = Runner('counter', 3331)

    # call individual methods
    runner1.call('tokenize', {'value': 'hello world'})  # {'value': ['hello', 'world']}
    runner2.call('count', {'value': ['hello', 'world']})  # {'value': {'hello': 1, 'world': 1}}

    # create and call chain
    runner1.create_chain('count_tokens', 'tokenize', runner2, 'count')
    runner1.call('count_tokens', {'value': 'hello world'})  # {'value': {'world': 1, 'hello': 1}}

    runner1.proc.terminate()
    runner2.proc.terminate()
|
/* Kattis - errands (from BrandonTang89/CP4_Code)
A seemingly easy shortest Hamiltonian path question is made a lot longer due to the need to map string locations to
their 2d grid locations and also the need to print the optimal route rather than just its length.
This requires converting the location names to a vector of (x, y) points that can be accessed
according to the bitmask. Then, after we find the shortest length possible, we need to trace it and store
the order of locations visited in another vector that we convert back into the original names.
Debugging:
Remember to use EPS when comparing 2 (previously calculated) floating point numbers to prevent hard to diagnose
errors.
Also remember to flush input buffer before using getline after using cin.
Time: O(2^(n-1) * n^2) per test case
Mem: O(2^(n-1) * n)
*/
#pragma GCC optimize("Ofast")
#pragma GCC target("sse,sse2,sse3,ssse3,sse4,popcnt,abm,mmx,avx,avx2,fma")
#pragma GCC optimize("unroll-loops")
#include <bits/stdc++.h>
using namespace std;
typedef long double ld;
#define LSOne(S) ((S) & -(S))
#define EPS 1e-12
int n;
ld x, y;
string name, tc;
map<string, pair<ld,ld>> locations;
ld memo[10 + 1 + 1][(int) (1<<(11-1))]; // 11 locations, including work, excluding home
vector<pair<ld,ld>> tcl; //test case locations
vector<int> order_visits;
vector<string> reverse_mapper;
ld dist(pair<ld,ld> a, pair<ld, ld> b){
return (sqrt((a.first - b.first)*(a.first - b.first) + (a.second - b.second)*(a.second - b.second)));
}
ld dp(int u, int bm){
if (bm == 0){
return dist(locations["home"], tcl[u]);
}
ld &ans = memo[u][bm];
if (ans != -1)return ans;
ans = 1e9;
int m = bm;
while (m){
int two_pow_v = LSOne(m);
int v = __builtin_ctz(two_pow_v) + 1;
ans = min(ans, dist(tcl[u], tcl[v]) + dp(v, bm^two_pow_v));
m -= two_pow_v;
}
return ans;
}
void fill_order_visits(int u, int bm){
//printf("u: %d, bm: %d\n", u, bm);
if (bm == 0)return;
int m = bm;
while (m){
int two_pow_v = LSOne(m);
int v = __builtin_ctz(two_pow_v) + 1;
m -= two_pow_v;
if (abs(dp(u, bm) - (dist(tcl[u], tcl[v]) + dp(v, bm^two_pow_v))) <= EPS){
// this node is next on the shortest path
order_visits.emplace_back(v);
fill_order_visits(v, bm^two_pow_v);
break;
}
}
return;
}
int main(){
cin >> n;
for (int i=0;i<n;i++){
cin >> name >> x >> y;
locations[name] = make_pair(x,y);
}
cin.ignore(std::numeric_limits<std::streamsize>::max(),'\n');
while (getline(cin, tc)){
// Reset Variables
tcl.clear();
order_visits.clear();
reverse_mapper.clear();
memset(memo, -1, sizeof(memo));
// add in the work location as starting location
tcl.emplace_back(locations["work"]);
reverse_mapper.emplace_back("work");
// Parse Input, split by char
istringstream ss(tc);
while (ss >> name){
tcl.emplace_back(locations[name].first, locations[name].second);
reverse_mapper.emplace_back(name);
}
n = (int) tcl.size();
//for (auto i: tcl){printf("(%Le, %Le) ", i.first, i.second);}cout << endl;
// Hamiltonian Path from work to home
fill_order_visits(0, (1<<(n-1))-1);
for (int i=0; i<(int)order_visits.size(); i++){
cout << reverse_mapper[order_visits[i]] << ((i == (int) (order_visits.size() - 1)) ? '\n' : ' ');
}
}
return 0;
} |
// src/main/java/calculator/validation/SourceRule.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package calculator.validation;
import calculator.common.CommonUtil;
import calculator.engine.annotation.Internal;
import calculator.engine.script.ScriptEvaluator;
import graphql.analysis.QueryVisitorFieldEnvironment;
import graphql.analysis.QueryVisitorFragmentSpreadEnvironment;
import graphql.analysis.QueryVisitorInlineFragmentEnvironment;
import graphql.language.Directive;
import graphql.language.SourceLocation;
import graphql.util.TraverserContext;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static calculator.common.CommonUtil.getDependenceSourceFromDirective;
import static calculator.common.GraphQLUtil.pathForTraverse;
import static calculator.engine.metadata.Directives.ARGUMENT_TRANSFORM;
import static calculator.engine.metadata.Directives.INCLUDE_BY;
import static calculator.engine.metadata.Directives.MAP;
import static calculator.engine.metadata.Directives.SKIP_BY;
import static java.lang.String.format;
/**
* Checks whether the usage of @fetchSource on a field is correct.
*
*/
@Internal
public class SourceRule extends AbstractRule {
private final List<String> variableNames;
private final ScriptEvaluator scriptEvaluator;
// <sourceName, annotatedFieldFullPath>
private final Map<String, String> sourceWithAnnotatedField;
// <fieldFullPath, topTaskFieldPath>
private final Map<String, String> fieldWithTopTask;
// <fieldFullPath, List<sourceName>>
private final Map<String, List<String>> sourceUsedByField;
// <fieldFullPath, List<ancestorFullPath>>
private final Map<String, Set<String>> fieldWithAncestorPath;
private final HashSet<String> unusedSource = new HashSet<>();
public SourceRule(
List<String> variableNames,
ScriptEvaluator scriptEvaluator,
Map<String, String> sourceWithAnnotatedField,
Map<String, String> fieldWithTopTask,
Map<String, List<String>> sourceUsedByField,
Map<String, Set<String>> fieldWithAncestorPath) {
this.variableNames = variableNames;
this.scriptEvaluator = Objects.requireNonNull(scriptEvaluator);
this.sourceWithAnnotatedField = sourceWithAnnotatedField;
this.fieldWithTopTask = fieldWithTopTask;
this.sourceUsedByField = sourceUsedByField;
this.fieldWithAncestorPath = fieldWithAncestorPath;
unusedSource.addAll(sourceWithAnnotatedField.keySet());
}
public HashSet<String> getUnusedSource() {
return unusedSource;
}
@Override
public void visitField(QueryVisitorFieldEnvironment environment) {
if (environment.getTraverserContext().getPhase() != TraverserContext.Phase.ENTER) {
return;
}
List<Directive> directives = environment.getField().getDirectives();
if (directives == null || directives.isEmpty()) {
return;
}
String fieldFullPath = pathForTraverse(environment);
for (Directive directive : directives) {
if (Objects.equals(directive.getName(), SKIP_BY.getName())) {
List<String> dependencySources = getDependenceSourceFromDirective(directive);
if (dependencySources == null || dependencySources.isEmpty()) {
continue;
}
if (!validateSourceExist(fieldFullPath, directive, dependencySources)) {
continue;
}
String predicate = (String) CommonUtil.parseValue(
directive.getArgument("predicate").getValue()
);
if (!validateSourceUsageOnExp(fieldFullPath, directive, dependencySources, predicate)) {
continue;
}
if (!validateNodeNameNotSameWithVariable(fieldFullPath, directive, dependencySources)) {
continue;
}
// circular check
if (circularReferenceCheck(directive.getSourceLocation(), fieldFullPath, dependencySources)) {
continue;
}
}else if (Objects.equals(directive.getName(), INCLUDE_BY.getName())) {
List<String> dependencySources = getDependenceSourceFromDirective(directive);
if (dependencySources == null || dependencySources.isEmpty()) {
continue;
}
if (!validateSourceExist(fieldFullPath, directive, dependencySources)) {
continue;
}
String predicate = (String) CommonUtil.parseValue(
directive.getArgument("predicate").getValue()
);
if (!validateSourceUsageOnExp(fieldFullPath, directive, dependencySources, predicate)) {
continue;
}
if (!validateNodeNameNotSameWithVariable(fieldFullPath, directive, dependencySources)) {
continue;
}
// circular check
if (circularReferenceCheck(directive.getSourceLocation(), fieldFullPath, dependencySources)) {
continue;
}
} else if (Objects.equals(directive.getName(), MAP.getName())) {
List<String> dependencySources = getDependenceSourceFromDirective(directive);
if (dependencySources == null || dependencySources.isEmpty()) {
continue;
}
if (!validateSourceExist(fieldFullPath, directive, dependencySources)) {
continue;
}
String mapper = (String) CommonUtil.parseValue(
directive.getArgument("mapper").getValue()
);
if (!validateSourceUsageOnExp(fieldFullPath, directive, dependencySources, mapper)) {
continue;
}
// circular check
if (circularReferenceCheck(directive.getSourceLocation(), fieldFullPath, dependencySources)) {
continue;
}
} else if (Objects.equals(directive.getName(), ARGUMENT_TRANSFORM.getName())) {
List<String> dependencySources = getDependenceSourceFromDirective(directive);
if (dependencySources == null || dependencySources.isEmpty()) {
continue;
}
if (!validateSourceExist(fieldFullPath, directive, dependencySources)) {
continue;
}
String expression = (String) CommonUtil.parseValue(
directive.getArgument("expression").getValue()
);
if (!validateSourceUsageOnExp(fieldFullPath, directive, dependencySources, expression)) {
continue;
}
if (!validateNodeNameNotSameWithVariable(fieldFullPath, directive, dependencySources)) {
continue;
}
// circular check
if (circularReferenceCheck(directive.getSourceLocation(), fieldFullPath, dependencySources)) {
continue;
}
}
}
}
@Override
public void visitInlineFragment(QueryVisitorInlineFragmentEnvironment environment) {
}
@Override
public void visitFragmentSpread(QueryVisitorFragmentSpreadEnvironment environment) {
}
/**
* Determine whether the dependency sources exist, and remove them from unusedSource.
*
* @param fieldFullPath fieldFullPath
* @param directive directive
* @param dependencySources dependencySources
*
* @return true if all dependency sources exist, otherwise false.
*/
private boolean validateSourceExist(String fieldFullPath, Directive directive, List<String> dependencySources) {
// The dependency sources must exist.
if (!sourceWithAnnotatedField.keySet().containsAll(dependencySources)) {
List<String> unExistSource = dependencySources.stream()
.filter(sourceName -> !sourceWithAnnotatedField.containsKey(sourceName))
.collect(Collectors.toList());
// In error messages, names are quoted with single quotes '' and paths with curly braces {}.
String errorMsg = format(
"the fetchSource %s used by @%s on {%s} do not exist.", unExistSource, directive.getName(), fieldFullPath
);
addValidError(directive.getSourceLocation(), errorMsg);
return false;
}
unusedSource.removeAll(dependencySources);
return true;
}
/**
* Determine whether all dependency sources are used by directive expression.
*
* @param fieldFullPath fieldFullPath
* @param directive directive
* @param dependencySources dependencySources
* @param expression expression
* @return true if all dependency sources are used by the expression, otherwise false
*/
private boolean validateSourceUsageOnExp(String fieldFullPath, Directive directive, List<String> dependencySources, String expression) {
List<String> arguments = scriptEvaluator.getScriptArgument(expression);
if (!arguments.containsAll(dependencySources)) {
List<String> unUsedSource = dependencySources.stream()
.filter(sourceName -> !arguments.contains(sourceName))
.collect(Collectors.toList());
String errorMsg = format(
"the fetchSource %s do not used by @%s on {%s}.", unUsedSource, directive.getName(), fieldFullPath
);
addValidError(directive.getSourceLocation(), errorMsg);
return false;
}
return true;
}
/**
* Determine whether the source name is same as variable name.
* @param fieldFullPath fieldFullPath
* @param directive directive
* @param dependencySources dependency sources name list
* @return true if no dependency source name collides with a variable name, otherwise false.
*/
private boolean validateNodeNameNotSameWithVariable(String fieldFullPath, Directive directive, List<String> dependencySources) {
List<String> sourcesWithSameArgumentName = dependencySources.stream().filter(variableNames::contains).collect(Collectors.toList());
if (!sourcesWithSameArgumentName.isEmpty()) {
String errorMsg = format(
"the dependencySources %s on {%s} must be different to variable name %s.",
sourcesWithSameArgumentName, fieldFullPath, variableNames
);
addValidError(directive.getSourceLocation(), errorMsg);
return false;
}
return true;
}
private boolean circularReferenceCheck(SourceLocation sourceLocation, String fieldFullPath, List<String> dependencySources) {
for (String dependencySource : dependencySources) {
ArrayList<String> pathList = new ArrayList<>();
pathList.add(fieldFullPath);
pathList.add(sourceWithAnnotatedField.get(dependencySource));
if (doCircularReferenceCheck(sourceLocation, pathList)) {
return true;
}
}
return false;
}
/**
* Determine whether fields in the dependency path list share the same topTask field, or whether an ancestor depends on a descendant field.
*
* @param sourceLocation the field which dependency path list start
* @param pathList dependency pathList
* @return true if there is an error
*/
private boolean doCircularReferenceCheck(SourceLocation sourceLocation, ArrayList<String> pathList) {
for (int i = 0; i < pathList.size() - 1; i++) {
String fromPath = pathList.get(i);
List<String> tmpTraversedPath = new ArrayList<>();
tmpTraversedPath.add(fromPath);
for (int j = i + 1; j < pathList.size(); j++) {
String toPath = pathList.get(j);
tmpTraversedPath.add(toPath);
if (Objects.equals(fromPath, toPath)) {
String errorMsg = format(
"there is a circular dependency path %s.", tmpTraversedPath
);
addValidError(sourceLocation, errorMsg);
return true;
}
if (fieldWithAncestorPath.get(toPath).contains(fromPath)) {
String errorMsg = format(
"there is an ancestor relationship between {%s} and {%s}, " +
"and they are in the dependency path %s.",
toPath, fromPath, tmpTraversedPath
);
addValidError(sourceLocation, errorMsg);
return true;
}
}
}
LinkedHashMap<String, String> fieldFullByTopTaskPath = new LinkedHashMap<>();
List<String> traversedPath = new ArrayList<>();
for (String fieldFullPath : pathList) {
String topTaskPath = fieldWithTopTask.get(fieldFullPath);
if (fieldFullByTopTaskPath.containsValue(topTaskPath)) {
String errorMsg = format(
"the %s and %s share same ancestor '%s', and they are in the dependency path %s.",
fieldFullByTopTaskPath.get(topTaskPath), fieldFullPath, topTaskPath, traversedPath
);
addValidError(sourceLocation, errorMsg);
return true;
}
traversedPath.add(fieldFullPath);
fieldFullByTopTaskPath.put(topTaskPath, fieldFullPath);
}
String sourceField = pathList.get(pathList.size() - 1);
for (String sourceName : sourceUsedByField.getOrDefault(sourceField, Collections.emptyList())) {
String dependencyField = sourceWithAnnotatedField.get(sourceName);
pathList.add(dependencyField);
if (doCircularReferenceCheck(sourceLocation, pathList)) {
return true;
}
pathList.remove(dependencyField);
}
return false;
}
}
|
package de.discord.manage.managers;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import de.discord.core.Error;
import de.discord.core.RadioBot;
import de.discord.manage.other.Utils;
import de.discord.manage.sql.LiteSQL;
import de.discord.music.MusicController;
import net.dv8tion.jda.api.Permission;
import net.dv8tion.jda.api.entities.Guild;
import net.dv8tion.jda.api.entities.Member;
import net.dv8tion.jda.api.entities.TextChannel;
import net.dv8tion.jda.api.events.interaction.SlashCommandEvent;
import net.dv8tion.jda.api.exceptions.InsufficientPermissionException;
import net.dv8tion.jda.api.managers.AudioManager;
import javax.annotation.Nullable;
public class RadioManager {
public Map<String, RadioStation> radios;
public Map<RadioStation.Country, List<RadioStation>> countryStationList;
public static Map<String, RadioStation.Country> countryMap;
public RadioManager() {
radios = new ConcurrentHashMap<>();
countryStationList = new ConcurrentHashMap<>();
countryMap = new ConcurrentHashMap<>();
for (RadioStation.Country c : RadioStation.Country.values()) {
countryStationList.put(c, new ArrayList<>());
countryMap.put(c.name().toLowerCase(), c);
}
ResultSet set = LiteSQL.onQuery("SELECT * FROM radiostation");
try {
if (set != null) {
while (set.next()) {
String name = set.getString("name");
String url = set.getString("url");
String title = set.getString("title");
String countryName = set.getString("country");
RadioStation.Country country;
if ((country = countryMap.get(countryName)) != null) {
RadioStation station = new RadioStation(url, title, country, name);
radios.put(name, station);
} else {
throw new IllegalArgumentException("Country name not found: " + countryName);
}
}
}
} catch (SQLException e) {
e.printStackTrace();
}
List<String> list = Arrays.asList(radios.keySet().toArray(new String[]{}));
Collections.sort(list);
for (String s : list) {
RadioStation station = radios.get(s);
countryStationList.get(station.country).add(station);
}
}
public void addRadio(String name, String url, String title, RadioStation.Country country) {
RadioStation station = new RadioStation(url, title, country, name);
radios.put(name, station);
countryStationList.get(country).add(station);
LiteSQL.preparedUpdate("INSERT INTO radiostation(name, url, title, country) VALUES(?, ?, ?, ?)", name, url, title, country.name().toLowerCase());
}
public void addRadioV2(String cmd, String name, RadioStation.Country country, String url) {
RadioStation station = new RadioStation(url, ":radio: Now playing: " + name, country, cmd);
radios.put(cmd, station);
countryStationList.get(country).add(station);
LiteSQL.preparedUpdate("INSERT INTO radiostation(name, url, title, country) VALUES(?, ?, ?, ?)", cmd, url, ":radio: Now playing: " + name, country.name().toLowerCase());
}
public void removeRadio(String name) {
RadioStation station = radios.remove(name);
countryStationList.get(station.country).remove(station);
LiteSQL.preparedUpdate("DELETE FROM radiostation WHERE name = ?", name);
}
public void TriggerRadio(String command, TextChannel channel, Member member, Guild guild, @Nullable SlashCommandEvent event) {
if (member == null) return;
RadioStation station;
if ((station = radios.get(command)) != null) {
if (Utils.checkUserPermission(channel, member,this.getClass())) return;
if (Objects.requireNonNull(member.getVoiceState()).inVoiceChannel() && guild.getSelfMember().hasPermission(Objects.requireNonNull(member.getVoiceState().getChannel()), Permission.VOICE_SPEAK)) {
AudioManager manager = guild.getAudioManager();
if (botAvailable(manager, member)) {
try {
manager.openAudioConnection(member.getVoiceState().getChannel());
RadioBot.INSTANCE.logger.info("[RADIO] INFO " + station.name + " executed by " + member.getUser().getAsTag() + " in " + guild.getName());
MusicController controller = RadioBot.INSTANCE.playerManager.getController(guild);
controller.currentStation = station;
RadioBot.INSTANCE.playerManager.playRadio(guild, station, channel, event, member);
} catch (InsufficientPermissionException e) {
Error.set(channel, Error.Type.ChannelFull, getClass().getSimpleName(), event);
}
} else {
Error.set(channel, Error.Type.DifferentVC, getClass().getSimpleName(), event);
}
} else {
Error.set(channel, Error.Type.NoVC, getClass().getSimpleName(), event);
}
} else if (event != null) {
event.reply("This station doesn't exist!\nSee `" + RadioBot.INSTANCE.prefixMan.getPrefix(guild.getIdLong()) + "radio` for every station").queue(interactionHook -> interactionHook.deleteOriginal().queueAfter(5, TimeUnit.SECONDS));
}
}
public static boolean botAvailable(AudioManager manager, Member member) {
if (manager.getConnectedChannel() == null || !manager.isConnected()) {
return true;
}
boolean b = true;
for (Member m : manager.getConnectedChannel().getMembers()) {
if (!m.getUser().isBot()) {
b = false;
break;
}
}
if (manager.getConnectedChannel().getMembers().contains(member)) {
b = true;
}
return b;
}
} |
package com.turboconsulting.Service;
import com.turboconsulting.DAO.*;
import com.turboconsulting.Entity.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
@Service
public class ConsentService implements ConsentServiceInterface {
@Autowired
@Qualifier("sqlVisitorData")
private VisitorDao visitorDao;
@Autowired
@Qualifier("sqlVisitorExperimentData")
private VisitorExperimentDao visitorExperimentDao;
@Autowired
@Qualifier("sqlAccountData")
private AccountDao accountDao;
@Autowired
@Qualifier("sqlExperimentData")
private ExperimentDao experimentDao;
@Autowired
@Qualifier("sqlConsentData")
private ConsentOptionDao consentOptionDao;
@Autowired
@Qualifier("sqlConsentExperimentData")
private ConsentExperimentDao consentExperimentDao;
//////////////////////////////////////////////////////////////////////////ACCOUNT FUNCTIONS
@Override
public int getAccountID(String email) {
if(accountDao.findByEmail(email) != null)
return accountDao.findByEmail(email).getAccountId();
return -1;
}
@Override
public Iterable<Visitor> getAccountsVisitors(int aID) {
return visitorDao.findAllByAccount(getAccount(aID));
}
@Override
public Account getAccount(int id) {
return accountDao.findByAccountId(id);
}
@Override
public boolean updateAccountConsent(List<Integer> vIds, ConsentOption c) {
for (int vId : vIds) {
updateVisitorConsent(vId, c);
}
return true;
}
//////////////////////////////////////////////////////////////////////////VISITOR FUNCTIONS
@Override
public Visitor getVisitor(int id) {
return visitorDao.findByVisitorId(id);
}
@Override
public boolean updateVisitorConsent(int id, ConsentOption c) {
Visitor v = getVisitor(id);
if(consentOptionDao.findByName(c.getName()) == null) return false;
if(!c.getName().equals("No Consent") && !c.getName().equals("Full Consent")) return false;
v.setDefaultConsent(consentOptionDao.findByName(c.getName()));
return visitorDao.save(v) != null;
}
//////////////////////////////////////////////////////////////////////////EXPERIMENT FUNCTIONS
@Override
public Experiment getExperiment(int id) {
return experimentDao.findById(id);
}
@Override
public Iterable<ConsentOption> getExperimentsConsentOptions(int eId) {
ArrayList<ConsentOption> consentOptions = new ArrayList<>();
Experiment experiment = experimentDao.findById(eId);
for (ConsentExperiment consentExperiment : experiment.getConsentExperiments()) {
consentOptions.add(consentExperiment.getConsentOption());
}
return consentOptions;
}
//////////////////////////////////////////////////////////////////////VISITOR_EXPERIMENT FUNCTIONS
@Override
public ArrayList<VisitorExperiment> getVisitorExperiments(int id) {
ArrayList<VisitorExperiment> visitorExperimentsList = new ArrayList<>();
Iterable<VisitorExperiment> visitorExperimentsIterable = visitorExperimentDao.findAllByVisitor(visitorDao.findByVisitorId(id));
visitorExperimentsIterable.forEach(visitorExperimentsList::add);
visitorExperimentsList.sort((ve1, ve2) -> ve2.getDate().compareTo(ve1.getDate()));
return visitorExperimentsList;
}
@Override
public VisitorExperiment getVisitorExperiment(int visitorID, int experimentID) {
Visitor v = visitorDao.findByVisitorId(visitorID);
Experiment e = experimentDao.findById(experimentID);
return visitorExperimentDao.findByVisitorAndExperiment(v, e);
}
@Override
public String getExperimentConsent(int visitorID, int experimentID) {
Visitor v = visitorDao.findByVisitorId(visitorID);
Experiment e = experimentDao.findById(experimentID);
VisitorExperiment visitorExperiment = visitorExperimentDao.findByVisitorAndExperiment(v, e);
return visitorExperiment == null ? "NULL" : visitorExperiment.getConsentOption().getName();
}
@Override
public boolean updateExperimentConsent(int visitorId, ConsentOption newConsentOption, int experimentID) {
Visitor v = visitorDao.findByVisitorId(visitorId);
Experiment e = experimentDao.findById(experimentID);
if ( v == null || e == null ) return false;
VisitorExperiment visitorExperiment = visitorExperimentDao.findByVisitorAndExperiment(v, e);
for(ConsentExperiment c : e.getConsentExperiments()) {
if(c.getConsentOption().getName().equals(newConsentOption.getName()))
visitorExperiment.setConsentOption(c.getConsentOption());
}
return visitorExperimentDao.save(visitorExperiment) != null;
}
@Override
public boolean updateBatchExperimentConsents(int visitorId, ConsentOption c, List<Integer> experimentIds) {
boolean batchSuccess = true;
for (int eID : experimentIds) batchSuccess = updateExperimentConsent(visitorId, c, eID) && batchSuccess;
return batchSuccess;
}
@Override
public int getPendingExperiments(int visitorId) {
return visitorDao.findByVisitorId(visitorId).getPendingExperiments();
}
@Override
public boolean moveAllToReviewed(int visitorId) {
Iterable<VisitorExperiment> visitorExperiments = visitorExperimentDao.findAllByVisitor(getVisitor(visitorId));
for(VisitorExperiment ve : visitorExperiments) {
ve.setChangedConsent(true);
visitorExperimentDao.save(ve);
}
return true;
}
}
|
Selective internalization of granule membrane after secretion in mast cells.
Galactose, covalently bound to cell surface glycoconjugates of rat peritoneal mast cells, was used to study internalization of labeled plasma membrane and granule membrane constituents before or after secretion stimulated by compound 48/80. Internalized label was distinguished quantitatively from label on the cell surface by its inaccessibility to enzymatic removal. Three different situations were compared. (i) With label only on the plasma membrane, and in the absence of secretion, incubation at 37 °C (but not at 0 °C) resulted in a gradual decrease of label on the cell surface until, after ≈2 hr, a steady state was reached with 93% of all cell-bound label remaining on the cell surface. Recycling of internalized label was demonstrated. (ii) When cells were labeled on the plasma membrane and then stimulated to secrete, subsequent retrieval of (unlabeled) granule membrane did not affect the rate or extent of simultaneous internalization of labeled plasma membrane. (iii) When both plasma membrane and exposed granule membrane were labeled after secretion, subsequent incubation at 37 °C (but not at 0 °C) resulted in ≈33% of all cell-bound label becoming internalized during 4 hr, indicating additional internalization of label due to retrieval of labeled granule membrane. In all three cases, loss of label into the medium occurred with a half-life of 8-11 hr, showing that no extensive shedding of granule membrane occurred after secretion. The results suggest either that no mixing of labeled membrane constituents occurred between the plasma membrane and granule membrane or that, during retrieval of granule membrane, sorting of membrane was taking place at the cell surface. |
module ReaderMonad where
import Control.Monad
import Control.Monad.Reader
import Data.Map.Strict
import qualified Data.Map.Strict as M
type Env = Map SymName Value
type SymName = String
type Value = String
-- | Find a value related to a symbol if present in the
-- env in Reader Monadic Context
findVal :: SymName -> ReaderT Env IO (Maybe Value)
findVal sym = do
  env <- ask
  return $ M.lookup sym env

data Command =
    Assign String String
  | Lookup String
  | Exit
  | InvalidCommand
  deriving (Show)

parseCommand :: String -> Command
parseCommand str
  | elem '=' str = Assign symName value
  | str == "exit" = Exit
  | length str == 1 = Lookup str
  | otherwise = InvalidCommand
  where symName = takeWhile (/='=') str
        value = tail $ dropWhile (/='=') str

readerloop :: ReaderT Env IO String
readerloop = do
  env <- ask
  input <- lift getLine
  case parseCommand input of
    Assign name value -> do
      lift $ print ("New Value Added")
      local (insert name value) readerloop
    Lookup symName -> case M.lookup symName env of
      Nothing -> do
        lift $ print ("The value for " ++ symName ++ " is not defined!")
        readerloop
      Just value -> do
        lift $ print (value)
        readerloop
    Exit -> return (show env)
    InvalidCommand -> do
      lift $ print "Invalid Command!"
      readerloop
|
// offer/06_PrintListInReverse/reverse_test.go (from GaiheiluKamei/Algorithms)
package offer
import (
"reflect"
"testing"
)
var (
n1 = &Node{5, n2}
n2 = &Node{4, n3}
n3 = &Node{3, n4}
n4 = &Node{2, n5}
n5 = &Node{1, nil}
)
func TestDoubleRangeVersion(t *testing.T) {
// linked list with multiple nodes
if !reflect.DeepEqual(DoubleRangeVersion(n1), []int{1, 2, 3, 4, 5}) {
t.Error("【double】sorry for my shit code: ", DoubleRangeVersion(n1))
}
if !reflect.DeepEqual(RecursionVersion(n1), []int{1, 2, 3, 4, 5}) {
t.Error("【recursion】sorry for my shit code: ", RecursionVersion(n1))
}
// linked list with a single node
if !reflect.DeepEqual(DoubleRangeVersion(n5), []int{1}) {
t.Error("【double】sorry for my shit code: ", DoubleRangeVersion(n5))
}
if !reflect.DeepEqual(RecursionVersion(n5), []int{1}) {
t.Error("【recursion】sorry for my shit code: ", RecursionVersion(n5))
}
}
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* Returns 1 if str consists solely of the lucky digits 4 and 7. */
int finding(char str[])
{
    int i, flag, len;
    len = strlen(str);
    flag = 1;
    for (i = 0; i < len; i++)
    {
        if (str[i] != '4' && str[i] != '7')
        {
            flag = 0;
            break;
        }
    }
    if (flag == 1)
        return 1;
    else
        return 0;
}
int main()
{
    char str[30], str1[30];
    int i, len, digit;
    /* gets() is unsafe and was removed in C11; use fgets and strip the newline. */
    while (fgets(str, sizeof(str), stdin))
    {
        str[strcspn(str, "\n")] = '\0';
        len = strlen(str);
        digit = 0;
        for (i = 0; i < len; i++)
        {
            if (str[i] == '4' || str[i] == '7')
                digit++;
        }
        sprintf(str1, "%d", digit);
        if (finding(str1) == 1)
            printf("YES\n");
        else
            printf("NO\n");
    }
    return 0;
} |
/**
* Attempts to use given method as an adapter.
*
* @param executableElement the method to process
* @return adapter mapping data or empty if errors occurred
*/
private Optional<AdapterMethodDeclaration> createAdapterMethod(ExecutableElement executableElement) {
var returnType = executableElement.getReturnType();
var parameters = executableElement.getParameters();
if (parameters.isEmpty()) return Optional.empty();
var adapterParameters = new ArrayList<DeclaredType>();
for (var parameter : parameters) {
var type = parameter.asType();
if (!(type instanceof DeclaredType declaredType) || !types.isAssignable(types.erasure(type), futureType)) {
messager.printMessage(Diagnostic.Kind.ERROR, "Type " + parameter.asType() + " is not supported here", parameter);
return Optional.empty();
}
var genericParameter = declaredType.getTypeArguments().get(0);
if (!(genericParameter instanceof TypeVariable)) {
messager.printMessage(Diagnostic.Kind.ERROR, "Type specific adapters are not yet supported.", parameter);
return Optional.empty();
}
adapterParameters.add(declaredType);
}
return Optional.of(new AdapterMethodDeclaration(
executableElement.getSimpleName().toString(),
returnType,
adapterParameters,
executableElement.getThrownTypes()
.stream()
.map(TypeMirror.class::cast)
.toList(),
executableElement
));
} |
// article/usecase/usecase.go
package usecase
import (
"context"
"regexp"
"strconv"
"strings"
"time"
"github.com/google/uuid"
"github.com/pkg/errors"
"github.com/situmorangbastian/ambarita/models"
"github.com/situmorangbastian/eclipse"
)
type usecase struct {
repository models.ArticleRepository
}
// NewArticleUsecase creates a new usecase object representing the models.ArticleUsecase interface
func NewArticleUsecase(repository models.ArticleRepository) models.ArticleUsecase {
return &usecase{
repository: repository,
}
}
func (u usecase) Fetch(ctx context.Context, cursor string, num int) ([]models.Article, string, error) {
articles, nextCursor, err := u.repository.Fetch(ctx, cursor, num)
if err != nil {
return make([]models.Article, 0), "", errors.Wrap(err, "fetch article")
}
if len(articles) == 0 {
nextCursor = cursor
}
return articles, nextCursor, nil
}
func (u usecase) Get(ctx context.Context, ID string) (models.Article, error) {
article, err := u.repository.Get(ctx, ID)
if err != nil {
return models.Article{}, errors.Wrap(err, "get article")
}
return article, nil
}
func (u usecase) Store(ctx context.Context, article models.Article) (models.Article, error) {
article.ID = uuid.New().String()
article.CreateTime = time.Now()
article.UpdateTime = time.Now()
slug, err := u.resolveSlug(ctx, buildSlug(article.Title))
if err != nil {
return models.Article{}, errors.Wrap(err, "resolve slug")
}
article.Slug = slug
err = u.repository.Store(ctx, article)
if err != nil {
return models.Article{}, err
}
return article, nil
}
func (u usecase) Update(ctx context.Context, article models.Article) (models.Article, error) {
currentArticle, err := u.Get(ctx, article.ID)
if err != nil {
return models.Article{}, errors.Wrap(err, "get article on update")
}
article.Slug = currentArticle.Slug
article.UpdateTime = time.Now()
err = u.repository.Update(ctx, article)
if err != nil {
return models.Article{}, errors.Wrap(err, "update article")
}
article.CreateTime = currentArticle.CreateTime
return article, nil
}
func (u usecase) Delete(ctx context.Context, id string) error {
err := u.repository.Delete(ctx, id)
if err != nil {
return errors.Wrap(err, "delete article")
}
return nil
}
func (u usecase) resolveSlug(ctx context.Context, slug string) (string, error) {
_, err := u.repository.Get(ctx, slug)
if err != nil {
switch errors.Cause(err).(type) {
case eclipse.NotFoundError:
return slug, nil
default:
return "", err
}
}
counterSlug := 1
for {
newSlug := slug + "-" + strconv.Itoa(counterSlug)
_, err = u.repository.Get(ctx, newSlug)
if err != nil {
switch errors.Cause(err).(type) {
case eclipse.NotFoundError:
return newSlug, nil
default:
return "", err
}
}
counterSlug++
}
}
func buildSlug(keyword string) string {
regex := regexp.MustCompile("[^a-zA-Z0-9]+")
processedSlug := regex.ReplaceAllString(keyword, " ")
toLowerCase := strings.ToLower(processedSlug)
splitTitle := strings.Fields(toLowerCase)
slug := strings.Join(splitTitle, "-")
return slug
}
|
// Creates a new Block from Sparkey files. On success, takes ownership of the
// log file and index file.
func newBlockFromSparkey(storePath string, logPath string, partition int) (*Block, error) {
success := false
name, id := newBlockName(partition)
dstLog := filepath.Join(storePath, name)
dstIdx := sparkey.HashFileName(dstLog)
defer func() {
if !success {
os.Remove(dstLog)
os.Remove(dstIdx)
}
}()
err := os.Rename(logPath, dstLog)
if err != nil {
return nil, fmt.Errorf("importing Sparkey log: %s", err)
}
err = os.Rename(sparkey.HashFileName(logPath), dstIdx)
if err != nil {
return nil, fmt.Errorf("importing Sparkey index: %s", err)
}
b := &Block{
ID: id,
Name: name,
Partition: partition,
}
err = b.open(storePath)
if err != nil {
return nil, err
}
defer func() {
if !success {
b.Delete()
}
}()
b.Count, err = b.readCount()
if err != nil {
return nil, err
}
if b.Count > 0 {
b.minKey, err = b.readMinKey()
if err != nil {
return nil, err
}
}
success = true
return b, nil
} |
def _inject_request_id(request: Request) -> Request:
    if request.id is not None:
        return request
    # Assign a random id of the form "<method>_<n>" when none was provided.
    request_dict = request.to_dict()
    request_dict["id"] = f"{request.method}_{randrange(_REQ_ID_MAX)}"
    resp = Request.from_dict(request_dict)
    assert resp.id is not None
    return resp |
def _incomplete_str_support(self, param: 'str', metadata_dict: 'dict') -> 'str | dict':
    try:
        assert isinstance(param, str), 'parameter must be of type str'
        multi_keys = {}
        for text in metadata_dict.keys():
            if param.upper() in text:
                multi_keys[text] = metadata_dict.get(text)
        if len(multi_keys) > 1:
            print(
                f'It seems your query **{param}** matches several parameters. Try to be more specific to access your desired value;\nhowever, you can still make use of my guesses below.')
            return multi_keys
        elif len(multi_keys) == 1:
            guess = list(multi_keys.keys())[0]
            print(
                f'It seems you entered an incomplete query parameter. Did you mean **{guess}**? If so, see the result below.\n'
                f'If not, try the get_all() method to see all available parameters.')
            return multi_keys.get(guess)
        else:
            return ('Could not find a match for your parameter. Try the .get_all() method to see available parameters,\n'
                    'or be more specific with your query parameter.')
    except Exception as err:
        print('Invalid query parameter. Ensure the spelling is correct and retry.')
        raise err |
/**
* Test of get method, of class MySet.
*/
@Test
public void testGet() {
int index = 2;
MySet set = new MySet();
set.add(1);
set.add(2);
set.add(3);
set.add(4);
Object expResult = 3;
Object result = set.get(index);
assertEquals(expResult, result);
} |
#include <stdio.h>
/* Competitive-programming style solution: reads n, prints
   res = n + 2*k*(n-k-1) (with k roughly n/2), then an n x n board
   containing 'C' wherever i+j is even and '.' elsewhere. */
const char *b[2] = {"C.", ".C"};
int res, n, k;
int main() {
    scanf("%i", &n), res = n;
    k = n % 2 ? n / 2 : n / 2 - 1;
    res += 2 * k * (n - k - 1);
    printf("%i\n", res);
    for (int i = 0; i < n; i++, puts(""))
        for (int j = 0; j < n; j++)
            putc(b[i % 2][j % 2], stdout);
} |
////// Copyright 2021 by <NAME>
////// Licensed under the Apache 2.0 License
#include "nbw/NonBlockingWire.cpp"
#include "nbw/nb_twi.c"
|
#include <dgl/array.h>
#include <dgl/immutable_graph.h>
#include <dgl/zerocopy_serializer.h>
#include <dmlc/memory_io.h>
#include <gtest/gtest.h>
#include <algorithm>
#include <iostream>
#include <vector>
#include "../../src/graph/heterograph.h"
#include "../../src/graph/unit_graph.h"
#include "./common.h"
#ifndef _WIN32
using namespace dgl;
using namespace dgl::aten;
using namespace dmlc;
// Function to convert an idarray to string
std::string IdArrayToStr(IdArray arr) {
arr = arr.CopyTo(DGLContext{kDGLCPU, 0});
int64_t len = arr->shape[0];
std::ostringstream oss;
oss << "(" << len << ")[";
if (arr->dtype.bits == 32) {
int32_t *data = static_cast<int32_t *>(arr->data);
for (int64_t i = 0; i < len; ++i) {
oss << data[i] << " ";
}
} else {
int64_t *data = static_cast<int64_t *>(arr->data);
for (int64_t i = 0; i < len; ++i) {
oss << data[i] << " ";
}
}
oss << "]";
return oss.str();
}
TEST(ZeroCopySerialize, NDArray) {
auto tensor1 = VecToIdArray<int64_t>({1, 2, 5, 3});
auto tensor2 = VecToIdArray<int64_t>({6, 6, 5, 7});
std::string nonzerocopy_blob;
dmlc::MemoryStringStream ifs(&nonzerocopy_blob);
static_cast<dmlc::Stream *>(&ifs)->Write(tensor1);
static_cast<dmlc::Stream *>(&ifs)->Write(tensor2);
std::string zerocopy_blob;
StreamWithBuffer zc_write_strm(&zerocopy_blob, true);
zc_write_strm.Write(tensor1);
zc_write_strm.Write(tensor2);
EXPECT_EQ(nonzerocopy_blob.size() - zerocopy_blob.size(), 126)
<< "Invalid save";
std::vector<void *> new_ptr_list;
// Use memcpy to mimic remote machine reconstruction
for (auto ptr : zc_write_strm.buffer_list()) {
auto new_ptr = malloc(ptr.size);
memcpy(new_ptr, ptr.data, ptr.size);
new_ptr_list.emplace_back(new_ptr);
}
NDArray loadtensor1, loadtensor2;
StreamWithBuffer zc_read_strm(&zerocopy_blob, new_ptr_list);
zc_read_strm.Read(&loadtensor1);
zc_read_strm.Read(&loadtensor2);
}
TEST(ZeroCopySerialize, ZeroShapeNDArray) {
auto tensor1 = VecToIdArray<int64_t>({6, 6, 5, 7});
auto tensor2 = VecToIdArray<int64_t>({});
auto tensor3 = VecToIdArray<int64_t>({6, 6, 2, 7});
std::vector<NDArray> ndvec;
ndvec.push_back(tensor1);
ndvec.push_back(tensor2);
ndvec.push_back(tensor3);
std::string zerocopy_blob;
StreamWithBuffer zc_write_strm(&zerocopy_blob, true);
zc_write_strm.Write(ndvec);
std::vector<void *> new_ptr_list;
// Use memcpy to mimic remote machine reconstruction
for (auto ptr : zc_write_strm.buffer_list()) {
auto new_ptr = malloc(ptr.size);
memcpy(new_ptr, ptr.data, ptr.size);
new_ptr_list.emplace_back(new_ptr);
}
std::vector<NDArray> ndvec_read;
StreamWithBuffer zc_read_strm(&zerocopy_blob, new_ptr_list);
zc_read_strm.Read(&ndvec_read);
EXPECT_EQ(ndvec_read[1]->ndim, 1);
EXPECT_EQ(ndvec_read[1]->shape[0], 0);
}
TEST(ZeroCopySerialize, SharedMem) {
auto tensor1 = VecToIdArray<int64_t>({1, 2, 5, 3});
DGLDataType dtype = {kDGLInt, 64, 1};
std::vector<int64_t> shape{4};
DGLContext cpu_ctx = {kDGLCPU, 0};
auto shared_tensor =
NDArray::EmptyShared("test", shape, dtype, cpu_ctx, true);
shared_tensor.CopyFrom(tensor1);
std::string nonzerocopy_blob;
dmlc::MemoryStringStream ifs(&nonzerocopy_blob);
static_cast<dmlc::Stream *>(&ifs)->Write(shared_tensor);
std::string zerocopy_blob;
StreamWithBuffer zc_write_strm(&zerocopy_blob, false);
zc_write_strm.Write(shared_tensor);
EXPECT_EQ(nonzerocopy_blob.size() - zerocopy_blob.size(), 51)
<< "Invalid save";
NDArray loadtensor1;
StreamWithBuffer zc_read_strm = StreamWithBuffer(&zerocopy_blob, false);
zc_read_strm.Read(&loadtensor1);
}
TEST(ZeroCopySerialize, HeteroGraph) {
auto src = VecToIdArray<int64_t>({1, 2, 5, 3});
auto dst = VecToIdArray<int64_t>({1, 6, 2, 6});
auto mg1 = dgl::UnitGraph::CreateFromCOO(2, 9, 8, src, dst);
src = VecToIdArray<int64_t>({6, 2, 5, 1, 8});
dst = VecToIdArray<int64_t>({5, 2, 4, 8, 0});
auto mg2 = dgl::UnitGraph::CreateFromCOO(1, 9, 9, src, dst);
std::vector<HeteroGraphPtr> relgraphs;
relgraphs.push_back(mg1);
relgraphs.push_back(mg2);
src = VecToIdArray<int64_t>({0, 0});
dst = VecToIdArray<int64_t>({1, 0});
auto meta_gptr = ImmutableGraph::CreateFromCOO(3, src, dst);
auto hrptr = std::make_shared<HeteroGraph>(meta_gptr, relgraphs);
std::string nonzerocopy_blob;
dmlc::MemoryStringStream ifs(&nonzerocopy_blob);
static_cast<dmlc::Stream *>(&ifs)->Write(hrptr);
std::string zerocopy_blob;
StreamWithBuffer zc_write_strm(&zerocopy_blob, true);
zc_write_strm.Write(hrptr);
EXPECT_EQ(nonzerocopy_blob.size() - zerocopy_blob.size(), 745)
<< "Invalid save";
std::vector<void *> new_ptr_list;
// Use memcpy to mimic remote machine reconstruction
for (auto ptr : zc_write_strm.buffer_list()) {
auto new_ptr = malloc(ptr.size);
memcpy(new_ptr, ptr.data, ptr.size);
new_ptr_list.emplace_back(new_ptr);
}
auto gptr = dgl::Serializer::make_shared<HeteroGraph>();
StreamWithBuffer zc_read_strm(&zerocopy_blob, new_ptr_list);
zc_read_strm.Read(&gptr);
EXPECT_EQ(gptr->NumVertices(0), 9);
EXPECT_EQ(gptr->NumVertices(1), 8);
}
#endif // _WIN32 |
package keybaser
import (
"strings"
)
// helpCommand shows commands list and usage
func (k *Keybaser) helpCommand() *CommandDefinition {
return &CommandDefinition{
Description: "show commands list and usage",
Handler: func(request Request, response ResponseWriter) {
var msg strings.Builder
msg.Grow(len(k.botCommands) * 9)
for _, cmd := range k.botCommands {
msg.WriteString(":white_check_mark: *")
msg.WriteString(cmd.Usage())
msg.WriteString("* - ")
msg.WriteString(cmd.Definition().Description)
if cmd.Definition().AuthorizationFunc != nil {
msg.WriteString(":lock:")
}
msg.WriteString("\n")
if len(cmd.Definition().Example) > 0 {
msg.WriteString("> Example: ")
msg.WriteString(cmd.Definition().Example)
msg.WriteString("\n")
}
}
response.Reply(msg.String())
},
}
}
|
def ExtractOneFile(aar, name, abs_output_dir):
  """Extracts a single entry from the aar archive into abs_output_dir.

  On Windows a temporary junction is used, presumably to keep the effective
  path short enough for Windows path-length limits.
  """
  if os.name == "nt":
    fullpath = os.path.normpath(os.path.join(abs_output_dir, name))
    if name[-1] == "/":
      # Directory entry: creating the junction materializes the directory.
      with junction.TempJunction(fullpath.rstrip("/")) as juncpath:
        pass
    else:
      with junction.TempJunction(os.path.dirname(fullpath)) as juncpath:
        extracted_path = os.path.join(juncpath, os.path.basename(fullpath))
        with aar.open(name) as src_fd:
          with open(extracted_path, "wb") as dest_fd:
            dest_fd.write(src_fd.read())
  else:
    aar.extract(name, abs_output_dir) |
def findChildNodeOfTypeWithParms(parentnode, nodetype, parmlist,
                                 dorecurse=False, basetypematch=False):
    return hou.Node() |
package br.com.economize.dao;
/**
 * @author <NAME>
 *
 */
import org.junit.Test;
import br.com.economize.domain.Categoria;
public class CategoriaDAOTest {
    @Test
    public void salvarCategoria() {
        CategoriaDAO categoriaDAO = new CategoriaDAO();
        String[] nomes = {
                "Agronegócios", "Alimentos e Bebidas", "Brinquedos e Games",
                "Cama Mesa e Banho", "Carros Motos e Autopeças",
                "Celulares e Telefones", "Construção e Reforma",
                "Eletrodomésticos", "Eletrônicos", "Embalagens",
                "Escritórios", "Esporte e Lazer", "Ferramentas e Máquinas",
                "Foto Câmera e Filmadora", "Informática", "Joias e Relógios",
                "Materiais de Limpeza", "Moda e Acessórios",
                "Móveis e Decoração", "Papelarias e Livrarias",
                "Perfumes e Cosméticos", "Pet Shop", "Saúde e Odontologia",
                "Utensílios Domésticos"
        };
        // One fresh Categoria instance per name, each merged into the store.
        for (String nome : nomes) {
            Categoria categoria = new Categoria();
            categoria.setCategoria(nome);
            categoriaDAO.merge(categoria);
        }
    }
} |
def load(config_string, default_name=None):
    """Loads an object from a "module:object" style config string.

    Returns the named attribute of the module; if the config string has no
    ":object" part, default_name is used, and if that is also empty the
    module itself is returned.
    """
    split = windows_friendly_colon_split(config_string)
    if len(split) > 1:
        module_name, object_name = ":".join(split[:-1]), split[-1]
    else:
        module_name, object_name = config_string, default_name
    module = get_module(module_name)
    if object_name:
        return getattr(module, object_name)
    else:
        return module |
<filename>packages/sun-react-ui/src/Spin/Spin.tsx
import React, { HTMLAttributes } from 'react';
import classnames from 'classnames';
import { Loader as LoadingIcon } from 'sun-react-icons';
import './style.less';
type Size = 'default' | 'small' | 'large';
interface BaseSpinProps {
size?: Size;
tip?: React.ReactNode;
indicator?: React.ReactNode;
className?: string;
spinning?: boolean;
}
type SpinProps = BaseSpinProps & HTMLAttributes<HTMLElement>;
const prefixClassName = 'spin';
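/**
 * Illustrative usage (names like SomeContent are placeholders):
 *   <Spin spinning={isLoading} tip="Loading...">
 *     <SomeContent />
 *   </Spin>
 * With no children, only the indicator is rendered; when `spinning` is
 * false, the children are returned as-is.
 */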
function Spin(props: SpinProps) {
const {
size = 'default',
indicator = <LoadingIcon />,
className,
tip,
children,
spinning = true,
...restProps
} = props;
if (!spinning) return children as JSX.Element;
if (!children) {
return (
<div className={classnames(prefixClassName, className)} {...restProps}>
<span
className={classnames(`${prefixClassName}-indicator`, {
[`${prefixClassName}-${size}-size`]: !!size,
})}
>
{indicator}
</span>
</div>
);
}
return (
<div className={`${prefixClassName}-container`}>
<div
className={classnames(`${prefixClassName}-wrap`, className)}
{...restProps}
>
<span
className={classnames(`${prefixClassName}-indicator`, {
[`${prefixClassName}-${size}-size`]: !!size,
})}
>
{indicator}
</span>
{tip && <div className={`${prefixClassName}-tip`}>{tip}</div>}
</div>
<div className={classnames(`${prefixClassName}-blur`)}>{children}</div>
</div>
);
}
export default Spin;
|
I have been creating several adventure maps in Minecraft. This page logs my efforts.
Willow’s Challenge was the first Adventure Map I made. Can you get past the challenges to reach the cake? Can you find all nine Gold Bricks? (Made in Minecraft 1.7.2).
The Glass Maze will challenge your wits!
The Paintball Arena will challenge your reflexes!
The Digger’s Challenge will challenge your judgement!
The Amazing Maze will challenge your sense of direction!
Co-Op Adventure is a two-person Adventure Map. Two players must work together to bypass a series of challenges and traps. Better have good teamwork! (Made in Minecraft 1.7.2) As an additional challenge, perhaps you can find all the hidden achievements. You will either want to play this one over LAN, or one player will have to host it as a server.
Open the door, and the zombies attack.
Grab a bow and help out your partner!
Pressure plates in the right sequence open doors for your partner… while the lights go out for you.
Your partner will drop supplies to help you get through the wall. By ‘supplies,’ I mean ‘exploding creepers.’
World of Adventure is my most ambitious Map to date, featuring a re-creation of the classic D&D dungeon the Caves of Chaos. Download it to play yourself, or host a whole party over LAN or on a server. (Made in Minecraft 1.7.4).
Aerial view of the Caves of Chaos valley.
Aerial view of the Village of Town.
One of the entrances to the Caves of Chaos. (Entry F)
A foreboding cave entrance. (Entry I)
Cave entrance. (Entry K)
A chicken stands guard in area 7g.
In the Village of Town, written books provide rumors and in-game information about the Caves of Chaos.
The Chapel of Evil Chaos. (Area 55).
|
module Web.WitAI.Types
( module Web.WitAI.Types.Requests
, module Web.WitAI.Types.Responses
, module Web.WitAI.Types.Static
, WitAIConverseSettings (..)
, WitAIConverseHandlers (..)
, WitAIException (..)
) where
import Data.Text
import Control.Exception
import Control.Monad.IO.Class
import Control.Monad.Catch
import Data.Typeable
import Data.Map.Strict
import Web.WitAI.Types.Requests
import Web.WitAI.Types.Responses
import Web.WitAI.Types.Static
type Version = Text
type Token = Text
type SessionID = Text
type Context = Map Text Text
type Message = Text
type Action = Text
-- N.B. YOU MUST AT LEAST SET
-- def {witAI_token = INSERT YOUR TOKEN HERE}
data WitAIConverseSettings = WitAIConverseSettings
{ witAI_version :: Version
-- The wit.ai version used (date format YYYYMMDD) default = "20160731"
, witAI_token :: Token
-- Your wit.ai token
, witAI_session_id :: SessionID
-- A unique id for this session
, witAI_context :: Context
-- The current context of the conversation.
-- Needs to be supplied again after a
, witAI_textfunc :: Message -> Message
-- A function that manipulates the message before it's sent to wit.ai. default = toLower
}
-- (The MonadIO/MonadThrow constraints belong on the functions that use these
-- handlers; datatype contexts are deprecated.)
data WitAIConverseHandlers m = WitAIConverseHandlers
{ witAI_mergefunc :: Map Text [WitAIConverseEntity] -> Context
-- Function to handle the first MERGE to make the first context
, witAI_actionfunc :: Action -> Context -> m Context
-- Function to handle the ACTIONs received from wit.ai and update the context appropriately
, witAI_messagefunc :: Message -> SessionID -> Context -> m ()
-- Function to handle MESSAGEs the bot should send and
-- should save the Context with the SessionID to continue the conversation after the user replies.
, witAI_stopfunc :: SessionID -> m ()
-- Function to handle the STOP response and close the session
}
class Default a where
def :: a
instance Default WitAIConverseSettings where
def = WitAIConverseSettings "20160731"
""
"test"
Data.Map.Strict.empty
toLower
data WitAIException = InvalidResponse String
deriving Typeable
instance Show WitAIException where
show (InvalidResponse t) = "Invalid response from witAI: " ++ t
instance Exception WitAIException |
a, b, c = map(int, input().split())
def iroha(a, b, c):
    if (a == 5 or a == 7) and (b == 5 or b == 7) and (c == 5 or c == 7) and a + b + c == 17:
        return "YES"
    else:
        return "NO"
print(iroha(a, b, c)) |
<reponame>KenzieMac130/-ABANDONED-old-engine
#include "asEntry.h"
#include "../common/asCommon.h"
#include "asOsEvents.h"
#include "../renderer/asRendererCore.h"
#include "../resource/asUserFiles.h"
#include "../input/asInput.h"
#include "../common/preferences/asPreferences.h"
#if ASTRENGINE_NUKLEAR
#include "../nuklear/asNuklearImplimentation.h"
#endif
#if ASTRENGINE_DEARIMGUI
#include "../cimgui/asDearImGuiImplimentation.h"
#endif
#include <SDL.h>
#include "../guiTools/cmdConsole/asCmdConsole.h"
#include "../flecs/asFlecsImplimentation.h"
int32_t gContinueLoop;
int32_t gDevConsoleToggleable = 1;
#ifdef NDEBUG
bool gShowDevConsole = false;
#else
bool gShowDevConsole = true;
#endif
#define GLOBAL_INI_NAME "astrengineConfig.ini"
asResults _commandQuit(const char* propName, void* pCurrentValue, void* pNewValueTmp, void* pUserData)
{
gContinueLoop = false;
return AS_SUCCESS;
}
asResults _commandSetLogFreq(const char* propName, void* pCurrentValue, void* pNewValueTmp, void* pUserData)
{
_asDebugLoggerSetSaveFreq((size_t)*(int32_t*)pNewValueTmp);
return AS_SUCCESS;
}
ASEXPORT int asIgnite(int argc, char *argv[], asAppInfo_t *pAppInfo, void *pCustomWindow)
{
/*Info*/
asDebugLog("%s %d.%d.%d", pAppInfo->pAppName, pAppInfo->appVersion.major, pAppInfo->appVersion.minor, pAppInfo->appVersion.patch);
asDebugLog("astrengine %d.%d.%d", ASTRENGINE_VERSION_MAJOR, ASTRENGINE_VERSION_MINOR, ASTRENGINE_VERSION_PATCH);
/*SDL*/
SDL_Init(SDL_INIT_EVERYTHING);
/*User Files*/
asInitUserFiles(pAppInfo->pDevName, pAppInfo->pAppName);
/*Set Debug Log Output File*/
char debugLogPath[4096];
memset(debugLogPath, 0, 4096);
asUserFileMakePath("astrengineLog.txt", debugLogPath, 4096);
_asDebugLoggerInitializeFile(debugLogPath, 16);
/*Init Preferences*/
asPreferenceManager* pPrefMan;
asPreferenceManagerCreate(&pPrefMan);
_asSetGlobalPrefs(pPrefMan);
asPreferencesLoadIni(pPrefMan, GLOBAL_INI_NAME);
/*Set Global Preferences Data*/
asPreferencesRegisterOpenSection(asGetGlobalPrefs(), "core");
asPreferencesRegisterParamInt32(asGetGlobalPrefs(), "logSaveFrequency", NULL, 1, 1024, true, _commandSetLogFreq, NULL, "Number of Lines before Saving");
asPreferencesRegisterParamInt32(asGetGlobalPrefs(), "devConsoleEnabled", &gShowDevConsole, 0, 1, false, NULL, NULL, "Show Developer Console");
asPreferencesRegisterParamInt32(asGetGlobalPrefs(), "devConsoleToggleable", &gDevConsoleToggleable, 0, 1, true, NULL, NULL, "Dev Console Toggleable Developer Console");
asPreferencesRegisterNullFunction(asGetGlobalPrefs(), "quit", _commandQuit, false, NULL, NULL, "Quit Engine (Alt+F4)");
asPreferencesLoadSection(asGetGlobalPrefs(), "core");
/*Console Global Preferences*/
asGuiToolCommandConsole_RegisterPrefManager(pPrefMan, "as");
/*Resource*/
asInitResource();
/*Graphics*/
asInitGfx(pAppInfo, pCustomWindow);
/*Input*/
asInitInputSystem();
#if ASTRENGINE_NUKLEAR
asInitNk();
#endif
#if ASTRENGINE_DEARIMGUI
asInitImGui();
#endif
#if ASTRENGINE_FLECS
/*Initialize Flecs*/
asInitFlecs(argc, argv);
#endif
return 0;
}
ASEXPORT void asShutdown(void)
{
asShutdownInputSystem();
#if ASTRENGINE_FLECS
asShutdownFlecs();
#endif
#if ASTRENGINE_NUKLEAR
asShutdownNk();
#endif
#if ASTRENGINE_DEARIMGUI
asShutdownImGui();
#endif
asShutdownGfx();
asShutdownResource();
asPreferencesSaveSectionsToIni(asGetGlobalPrefs(), GLOBAL_INI_NAME);
asPreferenceManagerDestroy(asGetGlobalPrefs());
asShutdownUserFiles();
SDL_Quit();
asDebugLog("astrengine Quit...");
}
ASEXPORT void asToggleDevConsole()
{
if (!gDevConsoleToggleable) { return; }
if (gShowDevConsole) { gShowDevConsole = false; }
else { gShowDevConsole = true; }
}
ASEXPORT int asLoopSingleShot(double time, asLoopDesc_t loopDesc)
{
/*Dev Console*/
if(gShowDevConsole)
asGuiToolCommandConsoleUI();
/*Update Callbacks*/
if (loopDesc.fpOnTick)
loopDesc.fpOnTick(1.0 / 30);
if (loopDesc.fpOnUpdate)
loopDesc.fpOnUpdate(time);
#if ASTRENGINE_FLECS
	/*Flecs Iterate*/
asUpdateFlecs((float)time);
#endif
asGfxInternalDebugDraws();
asImGuiEndFrame();
#if ASTRENGINE_DEARIMGUI
asImGuiPumpInput();
#endif
asInputSystemNextFrame();
asPollOSEvents();
asGfxRenderFrame();
#if ASTRENGINE_DEARIMGUI
asImGuiNewFrame(time);
#endif
return 0;
}
ASEXPORT int asEnterLoop(asLoopDesc_t loopDesc)
{
float deltaTime = 1.0/30; /*Initial Delta Time*/
asTimer_t globalTimer = asTimerStart();
gContinueLoop = true;
while (gContinueLoop)
{
deltaTime = (float)asTimerSeconds(globalTimer, asTimerTicksElapsed(globalTimer));
globalTimer = asTimerRestart(globalTimer);
if (deltaTime < 0) { deltaTime = 0.000001f; }
asLoopSingleShot(deltaTime, loopDesc);
}
return 0;
}
ASEXPORT void asExitLoop()
{
gContinueLoop = false;
} |
// CombineLines reads lines of characters (usually the output from
// SplitToLines) and combines them into a stream of characters (minus
// the new line characters). The stream of characters can be read from
// the returned PipeReader.
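// Example (illustrative):
//
//	pr := CombineLines(strings.NewReader("a\nb\n"))
//	data, _ := io.ReadAll(pr) // data == []byte("ab")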
func CombineLines(r io.Reader) *io.PipeReader {
rRdr, rWtr := io.Pipe()
go func() {
defer rWtr.Close()
bRdr := bufio.NewReader(r)
for {
line, _, err := bRdr.ReadLine()
if err == nil {
_, err := rWtr.Write(line)
				if err != nil {
					rWtr.CloseWithError(errors.Wrap(err, "failure writing text to a pipe writer"))
					return
				}
} else {
if !errors.Is(err, io.EOF) {
rWtr.CloseWithError(errors.Wrap(err, "failure reading a line of text from a buffered reader"))
}
break
}
}
}()
return rRdr
} |
Stress and Prolactin Effects on Bone Marrow Myeloid Cells, Serum Chemokine and Serum Glucocorticoid Levels in Mice
Objective: Current evidence supports the conclusion that prolactin (PRL) is not an obligate immunoregulatory hormone and influences the immune system predominantly during stress conditions. In this study, we examined the impact of PRL on the psychogenic stress-induced responses of myeloid cells. Methods: Seven-week-old PRL+/– (normal) and PRL–/– (deficient) mice were exposed to a predator for 1 h/day on 3 consecutive days. Another group of PRL-deficient mice received either 1 pituitary graft (hyperprolactinemic) or sham surgery at 5 weeks of age, while PRL-normal mice only received sham surgery. Two weeks later, these mice were also subjected to predator exposure. One day after the last predator exposure session, all mice were killed and the bone marrow and blood harvested. Results: Significant differences in the myeloid cells between PRL-normal and PRL-deficient mice only occurred in stressed conditions. The median serum corticosterone levels were consistently higher in PRL-deficient mice. The implantation of a pituitary graft lowered the corticosterone levels to those observed in PRL-normal mice. The absolute number of immature neutrophils as well as the numbers of granulocyte macrophage, monocyte/macrophage and granulocyte colonies were significantly higher in the stressed PRL-deficient mice; however, only the increased number of immature neutrophils was reversed by pituitary grafting. Conclusions: Our findings support previous observations that PRL influences myeloid cells of the bone marrow most profoundly in stressed conditions. However, the mechanism by which PRL influences bone marrow myeloid cells during stress cannot be explained solely by its effect on serum corticosterone. |
/**
* Tests {@link XmlProvisioningTargetsProvider#findTarget(Class)} when run
* against a targets XML document that has properties that weren't set, and
* thus weren't filtered.
*/
@Test
public void findTargetWithUnfilteredProperties() {
@SuppressWarnings("unchecked")
DataSourceProvisionersManager provisionersManager = new DataSourceProvisionersManager(
new PostgreSqlProvisioner());
URL targetsDocUrl = Thread.currentThread().getContextClassLoader()
.getResource("sample-xml/datasource-provisioning-targets-2.xml");
XmlProvisioningTargetsProvider targetsProvider = new XmlProvisioningTargetsProvider(provisionersManager,
targetsDocUrl);
PostgreSqlProvisioningTarget target = targetsProvider.findTarget(PostgreSqlProvisioningTarget.class);
Assert.assertNotNull(target);
Assert.assertNotNull(target.getServerCoords().getUrl());
Assert.assertNull(target.getServerCoords().getUser());
Assert.assertNull(target.getServerCoords().getPassword());
} |
def cleanForDemo(filename):
data = pd.read_csv(filename)
data=cleanObject(data)
data=cleanNumeric(data)
data=data.dropna()
not_payed_indexes = [1,2,3,5]
payed = [0]
def adapt_loan_status(label):
if label in not_payed_indexes:
return(0)
elif label in payed:
return(1)
else:
return(label)
data['loan_status'] = data['loan_status'].apply(adapt_loan_status)
data = data[data['loan_status']!=4]
return(data) |
/**
* Reads file content from ADLS Gen 1 asynchronously.
*/
public class AdlsAsyncFileReader extends ReusableAsyncByteReader implements ExponentialBackoff {
static class RemoteException {
private String exception;
private String message;
private String javaClassName;
public String getException() {
return exception;
}
public String getMessage() {
return message;
}
public String getJavaClassName() {
return javaClassName;
}
@JsonProperty("RemoteException")
private void unpackRemoteException(Map<String, String> remoteException) {
// JSON error response is as follows:
// { "RemoteException" : {
// "exception" : ...,
// "message" : ...,
// "javaClassName" : ... }
// }
// We need to unpack the RemoteException property to get the meaningful attributes.
exception = remoteException.get("exception");
message = remoteException.get("message");
javaClassName = remoteException.get("javaClassName");
}
}
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AdlsAsyncFileReader.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY, true);
private static final int BASE_MILLIS_TO_WAIT = 250; // set to the average latency of an async read
private static final int MAX_MILLIS_TO_WAIT = 10 * BASE_MILLIS_TO_WAIT;
private static final int MAX_RETRIES = 4;
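  // Retry flow: each failed attempt calls backoffWait(attempt) before
  // retrying; the ExponentialBackoff interface presumably scales the delay
  // from BASE_MILLIS_TO_WAIT up to MAX_MILLIS_TO_WAIT, and once the retries
  // are exhausted the failure is propagated to the caller.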
private final ADLSClient client;
private final AsyncHttpClient asyncHttpClient;
private final String path;
private final String sessionId = UUID.randomUUID().toString();
private final DremioAdlFileSystem fs;
private final Long cachedVersion;
private volatile Long latestVersion;
private final String threadName;
private final ExecutorService threadPool;
private int errCode = 0;
/**
* Helper class for processing ADLS responses. This will write to the given output buffer
* for OK results and throw a detailed exception on failure.
*/
private class AdlsResponseProcessor extends AsyncCompletionHandlerBase {
private final ByteBuf outputBuffer;
private boolean isErrorResponse = false;
AdlsResponseProcessor(ByteBuf outputBuffer) {
this.outputBuffer = outputBuffer;
}
@Override
public State onStatusReceived(HttpResponseStatus status) throws Exception {
// The REST service provides error information as part of the response
// body when the response code is 400 or greater, and not a 401 (auth error).
if (status.getStatusCode() >= io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST.code() &&
status.getStatusCode() != HttpConstants.ResponseStatusCodes.UNAUTHORIZED_401) {
isErrorResponse = true;
errCode = status.getStatusCode();
}
return super.onStatusReceived(status);
}
@Override
public AsyncHandler.State onBodyPartReceived(HttpResponseBodyPart content) throws Exception {
if (isErrorResponse) {
return super.onBodyPartReceived(content);
}
outputBuffer.writeBytes(content.getBodyByteBuffer());
return AsyncHandler.State.CONTINUE;
}
@Override
public Response onCompleted(Response response) throws Exception {
if (isErrorResponse) {
throwRemoteException(response.getResponseBody());
}
return response;
}
private void throwRemoteException(String responseString) throws Exception {
final RemoteException remoteEx = OBJECT_MAPPER.readValue(responseString, RemoteException.class);
final OperationResponse resp = new OperationResponse();
resp.remoteExceptionName = remoteEx.getException();
resp.remoteExceptionMessage = remoteEx.getMessage();
resp.remoteExceptionJavaClassName = remoteEx.getJavaClassName();
throw client.getExceptionFromResponse(resp, String.format("Error reading file %s", path));
}
}
public AdlsAsyncFileReader(ADLSClient adlsClient, AsyncHttpClient asyncClient, String path, String cachedVersion,
DremioAdlFileSystem fs, ExecutorService threadPool) {
this.client = adlsClient;
this.asyncHttpClient = asyncClient;
this.path = path;
this.fs = fs;
this.cachedVersion = Long.parseLong(cachedVersion);
this.latestVersion = null;
this.threadName = Thread.currentThread().getName();
this.threadPool = threadPool;
}
@Override
public int getBaseMillis() {
return BASE_MILLIS_TO_WAIT;
}
@Override
public int getMaxMillis() {
return MAX_MILLIS_TO_WAIT;
}
@Override
protected void onClose() {
latestVersion = null;
}
private CompletableFuture<Void> readFullyFuture(long offset, ByteBuf dst, int dstOffset, int len) {
if (latestVersion > cachedVersion) {
logger.debug("[{}] File has been modified, metadata refresh is required", threadName);
final CompletableFuture<Void> future = new CompletableFuture<>();
future.completeExceptionally(new FileNotFoundException("Version of file changed " + path));
return future;
}
final String clientRequestId = UUID.randomUUID().toString();
final int capacityAtOffset = dst.capacity() - dstOffset;
if (capacityAtOffset < len) {
logger.debug("[{}] Buffer has {} bytes remaining. Attempted to write at offset {} with {} bytes", threadName, capacityAtOffset, dstOffset, len);
}
final CompletableFuture<String> future = CompletableFuture.supplyAsync( () -> {
try {
return client.getAccessToken();
} catch (IOException ex) {
throw new CompletionException(ex);
}
}, threadPool);
return future.thenCompose((authToken) -> {
final ADLSClient.AdlsRequestBuilder builder = new ADLSClient.AdlsRequestBuilder(client.getClient(), authToken, logger)
.setOp(ADLSClient.getOpenOperation())
.addQueryParam("read", "true")
.addQueryParam("filesessionid", sessionId)
.addHeader("x-ms-client-request-id", clientRequestId)
.setFilePath(path);
if (offset > 0) {
builder.addQueryParam("offset", Long.toString(offset));
}
if (len >= 0) {
builder.addQueryParam("length", Long.toString(len));
}
return executeAsyncRequest(dst, dstOffset, clientRequestId, builder, 0);
});
}
private CompletableFuture<Void> executeAsyncRequest(ByteBuf dst, int dstOffset, String clientRequestId,
ADLSClient.AdlsRequestBuilder builder, int retryAttemptNum) {
logger.debug("[{}] Sending request with clientRequestId: {}", threadName, clientRequestId);
dst.writerIndex(dstOffset);
final Stopwatch watch = Stopwatch.createStarted();
return asyncHttpClient.executeRequest(builder.build(), new AdlsResponseProcessor(dst))
.toCompletableFuture()
.whenComplete((response, throwable) -> {
if (null == throwable) {
logger.debug("[{}] Request completed for clientRequestId: {}, took {} ms", threadName, clientRequestId,
watch.elapsed(TimeUnit.MILLISECONDS));
} else if (retryAttemptNum < MAX_RETRIES) {
logger.info("[{}] Retry #{}, request failed with {} clientRequestId: {}, took {} ms", threadName, retryAttemptNum + 1, errCode,
clientRequestId, watch.elapsed(TimeUnit.MILLISECONDS), throwable);
} else {
logger.error("[{}] Request failed with {}, for clientRequestId: {}, took {} ms", threadName, errCode,
clientRequestId, watch.elapsed(TimeUnit.MILLISECONDS), throwable);
}
})
.thenAccept(response -> {}) // Discard the response, which has already been handled by AdlsResponseProcessor.
.thenApply(CompletableFuture::completedFuture)
.exceptionally(throwable -> {
        if (retryAttemptNum >= MAX_RETRIES) {
final CompletableFuture<Void> errorFuture = new CompletableFuture<>();
errorFuture.completeExceptionally(throwable);
return errorFuture;
}
backoffWait(retryAttemptNum);
// Reset the index of the writer for the retry.
dst.writerIndex(dstOffset);
return executeAsyncRequest(dst, dstOffset, clientRequestId, builder, retryAttemptNum + 1);
}).thenCompose(Function.identity());
}
@Override
public CompletableFuture<Void> readFully(long offset, ByteBuf dst, int dstOffset, int len) {
if (latestVersion != null) {
return readFullyFuture(offset, dst, dstOffset, len);
}
final CompletableFuture<Void> getStatusFuture = CompletableFuture.runAsync(() -> {
try {
if (latestVersion == null) {
synchronized (AdlsAsyncFileReader.this) {
if (latestVersion == null) {
latestVersion = fs.getFileStatus(new Path(path)).getModificationTime();
}
}
}
} catch (IOException ex) {
throw new CompletionException(ex);
}
}, threadPool);
return getStatusFuture.thenCompose(Void -> readFullyFuture(offset, dst, dstOffset, len));
}
} |
package org.smart.orm.operations.type;
import org.smart.orm.Model;
import org.smart.orm.OperationContext;
import org.smart.orm.data.JoinType;
import org.smart.orm.data.LogicalType;
import org.smart.orm.data.NodeType;
import org.smart.orm.data.StatementType;
import org.smart.orm.execution.Executor;
import org.smart.orm.execution.ObjectHandler;
import org.smart.orm.execution.ResultData;
import org.smart.orm.functions.Func;
import org.smart.orm.functions.PropertyGetter;
import org.smart.orm.operations.AbstractStatement;
import org.smart.orm.operations.SqlNode;
import org.smart.orm.operations.Token;
import org.smart.orm.operations.text.LimitNode;
import org.smart.orm.reflect.PropertyInfo;
import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Collectors;
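/**
 * Fluent builder for typed SELECT statements. A sketch of intended usage
 * (User, User::getName and the operator are illustrative placeholders, not
 * defined in this file):
 *
 *   new QueryObject<>(User.class)
 *       .where(User::getName, op, "bob")
 *       .limit(0, 10);
 */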
public class QueryObject<T extends Model<T>> extends AbstractStatement {
private OperationContext context;
private final RelationNode<QueryObject<T>, T> relRoot;
private RelationNode<QueryObject<T>, ?> relLast;
private ConditionNode<QueryObject<T>, ?, ?> whereRoot;
private ConditionNode<QueryObject<T>, ?, ?> whereLast;
private OrderByNode<QueryObject<T>> orderByRoot;
private GroupByNode<QueryObject<T>> groupByRoot;
private LimitNode<QueryObject<T>> limitRoot;
public QueryObject(Class<T> cls) {
relRoot = new RelationNode<>(cls, relLast).attach(this);
}
@Override
public StatementType getType() {
return StatementType.DQL;
}
public RelationNode<QueryObject<T>, T> getRelRoot() {
return relRoot;
}
public <K extends Model<K>> RelationNode<QueryObject<T>, K> join(Class<K> cls) {
String rel = Model.getMetaManager().findEntityInfo(cls).getTableName();
RelationNode<QueryObject<T>, K> node = findFirst(NodeType.RELATION,
t -> t.getName().equals(rel),
() -> new RelationNode<>(cls, relLast).attach(this)
);
return node.setJoinType(JoinType.INNER);
}
public <K extends Model<K>> AttributeNode<QueryObject<T>, K> select(PropertyGetter<K> attr) {
return new AttributeNode<>(this, attr);
}
public <L extends Model<L>, R extends Model<R>> ConditionNode<QueryObject<T>, L, R> where(PropertyGetter<L> leftAttr, Func<String> op, PropertyGetter<R> rightAttr) {
if (whereRoot == null) {
return new ConditionNode<>(leftAttr, op, rightAttr, whereLast).attach(this);
}
return new ConditionNode<>(leftAttr, op, rightAttr, whereLast)
.setLogicalType(LogicalType.AND)
.attach(this);
}
public <K extends Model<K>> ConditionNode<QueryObject<T>, K, ?> where(PropertyGetter<K> attr
, Func<String> op
, Object... params) {
if (whereRoot == null) {
return new ConditionNode<>(attr, op, whereLast, params).attach(this);
} else {
return new ConditionNode<>(attr, op, whereLast, params)
.setLogicalType(LogicalType.AND)
.attach(this);
}
}
public ConditionNode<QueryObject<T>, T, ?> where(PropertyInfo attr
, Func<String> op
, Supplier<Object[]> params) {
if (whereRoot == null) {
return new ConditionNode<>(relRoot, attr, op, whereLast, params).attach(this);
} else {
return new ConditionNode<>(relRoot, attr, op, whereLast, params)
.setLogicalType(LogicalType.AND)
.attach(this);
}
}
public <L extends Model<L>, R extends Model<R>> ConditionNode<QueryObject<T>, L, R> and(PropertyGetter<L> leftAttr
, Func<String> op
, PropertyGetter<R> rightAttr) {
return new ConditionNode<>(leftAttr, op, rightAttr, whereLast)
.setLogicalType(LogicalType.AND)
.attach(this);
}
public <K extends Model<K>> ConditionNode<QueryObject<T>, K, ?> and(PropertyGetter<K> attr, Func<String>
op, Object... params) {
return new ConditionNode<>(attr, op, this.whereLast, params)
.setLogicalType(LogicalType.AND)
.attach(this);
}
public <L extends Model<L>, R extends Model<R>> ConditionNode<QueryObject<T>, L, R> or(PropertyGetter<L> leftAttr
, Func<String> op
, PropertyGetter<R> rightAttr) {
return new ConditionNode<>(leftAttr, op, rightAttr, whereLast)
.setLogicalType(LogicalType.OR)
.attach(this);
}
public <K extends Model<K>> ConditionNode<QueryObject<T>, K, ?> or(PropertyGetter<K> attr
, Func<String> op
, Object... params) {
return new ConditionNode<>(attr, op, this.whereLast, params)
.setLogicalType(LogicalType.OR)
.attach(this);
}
public <K extends Model<K>> OrderByNode<QueryObject<T>> orderBy(Class<K> rel, PropertyGetter<K> attr) {
if (orderByRoot == null) {
new OrderByNode<QueryObject<T>>().attach(this);
}
orderByRoot.asc(rel, attr);
return orderByRoot;
}
public <K extends Model<K>> OrderByNode<QueryObject<T>> orderByDesc(Class<K> rel, PropertyGetter<K> attr) {
if (orderByRoot == null) {
new OrderByNode<QueryObject<T>>().attach(this);
}
orderByRoot.desc(rel, attr);
return orderByRoot;
}
public <K extends Model<K>> GroupByNode<QueryObject<T>> groupBy(Class<K> rel, PropertyGetter<K> attr) {
if (groupByRoot == null) {
new GroupByNode<QueryObject<T>>().attach(this);
}
groupByRoot.add(rel, attr);
return groupByRoot;
}
public LimitNode<QueryObject<T>> limit(int start) {
if (limitRoot == null)
limitRoot = new LimitNode<QueryObject<T>>().attach(this);
limitRoot.setStart(start);
return limitRoot;
}
public QueryObject<T> limit(int start, int end) {
if (limitRoot == null)
limitRoot = new LimitNode<QueryObject<T>>().attach(this);
limitRoot.setStart(start).setEnd(end);
return this;
}
public <K extends Model<K>, V extends Model<V>> QueryObject<T> include(OperationContext context
, Class<K> thisCls
, Class<V> includeCls
, PropertyGetter<K> prop) {
this.context = context;
context.include(this, thisCls, includeCls, prop);
return this;
}
@SuppressWarnings("unchecked")
@Override
public void execute(Executor executor) {
String sql = toString();
System.out.println(sql);
ObjectHandler<T> handler = new ObjectHandler<>((Class<T>) relRoot.getEntityInfo().getType());
List<AttributeNode<?, ?>> attrList = getNodes()
.stream().filter(t -> t.getType() == NodeType.ATTRIBUTE)
.map(t -> (AttributeNode<?, ?>) t)
.collect(Collectors.toList());
attrList.forEach(t -> handler.add(t.getAlias(), t.getProp()));
executor.executeQuery(sql, handler, getParams().toArray());
setResult(new ResultData<>(handler.getAll()));
}
public void load() {
context.load(this.getId());
}
@SuppressWarnings("unchecked")
@Override
public String toString() {
this.getParams().clear();
StringBuilder sb = new StringBuilder();
sb.append(Token.SELECT);
List<AttributeNode<QueryObject<T>, ?>> attrList = getNodes()
.stream().filter(t -> t.getType() == NodeType.ATTRIBUTE)
.map(t -> (AttributeNode<QueryObject<T>, ?>) t)
.collect(Collectors.toList());
int attrSize = attrList.size();
if (attrSize == 0) {
List<RelationNode<QueryObject<T>, ?>> relList = getNodes()
.stream().filter(t -> t.getType() == NodeType.RELATION)
.map(t -> (RelationNode<QueryObject<T>, ?>) t)
.collect(Collectors.toList());
for (RelationNode<QueryObject<T>, ?> rel : relList) {
List<PropertyInfo> propList = rel.getEntityInfo().getPropList();
for (PropertyInfo prop : propList) {
AttributeNode<QueryObject<T>, ?> attrNode = new AttributeNode<>(rel, prop);
attrList.add(attrNode);
attrNode.attach(this);
}
}
}
attrSize = attrList.size();
for (int i = 0; i < attrSize; i++) {
SqlNode<?, ?> node = attrList.get(i);
node.toString(sb);
if (i < attrSize - 1)
sb.append(",");
}
sb.append(Token.FROM);
relRoot.toString(sb);
if (whereRoot != null) {
sb.append(Token.WHERE);
whereRoot.toString(sb);
}
if (groupByRoot != null) {
groupByRoot.toString(sb);
}
if (orderByRoot != null) {
orderByRoot.toString(sb);
}
if (limitRoot != null) {
limitRoot.toString(sb);
}
return sb.toString();
}
@SuppressWarnings("unchecked")
@Override
protected <K extends SqlNode<?, ?>> void doAttach(K node) {
switch (node.getType()) {
case NodeType.RELATION:
relLast = (RelationNode<QueryObject<T>, ?>) node;
break;
case NodeType.CONDITION:
ConditionNode<QueryObject<T>, ?, ?> whereNode = (ConditionNode<QueryObject<T>, ?, ?>) node;
whereRoot = whereRoot == null ? whereNode : whereRoot;
whereLast = whereNode;
break;
case NodeType.GROUP_BY:
GroupByNode<QueryObject<T>> groupByNode = (GroupByNode<QueryObject<T>>) node;
groupByRoot = groupByRoot == null ? groupByNode : groupByRoot;
break;
case NodeType.ORDER_BY:
OrderByNode<QueryObject<T>> orderByNode = (OrderByNode<QueryObject<T>>) node;
orderByRoot = orderByRoot == null ? orderByNode : orderByRoot;
break;
case NodeType.LIMIT:
LimitNode<QueryObject<T>> limitNode = (LimitNode<QueryObject<T>>) node;
limitRoot = limitRoot == null ? limitNode : limitRoot;
break;
}
}
}
|
/**
* Parse HTTP request query - generate request parameters. This filter is needed
* since WebSphere throws away parameters which lack values (e.g.
* ...?create&resource-uri=abc - create parameter gets discarded).
*/
public class WebSphereParametersFilter implements Filter {
public static final String CONTENT_TYPE_WWW_FORM_URLENCODED =
"application/x-www-form-urlencoded"; //$NON-NLS-1$
public void init(FilterConfig filterConfig) throws ServletException {
}
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
if (request instanceof HttpServletRequest) {
HttpServletRequest httpRequest = (HttpServletRequest)request;
Map<String, List<String>> paramMapWithList = new HashMap<String, List<String>>();
String query = httpRequest.getQueryString();
if (query != null && query.length() != 0) {
// parse query string
parseQuery(paramMapWithList, query);
}
String contentType = httpRequest.getContentType();
if (contentType != null && contentType.startsWith(CONTENT_TYPE_WWW_FORM_URLENCODED)) {
// parse form data
InputStreamReader isr = new InputStreamReader(httpRequest.getInputStream());
StringWriter sw = new StringWriter();
char[] buffer = new char[4096];
int len;
while ((len = isr.read(buffer)) > 0) {
sw.write(buffer, 0, len);
}
parseQuery(paramMapWithList, sw.toString());
}
if (!paramMapWithList.isEmpty()) {
// something parsed - convert map to String -> String[] map
Map<String, String[]> paramMap =
new HashMap<String, String[]>(paramMapWithList.size());
for (Map.Entry<String, List<String>> e : paramMapWithList.entrySet()) {
paramMap.put(e.getKey(), e.getValue().toArray(new String[e.getValue().size()]));
}
// create wrapping request and forward to chain
request = new ParametersFilterRequestWrapper(httpRequest, paramMap);
}
}
        // forward the request (wrapped if parameters were parsed) down the chain
        chain.doFilter(request, response);
}
public void destroy() {
}
/**
* Parse query into String -> ArrayList<String> map.
*
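     * For example, the query "create&resource-uri=abc" is parsed into
     * {"create" -> [""], "resource-uri" -> ["abc"]}.
     *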
* @param paramMap map of parameters (String -> ArrayList<String>)
* @param query query to parse
*/
private static void parseQuery(Map<String, List<String>> paramMap, String query) {
StringTokenizer tokenizer = new StringTokenizer(query, "&"); //$NON-NLS-1$
while (tokenizer.hasMoreTokens()) {
String name;
String value;
String token = tokenizer.nextToken();
int equal = token.indexOf('=');
if (equal != -1) {
name = UriEncoder.decodeString(token.substring(0, equal));
value = UriEncoder.decodeString(token.substring(equal + 1));
} else {
name = UriEncoder.decodeString(token);
value = ""; //$NON-NLS-1$
}
List<String> values = paramMap.get(name);
if (values == null) {
values = new ArrayList<String>(1);
}
values.add(value);
paramMap.put(name, values);
}
}
private static class ParametersFilterRequestWrapper extends HttpServletRequestWrapper {
private Map<String, String[]> paramMap;
ParametersFilterRequestWrapper(HttpServletRequest request, Map<String, String[]> paramMap) {
super(request);
this.paramMap = paramMap;
}
@Override
public String[] getParameterValues(String name) {
return paramMap.get(name);
}
@Override
public Enumeration<String> getParameterNames() {
return Collections.enumeration(paramMap.keySet());
}
@Override
public Map<String, String[]> getParameterMap() {
return Collections.unmodifiableMap(paramMap);
}
@Override
public String getParameter(String name) {
String[] v = getParameterValues(name);
return v == null ? null : v[0];
}
}
} |
"""
A Nash equilibrium is a solution concept for strategic games – such as the
Prisoner's dilemma – describing a "steady state" in which every player sees
the "best possible" outcome given the other players' choices. However, this
doesn't mean that the outcome linked to
a Nash equilibrium is the best overall. Nash equilibria are more subtle than
this. An informal definition of a Nash equilibrium is as follows: an action
profile in which no individual player can improve their outcome, assuming
that all other players adhere to the profile.
It is possible to explore the notion of Nash equilibrium with the classic
game of rock-paper-scissors. The rules are as follows. Each player can choose
one of the options: rock, paper, or scissors. Rock beats scissors, but loses
to paper; paper beats rock, but loses to scissors; scissors beats paper, but
loses to rock. Any game in which both players make the same choice is a draw.
Numerically, we represent a win by +1, a loss by -1, and a draw by 0. From
this, we can construct a two-player game and compute Nash equilibria for this
game.
This module illustrates how to compute Nash equilibria for the classic game
of rock-paper-scissors.
"""
import numpy as np
import nashpy as nash
rps_p1 = np.array([
[0, -1, 1], # rock payoff
[1, 0, -1], # paper payoff
[-1, 1, 0] # scissors payoff
])
rps_p2 = rps_p1.transpose()
rps = nash.Game(rps_p1, rps_p2)
equilibria = rps.support_enumeration()
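# Support enumeration yields rock-paper-scissors' unique Nash equilibrium:
# both players mix uniformly, playing each strategy with probability 1/3.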
for p1, p2 in equilibria:
    print("Player 1: ", p1)
    print("Player 2: ", p2)
|
Farmers and food executives appealed fruitlessly to federal officials yesterday for regulatory steps to limit speculative buying that is helping to drive food prices higher. Meanwhile, some Americans are stocking up on staples such as rice, flour and oil in anticipation of high prices and shortages spreading from overseas.
Their pleas did not find a sympathetic audience at the Commodity Futures Trading Commission (CFTC), where regulators said high prices are mostly the result of soaring world demand for grains combined with high fuel prices and drought-induced shortages in many countries.
The regulatory clash came amid evidence that a rash of headlines in recent weeks about food riots around the world has prompted some in the United States to stock up on staples.
Costco and other grocery stores in California reported a run on rice, which has forced them to set limits on how many sacks of rice each customer can buy. Filipinos in Canada are scooping up all the rice they can find and shipping it to relatives in the Philippines, which is suffering a severe shortage that is leaving many people hungry.
While farmers here and abroad generally are benefiting from the high prices, even they have been burned by a tidal wave of investors and speculators who have poured into the futures markets for corn, wheat, rice and other commodities, driving up prices in a way that makes it difficult for farmers to run their businesses.
“Something is wrong,” said National Farmers Union President Tom Buis, adding that the CFTC’s refusal to rein in speculators will force farmers and consumers to take their case to Congress.
“It may warrant congressional intervention,” he said. “The public is all too aware of the recent credit crisis on Wall Street. We don’t want a lack of oversight and regulation to lead to a similar crisis in rural America.”
Food economists testifying at a daylong hearing of the commission said the doubling of rice and wheat prices in the past year is a result of strong income growth in China, India and other Asian countries, where people entering the middle class are buying more food and eating more meat. Farm animals consume a substantial share of the world’s grain.
U.S. wheat stocks are at the lowest levels in 60 years because worldwide consumption of wheat has exceeded production in six of the past eight years, said U.S. Agriculture Department chief economist Gerald Bange. Adding to tight supplies was the back-to-back failure of two years of wheat crops caused by drought in Australia, a major wheat exporter, he said.
In addition, the diversion of one-third of the U.S. corn crop into making ethanol for vehicles has increased prices for corn and other staples such as soybeans and cotton as more acreage is set aside for ethanol production.
Farmers also have raised prices because they have been hard hit by spiraling energy costs, which not only raised the price of diesel fuel to records of over $4 a gallon but drove up the cost of nitrogen fertilizer, which is made from natural gas.
“Commodity prices across the board are at levels not experienced in many of our lifetimes,” said CFTC Chairman Walter Lukken. “These price levels, along with record energy costs, have put a strain on consumers as well as many producers and commercial participants that utilize the futures markets to manage risks.”
The upswing in prices has been exaggerated by the massive influx of investors and speculators seeking to profit from rising prices for corn, wheat, oil, gold and other commodities. Big Wall Street firms and hedge funds have taken huge positions in futures markets that once were dominated by relatively small operators such as farmers and grain-elevator owners.
Small investors, who see fast-rising commodities as good hedges against inflation and a falling dollar, also are getting a piece of the action by investing in index funds that are tied to commodity prices.
“During such turbulent times, it is tempting to shoot first and ask questions later,” Mr. Lukken said, but he contended the commission should be “cautious” about doing anything to curb speculation. He and other regulators argued that speculators add volume and liquidity to the markets, which makes them operate more efficiently and helps farmers and other players.
Commissioner Michael V. Dunn said the soaring demand for food and fuel worldwide might be leading to permanently higher food prices, both domestically and abroad.
“We may already be working under or fast approaching a new paradigm of higher agricultural prices,” he said. “There is not a silver bullet or single solution to address the problems we are currently facing.”
FARM TRADE
Federal market regulators say the soaring price of most commodities over the past year reflects increased demand rather than investor speculation.
Rice          +122%
Wheat*         +95%
Soybeans       +83%
Crude oil      +82%
Corn           +66%
Gasoline       +41%
Gold           +37%
Sugar          +30%
Coffee         +24%
Milk            +5%
Live cattle     -7%
Lumber         -14%
* On the Chicago Board of Trade
Source: Commodity Futures Trading Commission
Copyright © 2019 The Washington Times, LLC. |
/**
* Lives, divides to reproduce.
*
* User: mbs207
*/
public class Amoeboid extends Beast implements Runnable,Serializable {
final Ellipse2D shape;
transient NaturalSelection model;
transient NaturalTerrain nt;
//traits
Point2D good_terrain;
TerrainTypes affinity_terrain;
boolean moving = true;
Random brain = new Random();
int age;
int move_count=0;
double MOVE_PROB = 0.1;
int trait_count;
boolean grazing;
double vx, vy;
double radius;
boolean stops_to_eat, redirect, grazes, land_affinity, predator, conservative,swims, social, fast;
/**
* For creating from scratch, does not have any traits.
*
* @param x starting x position
* @param y starting y position
* @param parent the NaturalSelection program that this is running.
*/
public Amoeboid(double x, double y, NaturalSelection parent){
age = (int)(100*brain.nextDouble());
size = 8 + 4*brain.nextDouble();
radius = Math.sqrt(size);
MAX_VELOCITY = 0.5*brain.nextDouble();
reproductive_age = 105 - 5*brain.nextDouble();
loc = new Point2D.Double(x,y);
life = 5;
MAX_LIFE=size*20;
shape = new Ellipse2D.Double(loc.getX() - 0.5*radius,loc.getY() - 0.5*radius, radius, radius);
double theta = 2*Math.PI*brain.nextDouble();
vx = MAX_VELOCITY *Math.sin(theta);
vy = MAX_VELOCITY *Math.cos(theta);
dead = false;
traits = new HashSet<BeastTraits>();
initializeTraits();
consume = 1 + size + MAX_VELOCITY;
trait_count = traits.size();
setParent(parent);
}
/**
     * Creates a descendant of the ancestor. This allows
     * more variation through mutations, and the possible addition
     * of traits.
     *
     * @param ancestor the amoeboid that 'gave birth' to this one.
*/
public Amoeboid(Amoeboid ancestor){
age = 0;
reproductive_age = ancestor.reproductive_age + 5 - 10*brain.nextDouble();
size = ancestor.size + 2 - 4*brain.nextDouble();
size = size<=1?1:size;
radius = Math.sqrt(size);
reproductive_age = reproductive_age<10 + size*5?10 + 5*size:reproductive_age;
MAX_VELOCITY = ancestor.MAX_VELOCITY + 0.05 - 0.1*brain.nextDouble();
MAX_VELOCITY = MAX_VELOCITY>0?MAX_VELOCITY:0;
loc = new Point2D.Double(ancestor.loc.getX(), ancestor.loc.getY());
MAX_LIFE=size*10;
life = ancestor.life;
double radius = Math.sqrt(size);
shape = new Ellipse2D.Double(loc.getX() - 0.5*radius,loc.getY() - 0.5*radius, radius, radius);
double theta = 2*Math.PI*brain.nextDouble();
vx = MAX_VELOCITY *Math.sin(theta);
vy = MAX_VELOCITY *Math.cos(theta);
inheritTraits(ancestor);
initializeTraits();
consume = 1 + size + MAX_VELOCITY + traits.size();
if(trait_count>ancestor.trait_count)
c = new Color((int)((1<<24)*(brain.nextDouble())));
else
c = ancestor.c;
dead = false;
setParent(ancestor.model);
}
/**
     * Goes through the ancestor's traits and the other possible traits to
     * decide whether each trait will exist, then initializes the associated
     * variables.
     * @param ancestor the amoeboid whose traits are inherited
*/
private void inheritTraits(Amoeboid ancestor){
traits = new HashSet<BeastTraits>();
traits.addAll(ancestor.traits);
for(BeastTraits t: BeastTraits.values()){
if(ancestor.traits.contains(t)){
if(brain.nextDouble()<0.99)
traits.add(t);
} else if(brain.nextDouble()<0.001){
traits.add(t);
}
}
if(traits.contains(BeastTraits.land_affinity)){
if(ancestor.traits.contains(BeastTraits.land_affinity)){
affinity_terrain = ancestor.affinity_terrain;
good_terrain = ancestor.good_terrain;
}else
chooseLand();
}
}
public void initializeTraits(){
predator = traits.contains(BeastTraits.predator);
stops_to_eat = !predator||traits.contains(BeastTraits.omnivore);
redirect = traits.contains(BeastTraits.redirect);
grazes = traits.contains(BeastTraits.graze);
land_affinity = traits.contains(BeastTraits.land_affinity);
conservative = traits.contains(BeastTraits.conservation);
swims = affinity_terrain==TerrainTypes.water;
social = traits.contains(BeastTraits.social);
fast = traits.contains(BeastTraits.fast);
trait_count = traits.size();
grazing = false;
}
/**
* for land affinity, selects with land to be attracted too.
*/
private void chooseLand(){
TerrainTypes[] tt = TerrainTypes.values();
affinity_terrain=tt[(int)(brain.nextDouble()*tt.length)];
good_terrain = loc;
}
public void paint(Graphics2D g){
g.setColor(c);
shape.setFrame(loc.getX() - 0.5*radius,loc.getY() - 0.5*radius, radius, radius);
g.draw(shape);
}
/**
     * Follows logic depending on traits. If it is moving,
     * it continues to move; if it is not moving, it eats.
     * If it is a predator, it kills things. If it follows or
     * redirects, it does so.
*
*/
public void interact(){
age++;
double f = nt.getFood(loc);
if(moving){
life-=0.2*consume;
move_count++;
if(stops_to_eat&&(f>2*consume||life<2)){
moving=false;
} else if(redirect){
redirect();
}
if(grazes){
grazing=life<MAX_LIFE/2&&life>2;
if(grazing)
eatFood();
}
} else{
life-=0.1*consume;
if(f>0){
eatFood();
} else{
life-=0.1*consume;
move_count = 0;
moving=true;
}
}
if(life<=0)
die();
if(age>reproductive_age)
birth();
if(land_affinity){
if(affinity_terrain != null && affinity_terrain.compareTo(nt.getType(loc))==0){
good_terrain=new Point2D.Double(loc.getX(), loc.getY());
}
}
if(predator){
for(Beast b: nt.getNeighbors(loc)){
kill(b);
if(life==MAX_LIFE&&conservative)
break;
}
}
}
/**
* Tries to eat its weight in food.
*
*/
void eatFood(){
if(stops_to_eat){
double v = consume;
if(conservative)
v = consume>MAX_LIFE-life?MAX_LIFE-life:consume;
double y = nt.eat(loc,v);
y = y<size?y:size;
life+=y;
life = life>MAX_LIFE?MAX_LIFE:life;
}
}
/**
* Tries to kill the other beast.
*
* @param b the beast to be killed.
*/
void kill(Beast b){
if(b.c==c)
return;
if(!b.dead&&b.size<size&&shape.contains(b.loc)){
if(b.traits.contains(BeastTraits.evasive)&&brain.nextDouble()<0.25)
return;
model.killBeast(b);
b.dead=true;
life += b.life + b.size;
life = life>MAX_LIFE?MAX_LIFE:life;
}
}
/**
* Moves towards another beast. Intended to mimic
* herd-like behavior.
*
* @param b followed
*/
void follow(Beast b){
moveTowards(b.loc);
}
/**
* Goes towards a specific point, used by two different traits.
*
* @param target
*/
void moveTowards(Point2D target){
double dx = target.getX() - loc.getX();
double dy = target.getY() - loc.getY();
double mag = Math.sqrt(Math.pow(dx,2) + Math.pow(dy,2));
if(mag>MAX_VELOCITY){
vx = MAX_VELOCITY*dx/mag;
vy = MAX_VELOCITY*dy/mag;
} else if(mag>0){
vx = dx;
vy = dy;
}
}
/**
* Moves in a random direction.
*
*/
public void changeDirections(){
double theta = 2*Math.PI*brain.nextDouble();
vx = MAX_VELOCITY *Math.sin(theta);
vy = MAX_VELOCITY *Math.cos(theta);
}
/**
* Creates a new Amoeboid
*/
void birth(){
life = life/2;
Amoeboid kin = new Amoeboid(this);
model.addBeast(kin);
age = 0;
}
/**
* dies and informs the model of death.
*/
void die(){
model.killBeast(this);
dead = true;
}
/** For checking bounds without access to the shape. */
public Rectangle2D getBounds(){
return shape.getBounds();
}
/** moves */
public void run(){
if(moving){
move();
}
}
/** moves */
public void move(){
double tmod = 1;
switch(nt.getType(loc)){
case water:
if(!swims)
tmod = 0.5;
break;
}
if(fast)
tmod+=2;
if(grazing)
tmod*=0.5;
Rectangle2D rect = new Rectangle2D.Double(loc.getX() + tmod*vx - radius*0.5, loc.getY() + tmod*vy - radius*0.5, radius, radius);
if(model.checkBounds(rect)){
loc.setLocation(loc.getX() + tmod*vx, loc.getY() + tmod*vy);
shape.setFrame(rect);
} else{
changeDirections();
}
}
/** changes direction */
public void redirect(){
double test = brain.nextDouble();
double check=0;
if(land_affinity&&affinity_terrain.compareTo(nt.getType(loc))!=0){
check += 0.2*move_count;
if(test<check){
moveTowards(good_terrain);
move_count=0;
return;
}
}else if(social){
check += 0.2*move_count;
if(test < check){
for(Beast b: nt.getNeighbors(loc)){
if(b!=this&&b.c==c){
follow(b);
move_count=0;
return;
}
}
}
}else if(brain.nextDouble()<MOVE_PROB*move_count + check){
changeDirections();
move_count = 0;
}
}
public void setParent(NaturalSelection ns){
model = ns;
nt = ns.terrain;
if(nt.checkBounds(shape.getBounds()))
model.scheduleBeastAction(this, 50L);
else
die();
}
} |
import {Composable} from './Composable'
import {MaybeSelector} from './MaybeSelector'
import {Selector} from './Selector'
import {Converter} from './Converter'
import {Get, GetSignature} from './Get'
import {Set} from './Set'
import { Extension } from './Extension';
import { Memoize } from './Memoize';
import { Debug } from './Debug';
import { MaybeGet } from './MaybeGet';
import { Dimensionality, Structure } from './Discriminants';
export interface MaybeConverterCompose<A, B, Params> {
<C, BCParams>(other: Get<B, C, BCParams>): MaybeGet<A, C, Params & BCParams>
<C, BCParams>(other: MaybeGet<B, C, BCParams>): MaybeGet<A, C, Params & BCParams>
<C, BCParams>(other: MaybeSelector<B, C, BCParams>): MaybeSelector<A, C, Params & BCParams>
<C, BCParams>(other: Selector<B, C, BCParams>): MaybeSelector<A, C, Params & BCParams>
<C, BCParams>(other: MaybeConverter<B, C, BCParams>): MaybeConverter<A, C, Params & BCParams>
<C, BCParams>(other: Converter<B, C, BCParams>): MaybeConverter<A, C, Params & BCParams>
}
export interface MaybeConverter<A, B, Params extends {} = {}> {
_structure: Structure.Convert
_dimensionality: Dimensionality.Maybe
type: "maybeConverter"
extend: (ext: Extension) => MaybeConverter<A, B, Params>
get: MaybeGet<A, B, Params>
reverseGet: Get<B, A, Params>
compose: MaybeConverterCompose<A, B, Params>
withDefault: (ifNull: (GetSignature<A, B, Params> | Get<A, B, Params>)) => Converter<A, B, Params>
withDefaultValue: (ifNull: B) => Converter<A, B, Params>
memoize: () => MaybeConverter<A, B, Params>
debug: () => MaybeConverter<A, B, Params>
}
export namespace MaybeConverter {
export const fromGets = <A, B, Params extends {} = {}>(get: (a: A, p: Params) => B | null, reverseGet: (b: B, p: Params) => A) =>
create(MaybeGet.create(get), Get.create(reverseGet))
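// Illustrative usage only (this string/number converter is an assumption, not part of the module):
// const parsed = MaybeConverter.fromGets<string, number>(
//   s => { const n = Number(s); return isNaN(n) ? null : n },
//   n => n.toString())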
export const wrapSet = <A, B, C, ABParams, BCParams>(get: MaybeGet<A, B, ABParams>, reverseGet: Get<B, A, ABParams>, set: Set<B, C, BCParams>) =>
Set.create<A, C, ABParams & BCParams>((a, p, c) => {
const b = get._underlying(a, p)
if (b === null) {
return a
} else {
return reverseGet._underlying(set._underlying(b, p, c), p)
}
})
export const create = <A, B, Params extends {} = {}>(get: MaybeGet<A, B, Params>, reverseGet: Get<B, A, Params>, ext: Extension = Extension.none): MaybeConverter<A, B, Params> => {
get = get.extend(ext)
reverseGet = reverseGet.extend(ext)
const extend = (newExtension: Extension) =>
create(get, reverseGet, Extension.combine(ext, newExtension))
const compose: any = <C, BCParams>(other: Composable<B, C, BCParams>) => {
if (Composable.is(Dimensionality.Single, Structure.Get, other)) {
return get.compose(other)
} else if (Composable.is(Dimensionality.Maybe, Structure.Get, other)) {
return get.compose(other)
} else if (Composable.is(Dimensionality.Single, Structure.Select, other)) {
return MaybeSelector.create(get.compose(other), wrapSet(get, reverseGet, other.set), ext)
} else if (Composable.is(Dimensionality.Maybe, Structure.Select, other)) {
return MaybeSelector.create(get.compose(other), wrapSet(get, reverseGet, other.set), ext)
} else if (Composable.is(Dimensionality.Single, Structure.Convert, other)) {
return create(get.compose(other), other.reverseGet.compose(reverseGet), ext)
} else if (Composable.is(Dimensionality.Maybe, Structure.Convert, other)) {
return create(get.compose(other), other.reverseGet.compose(reverseGet), ext)
}
}
const withDefault = (ifNull: (GetSignature<A, B, Params> | Get<A, B, Params>)): Converter<A, B, Params> =>
Converter.create<A, B, Params>(get.withDefault(ifNull), reverseGet, ext)
const withDefaultValue = (ifNull: B): Converter<A, B, Params> =>
withDefault(Get.create<A, B, Params>(_ => ifNull, ext))
const maybeConverter: MaybeConverter<A, B, Params> = {
_structure: Structure.Convert,
_dimensionality: Dimensionality.Maybe,
type: "maybeConverter",
extend,
get,
reverseGet,
compose,
withDefault,
withDefaultValue,
memoize: () => extend(Memoize()),
debug: () => extend(Debug())
}
ext.apply(maybeConverter)
return maybeConverter
}
}
|
def html(self):
html = markdown.markdown('\n'.join(self.body))
if self.style:
return premailer.transform('<style>\n' + self.style +
'\n</style>\n' + html)
return html |
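# Illustrative sketch (values assumed): with body ["# Hi"] and style "h1 {color: red}",
# premailer.transform inlines the rule, yielding markup like '<h1 style="color: red">Hi</h1>'.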
// evalCompositeStructLit evaluates a composite struct literal.
func (sc *scope) evalCompositeStructLit(cl *ast.CompositeLit) reflect.Value {
val := reflect.New(sc.typeOf(cl.Type))
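// Elements are either keyed (Field: value, assigned via FieldByName) or positional (assigned via Field(idx)).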
for idx, elt := range cl.Elts {
switch expr := elt.(type) {
case *ast.KeyValueExpr:
switch kexpr := expr.Key.(type) {
case *ast.Ident:
key := kexpr.Name
exprval := sc.evalExpr(expr.Value, nil)[0]
val.Elem().FieldByName(key).Set(exprval)
default:
sc.err("cannot handle key expression of type %T in composite literal", expr.Key)
}
default:
val.Elem().Field(idx).Set(sc.evalExpr(elt, nil)[0])
}
}
return val.Elem()
} |
def template_staging_directory(staging_directory, problem):
dont_template = copy(problem.dont_template) + [
"app/templates",
"problem.json",
"challenge.py",
"templates",
"__pre_templated",
]
dont_template_files = list(filter(isfile, dont_template))
dont_template_directories = list(filter(isdir, dont_template))
dont_template_directories = [
join(staging_directory, directory) for directory in dont_template_directories
]
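# os.path.commonprefix below compares paths textually, so these absolute
# staging paths must share a prefix with the walked roots.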
for root, dirnames, filenames in os.walk(staging_directory):
if any(
os.path.commonprefix([root, path]) == path
for path in dont_template_directories
):
logger.debug(
"....Not templating anything in the directory '{}'".format(root)
)
continue
for filename in filenames:
if filename in dont_template_files:
logger.debug("....Not templating the file '{}'".format(filename))
continue
fullpath = join(root, filename)
try:
template_file(fullpath, fullpath, **get_attributes(problem))
except UnicodeDecodeError:
# Binary files cannot be rendered as templates; skip them.
pass
inp=input()
lst=list()
for i in inp.split(' '):
lst.append(int(i))
lst.sort(reverse=True)
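# After the descending sort, lst[0] is the largest input.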
a=lst[0]-lst[1]
b=lst[0]-lst[2]
c=lst[0]-lst[3]
print(a,b,c)
|
def all_true(results):
return all(r.value for r in results)
/**
* Helper base class for comparison functions like max and min.
*/
public abstract class CompareFunction extends ArrayMathFunction {
public CompareFunction() {
super(ArgumentConstraints.typeOf(JmesPathType.NUMBER, JmesPathType.STRING));
}
/**
* Subclasses override this method to decide whether the greatest or least
* element sorts first.
*/
protected abstract boolean sortsBefore(int compareResult);
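// For example, a max() implementation returns compareResult > 0, while min() returns compareResult < 0.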
@Override
protected <T> T performMathOperation(Adapter<T> runtime, List<T> values) {
if (values.isEmpty()) {
return runtime.createNull();
} else {
Iterator<T> vs = values.iterator();
T result = vs.next();
while (vs.hasNext()) {
T candidate = vs.next();
if (sortsBefore(runtime.compare(candidate, result))) {
result = candidate;
}
}
return result;
}
}
} |
The design of a rotating associative array memory for a relational data base management application
There are significant advantages to tailoring hardware storage devices to support high level data models in very large data bases. A storage device that can assume some of the data selection functions traditionally performed by the CPU can substantially reduce the amount of data to be transferred to the CPU. This reduction, together with increased concurrency of CPU and device operation, results in increased data rates and lower response times. By designing the device to support one specific data model, greater efficiency can be achieved than in devices designed to be a compromise in their support of several different models. The number and complexity of the functions performed by the device can be drastically reduced, along with its development and production costs. In this paper we describe the design and usage of an associative array memory using a rotating storage device which is tailored to support the relational model of E. F. Codd.
File photo of United Launch Alliance launching an Atlas V rocket with a United States Air Force OTV-4 onboard from Cape Canaveral Air Force Station, Florida, May 20, 2015. REUTERS/Michael Brown
WASHINGTON (Reuters) - The Pentagon said on Friday its inspector general will investigate comments made by a former executive with United Launch Alliance (ULA) who suggested the department had improperly tipped a competition for rocket launches in its favor.
Defense Secretary Ash Carter has referred the matter to the Pentagon’s independent watchdog, on the recommendation of chief arms buyer Frank Kendall and Air Force Secretary Deborah James, Pentagon spokesman Peter Cook said.
“The Secretary is concerned by recent statements regarding competition for national security space launch,” Cook said.
John McCain, chairman of the U.S. Senate Armed Services Committee, on Thursday urged Carter to investigate what he called troubling remarks by Brett Tobey, who resigned Wednesday as vice president of engineering for ULA. The firm, a joint venture of Lockheed Martin Corp and Boeing Co, has been the sole provider for U.S. military launches for nearly a decade.
Tobey said during a lecture at a Colorado university on Wednesday that the Defense Department “bent over backwards to lean the field” to ULA’s advantage in a competition with new market entrant Space Exploration Technologies, or SpaceX, owned by billionaire Elon Musk.
Tobey also said the Pentagon was trying to figure out “how do we silence McCain,” who has urged the government to penalize ULA for failing to bid in the competition despite receiving $800 million in government funding for launch services every year, on top of its actual launch contracts.
ULA has said Tobey’s comments were “inaccurate.” The company is scrambling to restructure so it can compete with SpaceX.
/**
* Generates a random string using the characters defined by the salt string.
* @param length the length of the string to be generated
* @return the randomly generated string
*/
public String generateRandomString(final int length) {
if (length < 1) {
throw new IllegalArgumentException("length < 1: " + length);
}
final StringBuilder salt = new StringBuilder();
while (salt.length() < length) {
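// nextFloat() is strictly less than 1, so the index is always within bounds.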
final int index = (int) (random.nextFloat() * saltchars.length());
salt.append(saltchars.charAt(index));
}
return salt.toString();
} |
package graphql.language;
import java.util.List;
public class AstComparator {
public boolean isEqual(Node node1, Node node2) {
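// Two nodes are equal iff they match shallowly and all of their children match recursively.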
if (!node1.isEqualTo(node2)) return false;
List<Node> children1 = node1.getChildren();
List<Node> children2 = node2.getChildren();
if (children1.size() != children2.size()) return false;
for (int i = 0; i < children1.size(); i++) {
if (!isEqual(children1.get(i), children2.get(i))) return false;
}
return true;
}
}
|
package bungieapigo
// This component returns references to all of the Vendors in the response, grouped by
// categorizations that Bungie has deemed to be interesting, in the order in which both the groups
// and the vendors within that group should be rendered.
type DestinyVendorGroupComponent struct {
// The ordered list of groups being returned.
Groups []DestinyVendorGroup `json:"groups"`
}
|
package s3inventory
import (
"fmt"
"time"
"github.com/cznic/mathutil"
"github.com/go-openapi/swag"
"github.com/spf13/cast"
"github.com/xitongsys/parquet-go/parquet"
"github.com/xitongsys/parquet-go/reader"
"github.com/xitongsys/parquet-go/schema"
)
type ParquetInventoryFileReader struct {
*reader.ParquetReader
nextRow int64
fieldToParquetPath map[string]string
}
func NewParquetInventoryFileReader(parquetReader *reader.ParquetReader) (*ParquetInventoryFileReader, error) {
fieldToParquetPath := getParquetPaths(parquetReader.SchemaHandler)
for _, required := range requiredFields {
if _, ok := fieldToParquetPath[required]; !ok {
return nil, fmt.Errorf("%w: %s", ErrRequiredFieldNotFound, required)
}
}
return &ParquetInventoryFileReader{
ParquetReader: parquetReader,
fieldToParquetPath: fieldToParquetPath,
}, nil
}
func (p *ParquetInventoryFileReader) Close() error {
p.ReadStop()
return p.PFile.Close()
}
func (p *ParquetInventoryFileReader) getKeyColumnStatistics() *parquet.Statistics {
for i, c := range p.Footer.RowGroups[0].Columns {
if c.MetaData.PathInSchema[len(c.GetMetaData().GetPathInSchema())-1] == "Key" {
return p.Footer.RowGroups[0].Columns[i].GetMetaData().GetStatistics()
}
}
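// Fallback: no column named "Key" was found; assume the key statistics are in column index 1.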
return p.Footer.RowGroups[0].Columns[1].GetMetaData().GetStatistics()
}
func (p *ParquetInventoryFileReader) FirstObjectKey() string {
return string(p.getKeyColumnStatistics().GetMin())
}
func (p *ParquetInventoryFileReader) LastObjectKey() string {
return string(p.getKeyColumnStatistics().GetMax())
}
func (p *ParquetInventoryFileReader) Read(n int) ([]*InventoryObject, error) {
num := mathutil.MinInt64(int64(n), p.GetNumRows()-p.nextRow)
p.nextRow += num
res := make([]*InventoryObject, num)
for fieldName, path := range p.fieldToParquetPath {
columnRes, _, dls, err := p.ReadColumnByPath(path, num)
if err != nil {
return nil, fmt.Errorf("failed to read parquet column %s: %w", fieldName, err)
}
for i, v := range columnRes {
if !isRequired(fieldName) && dls[i] == 0 {
// got no value for non-required field, move on
continue
}
if res[i] == nil {
res[i] = NewInventoryObject()
}
err := set(res[i], fieldName, v)
if err != nil {
return nil, fmt.Errorf("failed to read parquet column %s: %w", fieldName, err)
}
}
}
return res, nil
}
func set(o *InventoryObject, f string, v interface{}) error {
var err error
switch f {
case bucketFieldName:
o.Bucket, err = cast.ToStringE(v)
case keyFieldName:
o.Key, err = cast.ToStringE(v)
case isLatestFieldName:
o.IsLatest, err = cast.ToBoolE(v)
case isDeleteMarkerFieldName:
o.IsDeleteMarker, err = cast.ToBoolE(v)
case sizeFieldName:
o.Size, err = cast.ToInt64E(v)
case lastModifiedDateFieldName:
var lastModifiedMillis int64
lastModifiedMillis, err = cast.ToInt64E(v)
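// The inventory stores last-modified as Unix milliseconds; split into seconds and nanoseconds for time.Unix.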
seconds := lastModifiedMillis / int64(time.Second/time.Millisecond)
ns := (lastModifiedMillis % 1000) * int64(time.Millisecond/time.Nanosecond)
o.LastModified = swag.Time(time.Unix(seconds, ns))
case eTagFieldName:
o.Checksum, err = cast.ToStringE(v)
default:
return fmt.Errorf("%w: %s", ErrUnknownField, f)
}
return err
}
// getParquetPaths returns parquet schema fields as a mapping from their base column name to their path in ParquetReader
// only known inventory fields are returned
func getParquetPaths(schemaHandler *schema.SchemaHandler) map[string]string {
res := make(map[string]string)
for i, fieldInfo := range schemaHandler.Infos {
for _, field := range inventoryFields {
if fieldInfo.ExName == field {
res[field] = schemaHandler.IndexMap[int32(i)]
}
}
}
return res
}
|
Just a Dare or Unaware? Outcomes and Motives of Drugging (“Drink Spiking”) Among Students at Three College Campuses
Objective: Drugging (administering a drug to someone without their knowledge or consent) is acknowledged as a problem in “watch your drink” campaigns. However, research on this phenomenon is nascent. Prior research has primarily focused on drugging as a means of sexual assault, and has not addressed drugging more generally. Method: Survey data from 6,064 students at 3 universities was used to explore drugging among those who had drugged someone (or knew someone who had) and those who had been drugged. Results: More than 1 in 13 students reported being drugged (462 students, 7.8% of the sample, reported 539 incidents), and 83 students (1.4%) reported 172 incidents of drugging someone. Participants’ perceptions of why people drug others varied by gender. Women were much more likely to mention sex or sexual assault as a motive, while men were more likely to mention having fun as a motive. Participants also mentioned getting others more drunk or high and getting someone to relax as motives. It is possible that some motives (e.g., “to ‘loosen’ me up”) could be euphemisms for more coercive or sexual motives not directly stated. Outcomes for those drugged were also gendered, with female victims experiencing more negative outcomes, including sexual assault, blacking out, and getting sick. Although over 4 out of 5 of victims reported negative outcomes, a small number of (mostly male) victims said they enjoyed being drugged. Conclusions: To design interventions to prevent the negative consequences of drugging, the full context of drugging must be better understood. |
/**
* Tests that the event processor remains active after an HMS restart.
*/
@Test
public void testEventProcessorFetchAfterHMSRestart() throws ImpalaException {
CatalogServiceCatalog catalog = CatalogServiceTestCatalog.create();
CatalogOpExecutor catalogOpExecutor = new CatalogOpExecutor(catalog,
new NoopAuthorizationFactory().getAuthorizationConfig(),
new NoopAuthorizationManager());
MetastoreEventsProcessor fetchProcessor =
new HMSFetchNotificationsEventProcessor(catalogOpExecutor,
eventsProcessor_.getCurrentEventId(), 2L);
fetchProcessor.start();
try {
assertEquals(EventProcessorStatus.ACTIVE, fetchProcessor.getStatus());
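// Keep fetching until a fetch exception is thrown; the processor should remain active throughout.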
while (true) {
try {
fetchProcessor.getNextMetastoreEvents();
} catch (MetastoreNotificationFetchException ex) {
break;
}
}
assertEquals(EventProcessorStatus.ACTIVE, fetchProcessor.getStatus());
} finally {
fetchProcessor.shutdown();
}
} |
/* *****************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.github.tommyettinger.colorful.rgb;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Rectangle;
import com.github.tommyettinger.colorful.FloatColors;
import static com.github.tommyettinger.colorful.rgb.ColorfulBatch.*;
/** Holds the geometry, color, and texture information for drawing 2D sprites using {@link ColorfulBatch}. A ColorfulSprite has a position and a
* size given as width and height. The position is relative to the origin of the coordinate system specified via
* {@link ColorfulBatch#begin()} and the respective matrices. A ColorfulSprite is always rectangular and its position (x, y) are located in the
* bottom left corner of that rectangle. A ColorfulSprite also has an origin around which rotations and scaling are performed (that is,
* the origin is not modified by rotation and scaling). The origin is given relative to the bottom left corner of the ColorfulSprite, its
* position.
* @author mzechner
* @author <NAME>
* @author <NAME>
*/
public class ColorfulSprite extends TextureRegion {
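/** Each vertex packs x,y position, one packed additive color float, u,v texture coordinates, and one packed tweak float (2 + 1 + 2 + 1). */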
public static final int VERTEX_SIZE = 2 + 1 + 2 + 1;
public static final int SPRITE_SIZE = 4 * VERTEX_SIZE;
private final float[] vertices = new float[SPRITE_SIZE];
private float x, y;
private float width, height;
private float originX, originY;
private float rotation;
private float scaleX = 1, scaleY = 1;
private boolean dirty = true;
private Rectangle bounds;
/** Creates an uninitialized sprite. The sprite will need a texture region and bounds set before it can be drawn. */
public ColorfulSprite() {
setTweakedColor(Palette.GRAY, TWEAK_RESET);
}
/** Creates a sprite with width, height, and texture region equal to the size of the texture.
* @param texture A Texture that will be used in full for this ColorfulSprite.
*/
public ColorfulSprite(Texture texture) {
this(texture, 0, 0, texture.getWidth(), texture.getHeight());
}
/** Creates a sprite with width, height, and texture region equal to the specified size. The texture region's upper left corner
* will be 0,0.
* @param texture A Texture that will have some of its area used for this ColorfulSprite, starting at 0,0 in the upper left.
* @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn.
* @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */
public ColorfulSprite(Texture texture, int srcWidth, int srcHeight) {
this(texture, 0, 0, srcWidth, srcHeight);
}
/** Creates a sprite with width, height, and texture region equal to the specified size.
* @param texture A Texture that will have some of its area used for this ColorfulSprite, starting at srcX,srcY in the upper left.
* @param srcX The x-coordinate for the upper left corner of the region to use.
* @param srcY The y-coordinate for the upper left corner of the region to use.
* @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn.
* @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */
public ColorfulSprite(Texture texture, int srcX, int srcY, int srcWidth, int srcHeight) {
if (texture == null) throw new IllegalArgumentException("texture cannot be null.");
setTexture(texture);
setRegion(srcX, srcY, srcWidth, srcHeight);
setTweakedColor(Palette.GRAY, TWEAK_RESET);
setSize(Math.abs(srcWidth), Math.abs(srcHeight));
setOrigin(width * 0.5f, height * 0.5f);
}
/** Creates a sprite based on a specific TextureRegion, the new sprite's region is a copy of the parameter region - altering one
* does not affect the other
* @param region A TextureRegion that will have relevant data copied into this ColorfulSprite.
*/
public ColorfulSprite(TextureRegion region) {
setRegion(region);
setTweakedColor(Palette.GRAY, TWEAK_RESET);
setSize(region.getRegionWidth(), region.getRegionHeight());
setOrigin(width * 0.5f, height * 0.5f);
}
/** Creates a sprite with width, height, and texture region equal to the specified size, relative to specified sprite's texture
* region.
* @param region A TextureRegion that this will use for its Texture and as a basis for the relative coordinates in that Texture.
* @param srcX Number of pixels to add to the texture coordinates of {@code region} on the x-axis.
* @param srcY Number of pixels to add to the texture coordinates of {@code region} on the y-axis.
* @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn.
* @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */
public ColorfulSprite(TextureRegion region, int srcX, int srcY, int srcWidth, int srcHeight) {
setRegion(region, srcX, srcY, srcWidth, srcHeight);
setTweakedColor(Palette.GRAY, TWEAK_RESET);
setSize(Math.abs(srcWidth), Math.abs(srcHeight));
setOrigin(width * 0.5f, height * 0.5f);
}
/** Creates a colorfulSprite that is a copy in every way of the specified colorfulSprite.
* @param colorfulSprite A ColorfulSprite that will be copied exactly.
*/
public ColorfulSprite(ColorfulSprite colorfulSprite) {
set(colorfulSprite);
}
/** Make this colorfulSprite a copy in every way of the specified colorfulSprite
* @param colorfulSprite A ColorfulSprite that will be copied exactly.
*/
public void set (ColorfulSprite colorfulSprite) {
if (colorfulSprite == null) throw new IllegalArgumentException("colorfulSprite cannot be null.");
System.arraycopy(colorfulSprite.vertices, 0, vertices, 0, SPRITE_SIZE);
setRegion(colorfulSprite);
x = colorfulSprite.x;
y = colorfulSprite.y;
width = colorfulSprite.width;
height = colorfulSprite.height;
setRegionWidth(colorfulSprite.getRegionWidth());
setRegionHeight(colorfulSprite.getRegionHeight());
originX = colorfulSprite.originX;
originY = colorfulSprite.originY;
rotation = colorfulSprite.rotation;
scaleX = colorfulSprite.scaleX;
scaleY = colorfulSprite.scaleY;
setTweakedColor(colorfulSprite.vertices[C1], colorfulSprite.vertices[T1]);
if(colorfulSprite.bounds != null)
bounds = new Rectangle(colorfulSprite.bounds);
dirty = colorfulSprite.dirty;
}
/** Sets the position and size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale
* are changed, it is slightly more efficient to set the bounds after those operations.
* @param x The x-position of the ColorfulSprite in world space.
* @param y The y-position of the ColorfulSprite in world space.
* @param width The width to display the ColorfulSprite with.
* @param height The height to display the ColorfulSprite with.
*/
public void setBounds (float x, float y, float width, float height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
if (dirty) return;
float x2 = x + width;
float y2 = y + height;
float[] vertices = this.vertices;
vertices[X1] = x;
vertices[Y1] = y;
vertices[X2] = x;
vertices[Y2] = y2;
vertices[X3] = x2;
vertices[Y3] = y2;
vertices[X4] = x2;
vertices[Y4] = y;
if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true;
}
/** Sets the size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale are changed,
* it is slightly more efficient to set the size after those operations. If both position and size are to be changed, it is
* better to use {@link #setBounds(float, float, float, float)}.
* @param width The width to display the ColorfulSprite with.
* @param height The height to display the ColorfulSprite with.
*/
public void setSize (float width, float height) {
this.width = width;
this.height = height;
if (dirty) return;
float x2 = x + width;
float y2 = y + height;
float[] vertices = this.vertices;
vertices[X1] = x;
vertices[Y1] = y;
vertices[X2] = x;
vertices[Y2] = y2;
vertices[X3] = x2;
vertices[Y3] = y2;
vertices[X4] = x2;
vertices[Y4] = y;
if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true;
}
/** Sets the position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient
* to set the position after those operations. If both position and size are to be changed, it is better to use
* {@link #setBounds(float, float, float, float)}.
* @param x The x-position of the ColorfulSprite in world space.
* @param y The y-position of the ColorfulSprite in world space.
*/
public void setPosition (float x, float y) {
translate(x - this.x, y - this.y);
}
/** Sets the position where the sprite will be drawn, relative to its current origin.
* @param x The adjustment to make to the x-position, relative to the current origin.
* @param y The adjustment to make to the y-position, relative to the current origin.
*/
public void setOriginBasedPosition (float x, float y) {
setPosition(x - this.originX, y - this.originY);
}
/** Sets the x position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient
* to set the position after those operations. If both position and size are to be changed, it is better to use
* {@link #setBounds(float, float, float, float)}.
* @param x The x-position of the ColorfulSprite in world space.
*/
public void setX (float x) {
translateX(x - this.x);
}
/** Sets the y position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient
* to set the position after those operations. If both position and size are to be changed, it is better to use
* {@link #setBounds(float, float, float, float)}.
* @param y The y-position of the ColorfulSprite in world space.
*/
public void setY (float y) {
translateY(y - this.y);
}
/** Sets the x position so that it is centered on the given x parameter.
* @param x The x-position of the center of the ColorfulSprite in world space.
*/
public void setCenterX (float x) {
setX(x - width * 0.5f);
}
/** Sets the y position so that it is centered on the given y parameter.
* @param y The y-position of the center of the ColorfulSprite in world space.
*/
public void setCenterY (float y) {
setY(y - height * 0.5f);
}
/** Sets the position so that the sprite is centered on (x, y).
* @param x The x-position of the center of the ColorfulSprite in world space.
* @param y The y-position of the center of the ColorfulSprite in world space.
*/
public void setCenter (float x, float y) {
setCenterX(x);
setCenterY(y);
}
/** Sets the x position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are
* changed, it is slightly more efficient to translate after those operations.
* @param xAmount How much to move the ColorfulSprite on the x-axis, in world space.
*/
public void translateX (float xAmount) {
this.x += xAmount;
if (dirty) return;
float[] vertices = this.vertices;
vertices[X1] += xAmount;
vertices[X2] += xAmount;
vertices[X3] += xAmount;
vertices[X4] += xAmount;
}
/** Sets the y position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are
* changed, it is slightly more efficient to translate after those operations.
* @param yAmount How much to move the ColorfulSprite on the y-axis, in world space.
*/
public void translateY (float yAmount) {
y += yAmount;
if (dirty) return;
float[] vertices = this.vertices;
vertices[Y1] += yAmount;
vertices[Y2] += yAmount;
vertices[Y3] += yAmount;
vertices[Y4] += yAmount;
}
/** Sets the position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are
* changed, it is slightly more efficient to translate after those operations.
* @param xAmount How much to move the ColorfulSprite on the x-axis, in world space.
* @param yAmount How much to move the ColorfulSprite on the y-axis, in world space.
*/
public void translate (float xAmount, float yAmount) {
x += xAmount;
y += yAmount;
if (dirty) return;
float[] vertices = this.vertices;
vertices[X1] += xAmount;
vertices[Y1] += yAmount;
vertices[X2] += xAmount;
vertices[Y2] += yAmount;
vertices[X3] += xAmount;
vertices[Y3] += yAmount;
vertices[X4] += xAmount;
vertices[Y4] += yAmount;
}
/** Sets the color used to tint this sprite. Default is {@link Palette#GRAY}, which makes no changes to the color.
* Use {@link ColorTools#rgb(float, float, float, float)} or a predefined color from {@link Palette} if you
* don't have a color currently.
* @param color the packed float color used to add red, green, and blue to the current sprite, as well as the multiplier for alpha
*/
public void setColor (final float color) {
float[] vertices = this.vertices;
vertices[C1] = color;
vertices[C2] = color;
vertices[C3] = color;
vertices[C4] = color;
}
/** Sets the color used to tint this sprite and the tweak that affects how that color will be treated.
* Default color is {@link Palette#GRAY}, which makes no changes to the color, and default tweak is
* {@link ColorfulBatch#TWEAK_RESET}, which resets any changes to the tweak back to a neutral state. You can easily
* get a tweak value with {@link ColorTools#rgb(float, float, float, float)}, just using the last parameter
* to represent contrast.
* @param color the packed float color used to add red, green, and blue to the current sprite, as well as the multiplier for alpha
* @param tweak the packed float used to multiply red, green, and blue, as well as the setting for contrast
*/
public void setTweakedColor (final float color, final float tweak) {
float[] vertices = this.vertices;
vertices[C1] = color;
vertices[C2] = color;
vertices[C3] = color;
vertices[C4] = color;
vertices[T1] = tweak;
vertices[T2] = tweak;
vertices[T3] = tweak;
vertices[T4] = tweak;
}
/** Sets the color used to tint this sprite and the tweak that affects how that color will be treated.
* Default color is {@link Palette#GRAY}, which makes no changes to the color, and default tweak is
* {@link ColorfulBatch#TWEAK_RESET}, which resets any changes to the tweak back to a neutral state. You can easily
* get a tweak value with {@link ColorTools#rgb(float, float, float, float)}, just using the last parameter
* to represent contrast.
* @param redAdd how much red to add; darkest is 0f, neutral is 0.5f, lightest is 1f
* @param greenAdd how much green to add; darkest is 0f, neutral is 0.5f, lightest is 1f
* @param blueAdd how much blue to add; darkest is 0f, neutral is 0.5f, lightest is 1f
* @param alphaMul how much to multiply alpha by; fully transparent is 0f, neutral is 1f
* @param redMul how much source red should be multiplied by; eliminates at 0f, neutral is 0.5f, emphasizes at 1f
* @param greenMul how much source green should be multiplied by; eliminates at 0f, neutral is 0.5f, emphasizes at 1f
* @param blueMul how much source blue should be multiplied by; eliminates at 0f, neutral is 0.5f, emphasizes at 1f
* @param contrast how to affect the curvature of lightness in the source; 0f makes lightness very even, 0.5f doesn't change lightness, and 1f makes light colors lighter and dark colors darker
*/
public void setTweakedColor (float redAdd, float greenAdd, float blueAdd, float alphaMul,
float redMul, float greenMul, float blueMul, float contrast) {
final float color = ColorTools.rgb(redAdd, greenAdd, blueAdd, alphaMul),
tweak = ColorTools.rgb(redMul, greenMul, blueMul, contrast);
float[] vertices = this.vertices;
vertices[C1] = color;
vertices[C2] = color;
vertices[C3] = color;
vertices[C4] = color;
vertices[T1] = tweak;
vertices[T2] = tweak;
vertices[T3] = tweak;
vertices[T4] = tweak;
}
/** Sets the tweak that affects how the rendered color will be treated.
* Default tweak is {@link ColorfulBatch#TWEAK_RESET}, which resets any changes to the tweak back to a neutral
* state. You can easily get a tweak value with {@link ColorTools#rgb(float, float, float, float)}, just
* using the last parameter to represent contrast.
* @param tweak the packed float used to multiply red, green, and blue, as well as the setting for contrast
*/
public void setTweak (final float tweak) {
float[] vertices = this.vertices;
vertices[T1] = tweak;
vertices[T2] = tweak;
vertices[T3] = tweak;
vertices[T4] = tweak;
}
/**
* Sets the additive color of the sprite using the given RGBA Color.
* @param color a libGDX RGBA8888 Color
*/
public void setColor (Color color) {
setColor(color.toFloatBits());
}
/** Sets the alpha portion of the color used to tint this sprite. */
public void setAlpha (float a) {
final float color = FloatColors.setAlpha(getColor(), a);
final float[] vertices = this.vertices;
vertices[C1] = color;
vertices[C2] = color;
vertices[C3] = color;
vertices[C4] = color;
}
/** @see #setColor(float) */
public void setColor (float red, float green, float blue, float alpha) {
final float color = ColorTools.rgb(red, green, blue, alpha);
final float[] vertices = this.vertices;
vertices[C1] = color;
vertices[C2] = color;
vertices[C3] = color;
vertices[C4] = color;
}
/** @see #setTweak(float) */
public void setTweak (float red, float green, float blue, float contrast) {
final float tweak = ColorTools.rgb(red, green, blue, contrast);
final float[] vertices = this.vertices;
// Write to the tweak slots (T1..T4), not the color slots (C1..C4).
vertices[T1] = tweak;
vertices[T2] = tweak;
vertices[T3] = tweak;
vertices[T4] = tweak;
}
/** Exactly the same as {@link #setColor(float)}.
* @see #setColor(float)
*/
public void setPackedColor (float packedColor) {
float[] vertices = this.vertices;
vertices[C1] = packedColor;
vertices[C2] = packedColor;
vertices[C3] = packedColor;
vertices[C4] = packedColor;
}
/** Sets the origin in relation to the sprite's position for scaling and rotation. */
public void setOrigin (float originX, float originY) {
this.originX = originX;
this.originY = originY;
dirty = true;
}
/** Place origin in the center of the sprite */
public void setOriginCenter() {
this.originX = width * 0.5f;
this.originY = height * 0.5f;
dirty = true;
}
/** Sets the rotation of the sprite in degrees. Rotation is centered on the origin set in {@link #setOrigin(float, float)} */
public void setRotation (float degrees) {
this.rotation = degrees;
dirty = true;
}
/** @return the rotation of the sprite in degrees */
public float getRotation () {
return rotation;
}
/** Sets the sprite's rotation in degrees relative to the current rotation. Rotation is centered on the origin set in
* {@link #setOrigin(float, float)} */
public void rotate (float degrees) {
if (degrees == 0) return;
rotation += degrees;
dirty = true;
}
/** Rotates this sprite 90 degrees in-place by rotating the texture coordinates. This rotation is unaffected by
* {@link #setRotation(float)} and {@link #rotate(float)}. */
public void rotate90 (boolean clockwise) {
float[] vertices = this.vertices;
if (clockwise) {
float temp = vertices[V1];
vertices[V1] = vertices[V4];
vertices[V4] = vertices[V3];
vertices[V3] = vertices[V2];
vertices[V2] = temp;
temp = vertices[U1];
vertices[U1] = vertices[U4];
vertices[U4] = vertices[U3];
vertices[U3] = vertices[U2];
vertices[U2] = temp;
} else {
float temp = vertices[V1];
vertices[V1] = vertices[V2];
vertices[V2] = vertices[V3];
vertices[V3] = vertices[V4];
vertices[V4] = temp;
temp = vertices[U1];
vertices[U1] = vertices[U2];
vertices[U2] = vertices[U3];
vertices[U3] = vertices[U4];
vertices[U4] = temp;
}
}
/** Sets the sprite's scale for both X and Y uniformly. The sprite scales out from the origin. This will not affect the values
* returned by {@link #getWidth()} and {@link #getHeight()} */
public void setScale (float scaleXY) {
this.scaleX = scaleXY;
this.scaleY = scaleXY;
dirty = true;
}
/** Sets the sprite's scale for both X and Y. The sprite scales out from the origin. This will not affect the values returned by
* {@link #getWidth()} and {@link #getHeight()} */
public void setScale (float scaleX, float scaleY) {
this.scaleX = scaleX;
this.scaleY = scaleY;
dirty = true;
}
/** Sets the sprite's scale relative to the current scale. For example: original scale 2 → sprite.scale(4) → final scale 6.
* The sprite scales out from the origin. This will not affect the values returned by {@link #getWidth()} and
* {@link #getHeight()} */
public void scale (float amount) {
this.scaleX += amount;
this.scaleY += amount;
dirty = true;
}
/** Returns the packed vertices, colors, and texture coordinates for this sprite. */
public float[] getVertices () {
if (dirty) {
dirty = false;
float[] vertices = this.vertices;
float localX = -originX;
float localY = -originY;
float localX2 = localX + width;
float localY2 = localY + height;
float worldOriginX = this.x - localX;
float worldOriginY = this.y - localY;
if (scaleX != 1 || scaleY != 1) {
localX *= scaleX;
localY *= scaleY;
localX2 *= scaleX;
localY2 *= scaleY;
}
if (rotation != 0) {
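// Rotate each scaled local corner around the origin, then translate into world space.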
final float cos = MathUtils.cosDeg(rotation);
final float sin = MathUtils.sinDeg(rotation);
final float localXCos = localX * cos;
final float localXSin = localX * sin;
final float localYCos = localY * cos;
final float localYSin = localY * sin;
final float localX2Cos = localX2 * cos;
final float localX2Sin = localX2 * sin;
final float localY2Cos = localY2 * cos;
final float localY2Sin = localY2 * sin;
final float x1 = localXCos - localYSin + worldOriginX;
final float y1 = localYCos + localXSin + worldOriginY;
vertices[X1] = x1;
vertices[Y1] = y1;
final float x2 = localXCos - localY2Sin + worldOriginX;
final float y2 = localY2Cos + localXSin + worldOriginY;
vertices[X2] = x2;
vertices[Y2] = y2;
final float x3 = localX2Cos - localY2Sin + worldOriginX;
final float y3 = localY2Cos + localX2Sin + worldOriginY;
vertices[X3] = x3;
vertices[Y3] = y3;
vertices[X4] = x1 + (x3 - x2);
vertices[Y4] = y3 - (y2 - y1);
} else {
final float x1 = localX + worldOriginX;
final float y1 = localY + worldOriginY;
final float x2 = localX2 + worldOriginX;
final float y2 = localY2 + worldOriginY;
vertices[X1] = x1;
vertices[Y1] = y1;
vertices[X2] = x1;
vertices[Y2] = y2;
vertices[X3] = x2;
vertices[Y3] = y2;
vertices[X4] = x2;
vertices[Y4] = y1;
}
}
return vertices;
}
/** Returns the bounding axis aligned {@link Rectangle} that bounds this sprite. The rectangle's x and y coordinates describe its
* bottom left corner. If you change the position or size of the sprite, you have to fetch the rectangle again for it to be
* recomputed.
*
* @return the bounding Rectangle */
public Rectangle getBoundingRectangle () {
final float[] vertices = getVertices();
float minx = vertices[X1];
float miny = vertices[Y1];
float maxx = vertices[X1];
float maxy = vertices[Y1];
minx = Math.min(minx, vertices[X2]);
minx = Math.min(minx, vertices[X3]);
minx = Math.min(minx, vertices[X4]);
maxx = Math.max(maxx, vertices[X2]);
maxx = Math.max(maxx, vertices[X3]);
maxx = Math.max(maxx, vertices[X4]);
miny = Math.min(miny, vertices[Y2]);
miny = Math.min(miny, vertices[Y3]);
miny = Math.min(miny, vertices[Y4]);
maxy = Math.max(maxy, vertices[Y2]);
maxy = Math.max(maxy, vertices[Y3]);
maxy = Math.max(maxy, vertices[Y4]);
if (bounds == null) bounds = new Rectangle();
bounds.x = minx;
bounds.y = miny;
bounds.width = maxx - minx;
bounds.height = maxy - miny;
return bounds;
}
public void draw (ColorfulBatch batch) {
batch.drawExactly(getTexture(), getVertices(), 0, SPRITE_SIZE);
}
public void draw (ColorfulBatch batch, float alphaModulation) {
final float oldAlpha = ColorTools.alpha(getColor());
setAlpha(oldAlpha * alphaModulation);
draw(batch);
setAlpha(oldAlpha);
}
public float getX () {
return x;
}
public float getY () {
return y;
}
/** @return the width of the sprite, not accounting for scale. */
public float getWidth () {
return width;
}
/** @return the height of the sprite, not accounting for scale. */
public float getHeight () {
return height;
}
/** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling
* {@link #setScale(float, float)} */
public float getOriginX () {
return originX;
}
/** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling
* {@link #setScale(float, float)} */
public float getOriginY () {
return originY;
}
/** X scale of the sprite, independent of size set by {@link #setSize(float, float)} */
public float getScaleX () {
return scaleX;
}
/** Y scale of the sprite, independent of size set by {@link #setSize(float, float)} */
public float getScaleY () {
return scaleY;
}
/** Returns the color of this sprite as a packed float. The primitive is a copy, so manipulating the returned value has no
* effect on the sprite unless {@link #setColor(float)} is called afterward.
* @return a packed float color used to add red, green, and blue to the current sprite, as well as the multiplier for alpha
*/
public float getColor () {
return vertices[C1];
}
/**
* Returns the multiplicative color tweaks used by this sprite, as a packed float with the same format as a color.
* @return a packed float used to multiply red, green, and blue, as well as the setting for contrast
*/
public float getColorTweak () {
return vertices[T1];
}
public void setRegion (float u, float v, float u2, float v2) {
super.setRegion(u, v, u2, v2);
float[] vertices = this.vertices;
vertices[U1] = u;
vertices[V1] = v2;
vertices[U2] = u;
vertices[V2] = v;
vertices[U3] = u2;
vertices[V3] = v;
vertices[U4] = u2;
vertices[V4] = v2;
}
public void setU (float u) {
super.setU(u);
vertices[U1] = u;
vertices[U2] = u;
}
public void setV (float v) {
super.setV(v);
vertices[V2] = v;
vertices[V3] = v;
}
public void setU2 (float u2) {
super.setU2(u2);
vertices[U3] = u2;
vertices[U4] = u2;
}
public void setV2 (float v2) {
super.setV2(v2);
vertices[V1] = v2;
vertices[V4] = v2;
}
/** Set the sprite's flip state regardless of current condition
* @param x the desired horizontal flip state
* @param y the desired vertical flip state */
public void setFlip (boolean x, boolean y) {
boolean performX = false;
boolean performY = false;
if (isFlipX() != x) {
performX = true;
}
if (isFlipY() != y) {
performY = true;
}
flip(performX, performY);
}
/** boolean parameters x,y are not setting a state, but performing a flip
* @param x perform horizontal flip
* @param y perform vertical flip */
public void flip (boolean x, boolean y) {
super.flip(x, y);
float[] vertices = this.vertices;
if (x) {
float temp = vertices[U1];
vertices[U1] = vertices[U3];
vertices[U3] = temp;
temp = vertices[U2];
vertices[U2] = vertices[U4];
vertices[U4] = temp;
}
if (y) {
float temp = vertices[V1];
vertices[V1] = vertices[V3];
vertices[V3] = temp;
temp = vertices[V2];
vertices[V2] = vertices[V4];
vertices[V4] = temp;
}
}
public void scroll (float xAmount, float yAmount) {
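// Offsets the texture coordinates, wrapping at 1; visible tiling typically requires the texture's wrap mode to be Repeat.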
float[] vertices = this.vertices;
if (xAmount != 0) {
float u = (vertices[U1] + xAmount) % 1;
float u2 = u + width / getTexture().getWidth();
setU(u);
setU2(u2);
vertices[U1] = u;
vertices[U2] = u;
vertices[U3] = u2;
vertices[U4] = u2;
}
if (yAmount != 0) {
float v = (vertices[V2] + yAmount) % 1;
float v2 = v + height / getTexture().getHeight();
setV(v);
setV2(v2);
vertices[V1] = v2;
vertices[V2] = v;
vertices[V3] = v;
vertices[V4] = v2;
}
}
}
|
def finish_render():
global _window
_window.flip() |
package com.bfsi.mfi.service.impl;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.stereotype.Service;
import com.bfsi.mfi.dao.ReferenceCodeDao;
import com.bfsi.mfi.entity.RefCode;
import com.bfsi.mfi.exception.ServiceException;
import com.bfsi.mfi.service.ReferenceCodeService;
import com.bfsi.mfi.vo.RefCodeVO;
/**
* @author <NAME>
*
*/
@Service(value = "refcodeService")
public class ReferenceCodeServiceImpl extends MaintenanceServiceImpl<RefCodeVO, RefCode>
implements ReferenceCodeService {
@Autowired
private ReferenceCodeDao referenceCodeDao;
@Override
public RefCodeVO update(RefCodeVO vo) throws ServiceException {
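// Presumably flags the record as unauthorized ("U") so it must be re-authorized after this update.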
vo.setAuthStatus("U");
try {
return super.update(vo);
} catch (DataAccessException e) {
throw new ServiceException("DataAccessException while updating auth status", e);
}
}
@SuppressWarnings("unchecked")
@Override
protected ReferenceCodeDao getMaintenanceDao() {
return referenceCodeDao;
}
protected RefCode getEntity(RefCodeVO vo) {
return vo.getEntity();
}
@Override
protected RefCodeVO getValueObject(RefCode entity) {
RefCodeVO refCodeVO = new RefCodeVO(entity);
return refCodeVO;
}
@Override
public List<RefCodeVO> getAuthorized() {
List<RefCode> refcode = referenceCodeDao.getAuthorized();
if (refcode != null) {
List<RefCodeVO> refCodeVOs = new ArrayList<RefCodeVO>(refcode.size());
for (RefCode lov : refcode) {
RefCodeVO refCodeVO = new RefCodeVO();
BeanUtils.copyProperties(lov, refCodeVO);
refCodeVOs.add(refCodeVO);
}
return refCodeVOs;
} else {
return new ArrayList<RefCodeVO>();
}
}
}
|
from fractions import *
from math import *
# d2={}
# if 1==1:
# a=int(pow(2,25)-1)
#
# s=0
# lst=[]
# for b in range(1,a):
# lst.append(gcd(a^b,a&b))
# lst.sort()
#
# print a,lst[-1]
# d2[a]=lst[-1]
#print d2
tlst=[3, 7, 15, 31, 63, 127, 255, 511, 1023, 2047, 4095, 8191, 16383, 32767, 65535, 131071, 262143, 524287, 1048575, 2097151, 4194303, 8388607, 16777215]
q=input()
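# dic appears to cache precomputed answers for n of the form 2^k - 1
# (generated by the commented-out brute-force search above); any other n
# falls through to the closed-form 2^ceil(log2(n+1)) - 1 branch below.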
dic={3: 1, 1023: 341, 4194303: 1398101, 15: 5, 16383: 5461, 262143: 87381, 524287: 1, 32767: 4681, 4095: 1365, 2047: 89, 7: 1, 1048575: 349525, 31: 1, 63: 21, 65535: 21845, 2097151: 299593, 16777215: 5592405, 8191: 1, 127: 1, 255: 85, 511: 73, 8388607: 178481, 131071: 1}
dic[33554431]= 1082401
for _ in range(0,q):
#n=pow(2,25)-1
n=input()
if n in dic:
print dic[n]
else:
print int(pow(2,ceil(log(n+1,2)))-1)
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#include "core/common/logging/logging.h"
#include "core/framework/utils.h"
#include "core/session/inference_session.h"
#include "test/framework/test_utils.h"
#include "test/test_environment.h"
#include "test/providers/internal_testing/internal_testing_execution_provider.h"
#include "test/util/include/asserts.h"
#include "test/util/include/inference_session_wrapper.h"
#include "test/util/include/test_utils.h"
#include "gtest/gtest.h"
#include "gmock/gmock.h"
using namespace ONNX_NAMESPACE;
using namespace onnxruntime::logging;
namespace onnxruntime {
namespace test {
static void CreateSession(const SessionOptions& so, std::unique_ptr<InferenceSessionWrapper>& session,
const ORTCHAR_T* model_path = ORT_TSTR("testdata/mnist.onnx"), // arbitrary test model
bool enable_custom_ep = true,
const std::unordered_set<std::string>* override_supported_ops = nullptr) {
session = std::make_unique<InferenceSessionWrapper>(so, GetEnvironment());
// set supported ops to ops that are ideally found consecutively in the model.
// we can say the EP potentially handles them all, but can also test removing handling of one or more ops
// at runtime to simulate a lower spec device where not all ops can be handled. this allows us to test
// that we can revert ops back to the CPU implementation successfully
const std::unordered_set<std::string> default_supported_ops{"Conv", "Add", "Relu", "MaxPool"};
const std::unordered_set<std::string>* supported_ops = override_supported_ops ? override_supported_ops
: &default_supported_ops;
if (enable_custom_ep) {
ASSERT_STATUS_OK(session->RegisterExecutionProvider(
std::make_unique<InternalTestingExecutionProvider>(*supported_ops)));
}
ASSERT_STATUS_OK(session->Load(model_path));
ASSERT_STATUS_OK(session->Initialize());
}
static void ExecuteMnist(InferenceSessionWrapper& session, bool custom_ep_enabled) {
// validate that we can execute the model. the dummy internal testing EP just creates empty output so the
// values in the output aren't relevant. all we care about is that we can execute the model and produce output.
OrtValue ml_value_x;
TensorShape input_shape{1, 1, 28, 28};
std::vector<float> input(input_shape.Size(), 1.f);
CreateMLValue<float>(input_shape.GetDims(), input.data(), OrtMemoryInfo(), &ml_value_x);
NameMLValMap feeds;
feeds.insert(std::make_pair("Input3", ml_value_x));
// prepare outputs
std::vector<std::string> output_names;
output_names.push_back("Plus214_Output_0");
std::vector<OrtValue> fetches;
ASSERT_STATUS_OK(session.Run(feeds, output_names, &fetches));
if (custom_ep_enabled) {
// check that the output is all zeros. the dummy EP produces output of the correct shape with all zeros, so any
// downstream operations should still result in zeros for this model
// OR it should equal the bias in the final Add operation, which is in the Parameter194 initializer
const auto& t = fetches[0].Get<Tensor>();
const auto data = t.DataAsSpan<float>();
int idx = 0;
const auto& session_state = session.GetSessionState();
ASSERT_STATUS_OK(session_state.GetOrtValueNameIdxMap().GetIdx("Parameter194", idx));
const auto& initializer = session_state.GetConstantInitializedTensors().at(idx);
const auto expected = initializer.Get<Tensor>().DataAsSpan<float>();
ASSERT_THAT(data, ::testing::ContainerEq(expected));
}
}
#if !defined(DISABLE_SPARSE_TENSORS)
#if !defined(ORT_MINIMAL_BUILD)
TEST(InternalTestingEP, TestSaveAndLoadOrtModel) {
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/mnist.internal_testing_ep.test_output.ort");
//
// First load the onnx format model and save as an ORT model.
// This should preserve the nodes the custom EP can handle.
//
std::unique_ptr<InferenceSessionWrapper> session;
SessionOptions so;
so.optimized_model_filepath = ort_model_path;
CreateSession(so, session);
// this graph should include the original nodes that the custom EP will take at runtime
auto num_nodes = session->GetGraph().NumberOfNodes();
//
// Second, load the ORT format model with just the CPU EP to make sure it can be executed. This tests that the
// fallback to the CPU EP kernel hashes works.
//
std::unique_ptr<InferenceSessionWrapper> session2;
so.optimized_model_filepath.clear();
bool enable_custom_ep = false;
CreateSession(so, session2, ort_model_path, enable_custom_ep);
const auto& graph1 = session2->GetGraph();
// model should have all the original nodes and we should be able to execute with the fallback to CPU EP
ASSERT_EQ(graph1.NumberOfNodes(), num_nodes);
ExecuteMnist(*session2, enable_custom_ep);
session2 = nullptr;
//
// Finally, load the ORT format model with the custom EP enabled. This tests that we support runtime compilation
// for the ORT format model.
//
enable_custom_ep = true;
CreateSession(so, session2, ort_model_path, enable_custom_ep);
const auto& graph2 = session2->GetGraph();
// model should be able to be loaded, and we should compile using custom ep. that will result in one node for the
// custom EP (with Conv/Add/Relu/MaxPool), one for a reshape, and one for the fused MatMul+Add.
ASSERT_EQ(graph2.NumberOfNodes(), 3);
ExecuteMnist(*session2, enable_custom_ep);
}
TEST(InternalTestingEP, PreventSaveOfModelWithCompiledOps) {
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/mnist.internal_testing_ep.ort");
// make sure we can't save a model with compiled ops. input/output model format doesn't matter
SessionOptions so;
so.optimized_model_filepath = ORT_TSTR("invalid_model.ort");
auto session = std::make_unique<InferenceSessionWrapper>(so, GetEnvironment());
const std::unordered_set<std::string> supported_ops{"Conv", "Add", "Relu", "MaxPool"};
ASSERT_STATUS_OK(session->RegisterExecutionProvider(
std::make_unique<InternalTestingExecutionProvider>(supported_ops)));
ASSERT_STATUS_OK(session->Load(ort_model_path));
auto status = session->Initialize();
ASSERT_FALSE(status.IsOK()) << "Initialize should have failed when trying to save model with compiled kernels";
ASSERT_THAT(status.ErrorMessage(), ::testing::HasSubstr("Unable to serialize model as it contains compiled nodes"));
}
#endif // !defined(ORT_MINIMAL_BUILD)
#endif // !defined(DISABLE_SPARSE_TENSORS)
// test to validate a minimal build
TEST(InternalTestingEP, TestLoadOrtModel) {
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/mnist.internal_testing_ep.ort");
std::unique_ptr<InferenceSessionWrapper> session;
bool enable_custom_ep = true;
CreateSession(SessionOptions{}, session, ort_model_path, enable_custom_ep);
ExecuteMnist(*session, enable_custom_ep);
}
// test that if the custom EP cannot take all nodes due to device limitations
// we fall back to the CPU implementations and can execute the model
TEST(InternalTestingEP, TestLoadOrtModelWithReducedOpCoverage) {
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/mnist.internal_testing_ep.ort");
const std::unordered_set<std::string> supported_ops{"Conv", "Add", "Relu" /*, "MaxPool"*/};
std::unique_ptr<InferenceSessionWrapper> session;
bool enable_custom_ep = true;
CreateSession(SessionOptions{}, session, ort_model_path, enable_custom_ep, &supported_ops);
const auto& graph = session->GetGraph();
  // Conv+Add gets fused by the level 1 optimizer into a single node. The 'Conv'/'Add'/'Relu' nodes should be compiled
  // and handled by the custom EP, with fallback to CPU for MaxPool.
ASSERT_EQ(graph.NumberOfNodes(), 6);
const auto& func_mgr = session->GetSessionState().GetFuncMgr();
NodeComputeInfo* compute_func = nullptr;
// the generated op type should have a hash for the model based on the model path
const std::string expected_op_type_prefix = "InternalTestingEP_9611636968429821767_";
int compiled_node_num = 0;
for (const auto& node : graph.Nodes()) {
EXPECT_EQ(supported_ops.count(node.OpType()), size_t(0))
<< "Nodes with supported op types should have been replaced. Node with type " << node.OpType() << " was not.";
if (node.GetExecutionProviderType() == utils::kInternalTestingExecutionProvider) {
EXPECT_STATUS_OK(func_mgr.GetFuncs(node.Name(), compute_func));
EXPECT_NE(compute_func, nullptr);
EXPECT_EQ(node.OpType(), expected_op_type_prefix + std::to_string(compiled_node_num++));
}
}
ExecuteMnist(*session, enable_custom_ep);
}
// count nodes assigned to the test EP and make sure they all have valid compute funcs
static int CountAndValidateAssignedNodes(const Graph& current_graph,
const std::unordered_set<std::string>& supported_ops,
const FuncManager& func_mgr) {
int count = 0;
for (const auto& node : current_graph.Nodes()) {
EXPECT_EQ(supported_ops.count(node.OpType()), size_t(0))
<< "Nodes with supported op types should have been replaced. Node with type " << node.OpType() << " was not.";
if (node.GetExecutionProviderType() == utils::kInternalTestingExecutionProvider) {
NodeComputeInfo* compute_func = nullptr;
EXPECT_STATUS_OK(func_mgr.GetFuncs(node.Name(), compute_func));
EXPECT_NE(compute_func, nullptr);
++count;
}
if (node.ContainsSubgraph()) {
for (const auto& entry : node.GetSubgraphs()) {
count += CountAndValidateAssignedNodes(*entry, supported_ops, func_mgr);
}
}
}
return count;
}
// Test model that contains a subgraph. This model has a Loop and an If so multiple layers of nested subgraphs.
// There are Add nodes in the Loop and If subgraphs so we should see the custom EP taking nodes at both these levels.
TEST(InternalTestingEP, TestModelWithSubgraph) {
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/ort_github_issue_4031.onnx.ort");
const std::unordered_set<std::string> supported_ops{"Add"};
std::unique_ptr<InferenceSessionWrapper> session;
bool enable_custom_ep = true;
CreateSession(SessionOptions{}, session, ort_model_path, enable_custom_ep, &supported_ops);
const auto& graph = session->GetGraph();
const auto& func_mgr = session->GetSessionState().GetFuncMgr();
int num_replaced_nodes = CountAndValidateAssignedNodes(graph, supported_ops, func_mgr);
// One Add node in the Loop. One Add node in each branch of the If inside the Loop body
ASSERT_EQ(num_replaced_nodes, 3);
OrtValue ml_value;
// this is a bit of a hack. the correct output is the input value + 2, so if we start with -2 the result is 0.
// the output from fused nodes using the testing EP is always 0, so we should match the expected output this way
// as we replace all the Add nodes with something that returns 0.
// RunAndVerifyOutputsWithEP checks that nodes are assigned to the EP so we know it's being used to execute the model
CreateMLValue<float>(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), {1}, {-2.f},
&ml_value);
NameMLValMap feeds;
feeds.insert(std::make_pair("state_var_in", ml_value));
// compare outputs from CPU EP vs custom EP
RunAndVerifyOutputsWithEP(ort_model_path,
"InternalTestingEP.TestModelWithSubgraph",
std::make_unique<InternalTestingExecutionProvider>(supported_ops),
feeds);
}
// A custom InternalTestingEP extension.
// This tests that execution falls back to the CPU EP if Compile fails, for the ORT format.
// This EP takes an additional compile_failure_ops set.
// If any node in a partition passed to Compile() is also in compile_failure_ops,
// Compile will fail.
class CompileFailureTestExecutionProvider : public InternalTestingExecutionProvider {
public:
CompileFailureTestExecutionProvider(const std::unordered_set<std::string>& supported_ops,
const std::unordered_set<std::string>& compile_failure_ops);
virtual ~CompileFailureTestExecutionProvider() = default;
Status Compile(const std::vector<FusedNodeAndGraph>& fused_nodes,
std::vector<NodeComputeInfo>& node_compute_funcs) override;
private:
std::unordered_set<std::string> compile_failure_ops_;
};
CompileFailureTestExecutionProvider::CompileFailureTestExecutionProvider(
const std::unordered_set<std::string>& supported_ops,
const std::unordered_set<std::string>& compile_failure_ops)
: InternalTestingExecutionProvider(supported_ops),
compile_failure_ops_(compile_failure_ops) {}
Status CompileFailureTestExecutionProvider::Compile(const std::vector<FusedNodeAndGraph>& fused_nodes,
std::vector<NodeComputeInfo>& node_compute_funcs) {
for (const auto& fused_node_and_graph : fused_nodes) {
    // If any node in this partition is also in compile_failure_ops_, Compile will fail
const onnxruntime::GraphViewer& graph_viewer(fused_node_and_graph.filtered_graph);
for (const auto& node : graph_viewer.Nodes()) {
if (compile_failure_ops_.find(node.OpType()) != compile_failure_ops_.end()) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL,
"CompileFailureTestExecutionProvider::Compile failed for node: ", node.Name());
}
}
}
return InternalTestingExecutionProvider::Compile(fused_nodes, node_compute_funcs);
}
TEST(InternalTestingEP, TestOrtModelWithCompileFailure) {
  // In the test file there are 2 Conv nodes and 1 Gemm node, all disconnected,
  // so 3 partitions should be taken by InternalTestingExecutionProvider/CompileFailureTestExecutionProvider.
  // CompileFailureTestExecutionProvider will fail Compile for the partition containing the "Gemm" node.
  // This tests that model initialization won't fail and that the Gemm node will not be replaced by a fused node.
const ORTCHAR_T* ort_model_path = ORT_TSTR("testdata/mnist.internal_testing_ep.ort");
  const std::unordered_set<std::string> supported_ops{"Conv", "Gemm"};
  const std::unordered_set<std::string> compile_failure_ops{"Gemm"};
// Use InternalTestingExecutionProvider
// We should have 3 partitions taken by the EP
// 2 Conv and 1 Gemm
{
InferenceSessionWrapper session(SessionOptions(), GetEnvironment());
ASSERT_STATUS_OK(session.RegisterExecutionProvider(
std::make_unique<InternalTestingExecutionProvider>(supported_ops)));
ASSERT_STATUS_OK(session.Load(ort_model_path));
ASSERT_STATUS_OK(session.Initialize());
int num_replaced_nodes = CountAndValidateAssignedNodes(
session.GetGraph(), supported_ops, session.GetSessionState().GetFuncMgr());
ASSERT_EQ(num_replaced_nodes, 3);
}
// Use CompileFailureTestExecutionProvider which will fail Compile on "Gemm"
// We should have 2 partitions taken by the EP
// 2 Conv
{
InferenceSessionWrapper session(SessionOptions(), GetEnvironment());
ASSERT_STATUS_OK(session.RegisterExecutionProvider(
std::make_unique<CompileFailureTestExecutionProvider>(supported_ops, compile_failure_ops)));
ASSERT_STATUS_OK(session.Load(ort_model_path));
ASSERT_STATUS_OK(session.Initialize());
    // 2 Conv nodes should be replaced with fused nodes
const auto& graph = session.GetGraph();
int num_replaced_nodes = CountAndValidateAssignedNodes(
session.GetGraph(), {"Conv"}, session.GetSessionState().GetFuncMgr());
ASSERT_EQ(num_replaced_nodes, 2);
// The Gemm node should still not have been replaced
int count_compile_failure_nodes = 0;
for (const auto& node : graph.Nodes()) {
if (compile_failure_ops.find(node.OpType()) != compile_failure_ops.end())
count_compile_failure_nodes++;
}
ASSERT_EQ(count_compile_failure_nodes, 1);
// Execute the session, since the last node is Gemm, and its input 0 is all 0s
// So the result should be the bias initializer of the Gemm node
ExecuteMnist(session, true /* enable_custom_ep */);
}
}
} // namespace test
} // namespace onnxruntime
|
/*
Copyright 2011 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jepl.impl.lex;
/**
*
* @author jmarranz
*/
public class JDBCParamWithNameToken extends JDBCParamToken
{
protected Identifier identifier;
/**
     * Creates a new instance of JDBCParamWithNameToken
*/
public JDBCParamWithNameToken(Cursor cursor)
{
super(cursor.getCurrentPos());
parse(cursor);
}
public static boolean isJDBCParamWithNameToken(char c,Cursor cursor)
{
if (c != ':') return false;
        // Check whether the next character starts an identifier
        if (cursor.isLastPos()) return false; // there is no identifier following
char c2 = cursor.getNextChar();
if (!Identifier.isIdentifierStart(c2)) return false;
return true;
}
@Override
public String toString()
{
return ":" + identifier.toString();
}
public String getName()
{
return identifier.toString();
}
public void parse(Cursor cursor)
{
StringBuilder valueTmp = new StringBuilder();
        valueTmp.append( cursor.getCurrentChar() ); // the ':'
        cursor.inc(); // start of the identifier
this.identifier = new Identifier(cursor);
        this.end = identifier.getEnd(); // points to the last character of the identifier
}
}
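// A minimal usage sketch (assuming a Cursor positioned on the ':' of a named
// parameter such as ":userId" inside a JEPL sentence; the surrounding code is hypothetical):
//
//   char c = cursor.getCurrentChar();
//   if (JDBCParamWithNameToken.isJDBCParamWithNameToken(c, cursor)) {
//       JDBCParamWithNameToken token = new JDBCParamWithNameToken(cursor);
//       String name = token.getName(); // "userId"
//   }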
|
// A calculator that converts a Matrix M to a vector containing all the
// entries of M in column-major order.
//
// Example config:
// node {
// calculator: "MatrixToVectorCalculator"
// input_stream: "input_matrix"
// output_stream: "column_major_vector"
// }
class MatrixToVectorCalculator : public Node {
public:
static constexpr Input<Matrix> kIn{""};
static constexpr Output<std::vector<float>> kOut{""};
MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
mediapipe::Status Process(CalculatorContext* cc) override;
}; |
/**
* Contains tests that test DDL scripts.
*/
public class DDLScriptsTest {
/**
     * Tests that the DB schema is created properly using DDL scripts.
     * @throws IOException
     * @throws SQLException
*/
@Test
public void testCreateSchema() throws IOException, SQLException {
// Clear schema.
TestsUtil.clearSchema();
final TestPersistenceContext scriptRunnerContext = new TestPersistenceContext();
scriptRunnerContext.init(PersistenceUnit.SCRIPT_RUNNER);
try {
scriptRunnerContext.executeScripts(new File(getClass().getResource("/ddl-scripts").getFile()));
} finally {
scriptRunnerContext.clean();
}
final TestPersistenceContext dbTestingContext = new TestPersistenceContext();
dbTestingContext.init(PersistenceUnit.DB_TESTING);
try {
dbTestingContext.startAndPersistSomeProcess("minimalProcess");
            Assert.assertEquals(1, dbTestingContext.getStoredProcessesCount());
} finally {
dbTestingContext.clean();
}
}
} |
<filename>main.h
#pragma once
#include "snoutlib/glfwapp.h"
#include "snoutlib/settings.h"
#include "snoutlib/loadingscreen.h"
#include "snoutlib/timer.h"
#include "snoutlib/misc.h"
#include "snoutlib/mfont.h"
#include "snoutlib/menu.h"
#include "snoutlib/staticmesh.h"
#include "snoutlib/gldefs.h"
#include "snoutlib/particles.h"
#include "pe_smoke.h"
#include "pe_bubbles.h"
#include "resources.h"
#include "gamemenu.h"
#include "layout.h"
#include "background.h"
#include "boat.h"
#include "pboat.h"
#include "credits.h"
#include "hiscore.h"
#include "torpedo.h"
#include "game.h"
class App {
Glfwapp *m_ctx;
FPScounter *m_fpscounter;
Background *m_background;
Credits *m_credits;
public:
GameMenu *m_gamemenu;
HiScore *m_hiscore;
private:
void init_settings(void);
void gl_check(void);
static void mousebuttoncb_wrapper(int button, int action);
void mousebutton_cb(int button,int action);
static void mouseposcb_wrapper(int x, int y);
void mousepos_cb(int x,int y);
static void mousewheelcb_wrapper(int pos);
void mousewheel_cb(int pos);
static void keycb_wrapper(int key, int action);
void keyboard_cb(int key,int action);
static void charcb_wrapper(int ch, int action);
void character_cb(int ch,int action);
public:
App(void);
~App();
void display_version(void);
void run_inner_loop(void);
void run(void);
};
|
import { randomBytes } from "crypto";
import { createReadStream, createWriteStream, statSync } from "fs";
import { rm, mkdir, stat as statAsync } from "fs/promises";
import { tmpdir } from "os";
import { basename, dirname, extname, resolve as resolvePath } from "path";
import type { Readable } from "stream";
import { finished } from "stream";
import { promisify } from "util";
import { MaxPartSizeExceededError } from "@remix-run/server-runtime";
import type { UploadHandler } from "@remix-run/server-runtime";
// @ts-expect-error
import * as streamSlice from "stream-slice";
import {
createReadableStreamFromReadable,
readableStreamToString,
} from "../stream";
export type FileUploadHandlerFilterArgs = {
filename: string;
contentType: string;
name: string;
};
export type FileUploadHandlerPathResolverArgs = {
filename: string;
contentType: string;
name: string;
};
/**
* Chooses the path of the file to be uploaded. If a string is not
* returned the file will not be written.
*/
export type FileUploadHandlerPathResolver = (
args: FileUploadHandlerPathResolverArgs
) => string | undefined;
export type FileUploadHandlerOptions = {
/**
   * Avoid file conflicts by appending a timestamp to the end of the filename
* if it already exists on disk. Defaults to `true`.
*/
avoidFileConflicts?: boolean;
/**
* The directory to write the upload.
*/
directory?: string | FileUploadHandlerPathResolver;
/**
* The name of the file in the directory. Can be a relative path, the directory
* structure will be created if it does not exist.
*/
file?: FileUploadHandlerPathResolver;
/**
* The maximum upload size allowed. If the size is exceeded an error will be thrown.
* Defaults to 3000000B (3MB).
*/
maxPartSize?: number;
/**
   * A filter callback deciding whether a given part should be written to disk.
   * Return `false` (or a promise resolving to `false`) to skip the part.
*/
filter?(args: FileUploadHandlerFilterArgs): boolean | Promise<boolean>;
};
let defaultFilePathResolver: FileUploadHandlerPathResolver = ({ filename }) => {
let ext = filename ? extname(filename) : "";
return "upload_" + randomBytes(4).readUInt32LE(0) + ext;
};
async function uniqueFile(filepath: string) {
let ext = extname(filepath);
let uniqueFilepath = filepath;
for (
let i = 1;
await statAsync(uniqueFilepath)
.then(() => true)
.catch(() => false);
i++
) {
uniqueFilepath =
(ext ? filepath.slice(0, -ext.length) : filepath) +
`-${new Date().getTime()}${ext}`;
}
return uniqueFilepath;
}
export function createFileUploadHandler({
directory = tmpdir(),
avoidFileConflicts = true,
file = defaultFilePathResolver,
filter,
maxPartSize = 3000000,
}: FileUploadHandlerOptions = {}): UploadHandler {
return async ({ name, filename, contentType, data }) => {
if (
!filename ||
(filter && !(await filter({ name, filename, contentType })))
) {
return undefined;
}
let dir =
typeof directory === "string"
? directory
: directory({ name, filename, contentType });
if (!dir) {
return undefined;
}
let filedir = resolvePath(dir);
let path =
typeof file === "string" ? file : file({ name, filename, contentType });
if (!path) {
return undefined;
}
let filepath = resolvePath(filedir, path);
if (avoidFileConflicts) {
filepath = await uniqueFile(filepath);
}
await mkdir(dirname(filepath), { recursive: true }).catch(() => {});
let writeFileStream = createWriteStream(filepath);
let size = 0;
let deleteFile = false;
try {
for await (let chunk of data) {
size += chunk.byteLength;
if (size > maxPartSize) {
deleteFile = true;
throw new MaxPartSizeExceededError(name, maxPartSize);
}
writeFileStream.write(chunk);
}
} finally {
writeFileStream.end();
await promisify(finished)(writeFileStream);
if (deleteFile) {
await rm(filepath).catch(() => {});
}
}
return new NodeOnDiskFile(filepath, contentType);
};
}
export class NodeOnDiskFile implements File {
name: string;
lastModified: number = 0;
webkitRelativePath: string = "";
constructor(
private filepath: string,
public type: string,
private slicer?: { start: number; end: number }
) {
this.name = basename(filepath);
}
get size(): number {
let stats = statSync(this.filepath);
if (this.slicer) {
let slice = this.slicer.end - this.slicer.start;
return slice < 0 ? 0 : slice > stats.size ? stats.size : slice;
}
return stats.size;
}
slice(start?: number, end?: number, type?: string): Blob {
if (typeof start === "number" && start < 0) start = this.size + start;
if (typeof end === "number" && end < 0) end = this.size + end;
let startOffset = this.slicer?.start || 0;
start = startOffset + (start || 0);
end = startOffset + (end || this.size);
return new NodeOnDiskFile(
this.filepath,
typeof type === "string" ? type : this.type,
{
start,
end,
}
);
}
async arrayBuffer(): Promise<ArrayBuffer> {
let stream: Readable = createReadStream(this.filepath);
if (this.slicer) {
stream = stream.pipe(
streamSlice.slice(this.slicer.start, this.slicer.end)
);
}
return new Promise((resolve, reject) => {
let buf: any[] = [];
stream.on("data", (chunk) => buf.push(chunk));
stream.on("end", () => resolve(Buffer.concat(buf)));
stream.on("error", (err) => reject(err));
});
}
stream(): ReadableStream<any>;
stream(): NodeJS.ReadableStream;
stream(): ReadableStream<any> | NodeJS.ReadableStream {
let stream: Readable = createReadStream(this.filepath);
if (this.slicer) {
stream = stream.pipe(
streamSlice.slice(this.slicer.start, this.slicer.end)
);
}
return createReadableStreamFromReadable(stream);
}
async text(): Promise<string> {
return readableStreamToString(this.stream());
}
public get [Symbol.toStringTag]() {
return "File";
}
}
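
// A minimal usage sketch (assuming this handler is passed to Remix's multipart
// form parser; the directory, size limit and filter below are illustrative):
//
// const uploadHandler = createFileUploadHandler({
//   directory: "/tmp/uploads",
//   maxPartSize: 5_000_000, // 5MB instead of the 3MB default
//   filter: ({ contentType }) => contentType.startsWith("image/"),
// });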
|
/**
* Implementation of a GOTermSimilarity measure to compare GO codes for texts
* against GO codes for genes, comparable to Schlicker et al. This class uses a
* GOAccess to access a database that contains shortest distances and Lowest Common
* Ancestors (LCA) for GO terms.
* <br>
* <br>
* Similarity/distance is based on the shortest path using two terms' LCA and
* the depth of this LCA in the overall hierarchy.
*
*
*
* @author Jörg Hakenberg <[email protected]>
*/
public class GOTermSimilarity implements Serializable {
/** */
private static final long serialVersionUID = 5368860031806091643L;
/** */
public int verbosity = 0;
/** Stores the distances between two GO codes. Key: "GO:0005575;GO:0001695". */
private Map<String, Float> goTermDistances = new HashMap<String, Float>();
	/** Where to find the existing GO term distances. Default: data/go2go.object.<br>
* Format: HashMap<String, Float>, see <code>goTermDistances</code>. */
public String go2gofile = "data/go2go.object";
private int initiallyLoadedSimilarities = 0;
private boolean computedNewSimilarity = false;
private static GOAccess goAccess;
/**
* Accesses the database that contains LCA information on GO terms
* when distances were not already calculated before. These distances
* are stored in <tt>go2gofile</tt>. If set to false, uses distances
* from that file only and returns NEG.INF for unknown distances.
*/
private boolean useDatabase = true;
/**
*
*/
public GOTermSimilarity (GOAccess go) {
goAccess = go;
}
/**
* Closes the current connection to the DB. Should be called after this class is used.
*
*/
public void closeDBConnection () {
if (goAccess != null)
if (goAccess.isOpen())
goAccess.close();
}
/**
* Returns the shortest distance between two GO codes in the Gene Ontology.
*
* @param goCode1
* @param goCode2
* @return
*/
public float getDistance (String goCode1, String goCode2) {
goCode1 = goCode1.replaceAll("^GO\\:0+", "");
goCode2 = goCode2.replaceAll("^GO\\:0+", "");
if (goCode1.equals(goCode2)) return 0.0f;
String key = goCode1 + ";" + goCode2;
//if(verbosity>3){
// System.out.println(this.getClass().getSimpleName()+": getDistance for key='"+key+"'");
//}
if (goTermDistances.containsKey(key)) {
return goTermDistances.get(key);
}
key = goCode2 + ";" + goCode1;
if (goTermDistances.containsKey(key))
return goTermDistances.get(key);
if (!useDatabase)
return Float.NEGATIVE_INFINITY;
//System.err.println("#GTS: getting distance");
float dist = goAccess.getDistance(goCode1, goCode2);
if(verbosity>3){
System.out.println(this.getClass().getSimpleName()+": getDistance: computed new similarity = "+dist);
}
key = goCode1 + ";" + goCode2;
goTermDistances.put(key, dist);
computedNewSimilarity = true;
return dist;
}
/**
* Returns the similarity of two GO codes.
*
* @param goCode1
* @param goCode2
* @return
*/
public static float getSimilarity (String goCode1, String goCode2) {
if (goCode1.equals(goCode2)) return 1.0f;
//GOAccess goAccess = new GOAccess();
float dist = goAccess.getDistance(goCode1, goCode2);
return 1.0f - dist;
}
/**
	 * Returns a score between two lists of GO codes based on the average of pairwise comparison.
*
* @param codes1
* @param codes2
* @return
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public float getGOSimilarity (LinkedList<String> codes1, LinkedList<String> codes2) {
//System.err.println("Calling GTS.getGOSim");
//System.err.println("#Scoring GO codes for gene vs text:");
//System.err.println("# codes 1: " + codes1);
//System.err.println("# codes 2: " + codes2);
// maps a GO-Code pair to its distance
Hashtable<String, Float> pair2distance = new Hashtable<String, Float>();
// contain all GO codes for each list as a set
TreeSet<String> set1 = new TreeSet<String>();
TreeSet<String> set2 = new TreeSet<String>();
// get the scores for all pairs
for (String go1: codes1) {
set1.add(go1);
for (String go2: codes2) {
set2.add(go2);
float distance = getDistance(go1, go2);
//if (gosim > sim) sim = gosim;
String pair = go1 + ";" + go2;
				pair2distance.put(pair, Float.valueOf(distance));
}
}
// now sort the results by distance
Set<Map.Entry<String, Float>> set = pair2distance.entrySet();
Map.Entry[] entries = (Map.Entry[]) set.toArray(new Map.Entry[set.size()]);
//FloatComparator fc = new FloatComparator();
Arrays.sort(entries, new Comparator() {
public int compare(Object o1, Object o2) {
Object v1 = ((Map.Entry) o1).getValue();
Object v2 = ((Map.Entry) o2).getValue();
return ((Float) v1).compareTo((Float)v2); // v1, v2 => lowest first
}
});
float average = 0.0f;
float count = 0.0f;
// for (int e = 0; e < entries.length; e++) {
// String key = (String)entries[e].getKey();
// float dist = pair2distance.get(key).floatValue();
// //if (verbosity > 3) {
// // System.err.println("# pair " + key + ": sim=" + (1.0f-dist));
// //}
// }
// get the best fitting pairs of myID and pID, remove them from the sets,
// until one set is empty
for (int e = 0; e < entries.length; e++) {
String key = (String)entries[e].getKey();
float dist = pair2distance.get(key).floatValue();
if (dist == Float.POSITIVE_INFINITY)
break;
//System.out.println("# GO check for " + key + " = " + dist);
String[] ids = key.split(";");
if (!set1.contains(ids[0]) || !set2.contains(ids[1]))
continue;
average += dist;
count += 1.0f;
set1.remove(ids[0]);
set2.remove(ids[1]);
if (set1.size() == 0 || set2.size() == 0)
break;
}
float score = (1 - (average / count));
//System.err.println("# score = " + score);
return score;
}
/**
* Loads precomputed GO terms distances from a serialized object file (Map<String, Float>).
* @param filename
*/
@SuppressWarnings("unchecked")
public void loadGOTermDistances (String filename) {
go2gofile = filename;
FileInputStream fis = null;
ObjectInputStream in = null;
File FILE = new File(filename);
try {
fis = new FileInputStream(FILE);
in = new ObjectInputStream(fis);
goTermDistances = (HashMap<String, Float>)in.readObject();
// for (String key : goTermDistances.keySet()) {
// if(goTermDistances.get(key)< Double.POSITIVE_INFINITY){
// System.err.println("# key = "+key+", dist = "+goTermDistances.get(key));
// }
// }
in.close();
} catch (java.io.FileNotFoundException fnfe) {
//cnfe.printStackTrace();
System.err.println("#ERROR no such file: " + go2gofile + ", starting with empty set of GO-to-GO distances.");
goTermDistances = new HashMap<String, Float>();
} catch (ClassNotFoundException cnfe) {
System.err.println("#ERROR opening " + go2gofile + ": unexpected content");
//cnfe.printStackTrace();
goTermDistances = new HashMap<String, Float>();
} catch (java.io.StreamCorruptedException sce) {
System.err.println("#ERROR opening " + go2gofile + ": " + sce.getMessage());
sce.printStackTrace();
//return null;
goTermDistances = new HashMap<String, Float>();
} catch (java.io.EOFException ee) {
System.err.println("#ERROR opening " + go2gofile + ": " + ee.getMessage());
//ee.printStackTrace();
//return null;
goTermDistances = new HashMap<String, Float>();
} catch (java.io.IOException ioe) {
System.err.println("#ERROR opening " + go2gofile + ": " + ioe.getMessage());
//ioe.printStackTrace();
goTermDistances = new HashMap<String, Float>();
}
initiallyLoadedSimilarities = goTermDistances.size();
if(verbosity>0)
System.err.println("# Loaded GO term distances from disk, #pairs: " + initiallyLoadedSimilarities);
}
/**
* Returns a similarity of the two sets based on the most similar pair
* that was found.
*
* @param codes1
* @param codes2
* @return
*/
public static float scoreMostSimilarGOCodes (LinkedList<String> codes1, LinkedList<String> codes2) {
float sim = 0.0f;
for (String go1: codes1) {
for (String go2: codes2) {
float gosim = getSimilarity(go1, go2);
if (gosim > sim) sim = gosim;
}
}
return sim;
}
/**
*
* Writes all go term distances to file.
*
* @return
*/
public boolean writeGOTermDistances () {
if (computedNewSimilarity || (initiallyLoadedSimilarities < goTermDistances.size())) {
//System.err.println("#Calling writeGO (1)");
if(verbosity>0)
System.err.println("# Writing GO term distances to disk, #pairs: " + goTermDistances.size());
FileOutputStream fos = null;
ObjectOutputStream out = null;
File FILE = new File(go2gofile);
try {
fos = new FileOutputStream(FILE);
out = new ObjectOutputStream(fos);
out.writeObject(goTermDistances);
out.close();
} catch(IOException ex) {
ex.printStackTrace();
return false;
}
return true;
}
else{
//System.err.println("#Calling writeGO (2)");
if(verbosity>0)
System.err.println("# No new GO term similarities. Skip writing.");
return true;
}
}
/**
*
* @param args
*/
public static void main (String[] args) {
LinkedList<String> codes1 = new LinkedList<String>();
LinkedList<String> codes2 = new LinkedList<String>();
String[] gene = {"3677", "3723", "166", "5515", "6310", "6281", "8380", "398", "6355", "6350", "5634"};
String[] text = {"GO:0003779", "GO:0003680", "GO:0005576", "GO:0008380", "GO:0005488", "GO:0016021",
"GO:0005622", "GO:0005856", "GO:0005623"};
//# norm.overlap=0.0, cos=0.0, splittedCos=0.0, go.sim=0.8996803, max=0.8996803
GOTermSimilarity goScorer = new GOTermSimilarity(new GOAccess(ISGNProperties.get("dbDriver"), ISGNProperties.get("dbAccessUrl"),
ISGNProperties.get("dbUser"), ISGNProperties.get("dbPass")));
goScorer.loadGOTermDistances("data/go2go.obj");
for (String c: gene)
codes1.add(c);
for (String c: text)
codes2.add(c);
System.out.println("# Sim: " + goScorer.getGOSimilarity(codes1, codes2));
goScorer.writeGOTermDistances();
}
} |
<reponame>anqqa/klubitus-pwa
import { CallHandler, ExecutionContext, Injectable, NestInterceptor } from '@nestjs/common';
import { FastifyRequest } from 'fastify';
import { Observable } from 'rxjs';
import { User } from '../users/user.entity';
const DEFAULT_KEY = 'user_id';
@Injectable()
export class InjectUserInterceptor implements NestInterceptor {
constructor(private key: string = DEFAULT_KEY) {}
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
const req: FastifyRequest & { user?: User } = context.switchToHttp().getRequest();
if (req.user) {
if (['PATCH', 'POST', 'PUT'].includes(req.raw.method)) {
req.body[this.key || DEFAULT_KEY] = req.user.id;
} else {
req.query.filter = `${this.key || DEFAULT_KEY}||eq||${req.user.id}`;
}
}
return next.handle();
}
}
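
// A minimal usage sketch (assuming an auth guard has already populated
// req.user; 'author_id' and CreateEventDto are hypothetical names):
//
// @UseInterceptors(new InjectUserInterceptor('author_id'))
// @Post()
// create(@Body() dto: CreateEventDto) { ... }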
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* author <NAME>
* <NAME>
* <NAME>
*/
#ifdef WIN32
#include <Windows.h>
#else
#include <sys/time.h>
#include <fcntl.h>
#include <time.h>
#endif
#ifdef SOLARIS
#define _STRUCTURED_PROC 1
#include <sys/procfs.h>
#endif
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include "jni.h"
#include "jvmti.h"
#include "org_netbeans_lib_profiler_server_system_GC.h"
#include "common_functions.h"
#define OBSERVED_PERIODS 10 /* must match OBSERVED_PERIODS in GC.java */
#define OBJECT_INT_SIZE 2
#define OBJECT_SIZE (sizeof(void *)*OBJECT_INT_SIZE)
static int gc_epoch_counter, start_index, end_index;
static jlong gc_start_timestamp, gc_finish_timestamp;
static jlong *run_times, *gc_times, *start_times,*finish_times;
void JNICALL register_gc_start(jvmtiEnv *jvmti_env) {
jlong new_timestamp = get_nano_time();
memmove(run_times, run_times + 1, (OBSERVED_PERIODS - 1) * sizeof(jlong));
run_times[OBSERVED_PERIODS - 1] = (new_timestamp - gc_finish_timestamp);
start_times[start_index] = new_timestamp;
start_index = (start_index+1) % OBSERVED_PERIODS;
gc_start_timestamp = new_timestamp;
}
void JNICALL register_gc_finish(jvmtiEnv *jvmti_env) {
jlong new_timestamp = get_nano_time();
memmove(gc_times, gc_times + 1, (OBSERVED_PERIODS - 1) * sizeof(jlong));
gc_times[OBSERVED_PERIODS - 1] = (new_timestamp - gc_start_timestamp);
finish_times[end_index] = new_timestamp;
end_index = (end_index+1) % OBSERVED_PERIODS;
gc_finish_timestamp = new_timestamp;
gc_epoch_counter++;
}
void enable_gc_start_finish_hook(JNIEnv *env, jboolean enable) {
jvmtiError res;
jvmtiEventMode mode;
if (enable) {
_jvmti_callbacks->GarbageCollectionStart = register_gc_start;
_jvmti_callbacks->GarbageCollectionFinish = register_gc_finish;
res = (*_jvmti)->SetEventCallbacks(_jvmti, _jvmti_callbacks, sizeof(*_jvmti_callbacks));
assert(res == JVMTI_ERROR_NONE);
mode = JVMTI_ENABLE;
} else {
mode = JVMTI_DISABLE;
}
res = (*_jvmti)->SetEventNotificationMode(_jvmti, mode, JVMTI_EVENT_GARBAGE_COLLECTION_START, NULL);
assert(res == JVMTI_ERROR_NONE);
res = (*_jvmti)->SetEventNotificationMode(_jvmti, mode, JVMTI_EVENT_GARBAGE_COLLECTION_FINISH, NULL);
assert(res == JVMTI_ERROR_NONE);
}
/*
* Class: profiler_server_system_GC
* Method: activateGCEpochCounter
* Signature: (Z)V
*/
JNIEXPORT void JNICALL Java_org_netbeans_lib_profiler_server_system_GC_activateGCEpochCounter
(JNIEnv *env, jclass clz, jboolean activate)
{
enable_gc_start_finish_hook(env, activate);
gc_epoch_counter = 0;
run_times = (jlong*) calloc(OBSERVED_PERIODS, sizeof(jlong));
gc_times = (jlong*) calloc(OBSERVED_PERIODS, sizeof(jlong));
start_times = (jlong*) calloc(OBSERVED_PERIODS, sizeof(jlong));
finish_times = (jlong*) calloc(OBSERVED_PERIODS, sizeof(jlong));
gc_finish_timestamp = get_nano_time(); /* We know this doesn't happen during GC */
}
/*
* Class: profiler_server_system_GC
* Method: resetGCEpochCounter
* Signature: ()V
*/
JNIEXPORT void JNICALL Java_org_netbeans_lib_profiler_server_system_GC_resetGCEpochCounter
(JNIEnv *env, jclass clz)
{
gc_epoch_counter = 0;
}
/*
* Class: profiler_server_system_GC
* Method: getCurrentGCEpoch
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_netbeans_lib_profiler_server_system_GC_getCurrentGCEpoch
(JNIEnv *env, jclass clz)
{
return gc_epoch_counter;
}
/*
* Class: profiler_server_system_GC
* Method: objectsAdjacent
* Signature: (Ljava/lang/Object;Ljava/lang/Object;)Z
*/
JNIEXPORT jboolean JNICALL Java_org_netbeans_lib_profiler_server_system_GC_objectsAdjacent
(JNIEnv *env, jclass clz, jobject jobj1, jobject jobj2)
{
/* Warning: this assumes the HotSpot VM and its current object handle format */
char* obj1 = jobj1 == NULL ? (char*) NULL : *((char**)(jobj1));
char* obj2 = jobj2 == NULL ? (char*) NULL : *((char**)(jobj2));
    ptrdiff_t diff = obj2 - obj1; /* ptrdiff_t avoids truncation on 64-bit */
    return (diff == (ptrdiff_t)OBJECT_SIZE) || (diff == -(ptrdiff_t)OBJECT_SIZE);
}
/*
* Class: profiler_server_system_GC
* Method: getGCRelativeTimeMetrics
* Signature: ([J)V
*/
JNIEXPORT void JNICALL Java_org_netbeans_lib_profiler_server_system_GC_getGCRelativeTimeMetrics
(JNIEnv *env, jclass clz, jlongArray metrics)
{
int i;
jlong total_gc_time = 0, total_run_time = 0;
jlong gc_pause_rel_time, last_gc_pause_in_micro;
for (i = 0; i < OBSERVED_PERIODS; i++) {
total_gc_time += gc_times[i];
}
for (i = 0; i < OBSERVED_PERIODS; i++) {
total_run_time += run_times[i];
}
/* We know this doesn't happen during GC, so we can calculate real run time */
total_run_time += (get_nano_time() - gc_finish_timestamp);
if (total_run_time == 0) {
gc_pause_rel_time = 0;
} else {
gc_pause_rel_time = (jlong) (((float) total_gc_time) / ((float) (total_gc_time + total_run_time)) * 1000);
}
    last_gc_pause_in_micro = gc_times[OBSERVED_PERIODS - 1] / 1000; /* nanoseconds to microseconds */
(*env)->SetLongArrayRegion(env, metrics, 0, 1, &gc_pause_rel_time);
(*env)->SetLongArrayRegion(env, metrics, 1, 1, &last_gc_pause_in_micro);
}
/*
* Class: org_netbeans_lib_profiler_server_system_GC
* Method: getGCStartFinishTimes
* Signature: ([J[J)V
*/
JNIEXPORT void JNICALL Java_org_netbeans_lib_profiler_server_system_GC_getGCStartFinishTimes
(JNIEnv *env, jclass clz, jlongArray start, jlongArray finish)
{
(*env)->SetLongArrayRegion(env, start, 0, OBSERVED_PERIODS, start_times);
(*env)->SetLongArrayRegion(env, finish, 0, OBSERVED_PERIODS, finish_times);
}
/*
* Class: profiler_server_system_GC
* Method: runGC
* Signature: ()V
*/
JNIEXPORT void JNICALL Java_org_netbeans_lib_profiler_server_system_GC_runGC
(JNIEnv *env, jclass clz)
{
(*_jvmti)->ForceGarbageCollection(_jvmti);
}
|
Drilling in Alaska will not solve oil problems
Running across Enron Field during the Owls' baseball game on Friday seemed like a harmless prank to Jose De La Pena. But it landed the Baker College senior in jail for the night, and he may face fines of up to $2,500. At the game, a group of about 25 Baker students, including De La Pena, along with Baker Master Mickey Quinones, were sitting on the third base line. Quinones had purchased tickets to the game and given them to Baker students. According to Quinones, some students started daring De La Pena to run from his seat to the flagpole in the middle of center field. The students collected $125 as incentive for De La Pena to run. Quinones did not contribute to the pot. Baker freshman Mahek Shah said he gave money because he thought it would be entertaining. "I thought it would be fun to see him do it," Shah said. "It's good to contribute to a good cause." When Rice was in the field in the top of the eighth, De La Pena hopped the fence and started running toward center field. The crowd, especially those in the Rice section, started cheering for him. "I was just cheering him on and hoped that he wouldn't trip," Shah said. De La Pena made it to the flagpole in center field, climbed the short wall and then scurried up to the Ruggles restaurant beyond the wall, where a security officer was waiting for him. Baker freshman Katie Fort was standing next to the security officer. "After Jose made it over, the cop said, 'Your friend didn't outrun the radio. He's going to jail,'" Fort said. De La Pena said the officer took him to a holding facility to wait for the Houston Police Department. After the Rice students realized the police ... See ARREST, Page 6
// Copyright (c) 2021, Oracle and/or its affiliates.
// Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
// Package report handles reporting
package report
import (
"bufio"
"errors"
"fmt"
"go.uber.org/zap"
"os"
"sync"
)
// NOTE: This is part of the contract with the analyzers however it is currently an initial stake in the ground and
// will be evolving rapidly initially as we add analysis cases
// TODO: We have rudimentary settings and a rudimentary dump of the report to start with here. Ie: Bare bones to get
// the details out for now, go from there... But there are some things that are already on the radar here:
//
// 1) Format of the human readable report will evolve (wrapping long lines, etc...)
// 2) Other format outputs suitable for automation to process (ie: other automation that will look at the results
// and do something automatically with it), maybe CSV file, JSON, etc...
// 3) Detail consolidation/redacting suitable for sharing as a Bug report
// 4) Etc...
//
// For example, when we report them we will want to report:
// 1) Per source (cluster, build, etc...)
// 2) Sort in priority order (worst first...) TODO
// Tossing around whether, per-source, we should have a map for tracking Issues so we have one Issue per type of issue
// and allow contributing supporting data to it (rather than separate issues for each case found in different spots).
// I'm hesitant to do that now as it reduces flexibility, and until we really have the analysis drill-down patterns and
// more scenarios in place I think it is premature (we could maybe allow both, but again not sure we need
// that complexity yet either). One good example is that when there are a bunch of pods impacted by the same root-cause
// issue, we really don't need to spam with a bunch of copies of the same issue (we could add additional supporting data
// to one root issue instead of having an issue for each occurrence), but the analyzer can manage that and knows more
// about whether it is really a different issue or not.
// We have a map per source. The source is a string here. Generally for clusters it would be something that identifies
// the cluster. But other analyzers may not be looking at a cluster, so they may have some other identification.
// For the current implementation, these are the root file path that the analyzer is looking at.
var reports = make(map[string][]Issue)
var allSourcesAnalyzed = make(map[string]string)
var reportMutex = &sync.Mutex{}
// ContributeIssuesMap allows a map of issues to be contributed
func ContributeIssuesMap(log *zap.SugaredLogger, source string, issues map[string]Issue) (err error) {
log.Debugf("ContributeIssues called for source %s with %d issues", len(issues))
if len(source) == 0 {
return errors.New("ContributeIssues requires a non-empty source be specified")
}
for _, issue := range issues {
err = issue.Validate(log, source)
if err != nil {
return err
}
}
reportMutex.Lock()
reportIssues := reports[source]
if len(reportIssues) == 0 {
reportIssues = make([]Issue, 0, 10)
}
for _, issue := range issues {
issue.SupportingData = DeduplicateSupportingData(issue.SupportingData)
reportIssues = append(reportIssues, issue)
}
reports[source] = reportIssues
reportMutex.Unlock()
return nil
}
// ContributeIssue allows a single issue to be contributed
func ContributeIssue(log *zap.SugaredLogger, issue Issue) (err error) {
log.Debugf("ContributeIssue called for source %s with %v", issue)
err = issue.Validate(log, "")
if err != nil {
log.Debugf("Validate failed", err)
return err
}
reportMutex.Lock()
reportIssues := reports[issue.Source]
if len(reportIssues) == 0 {
reportIssues = make([]Issue, 0, 10)
}
issue.SupportingData = DeduplicateSupportingData(issue.SupportingData)
reportIssues = append(reportIssues, issue)
reports[issue.Source] = reportIssues
reportMutex.Unlock()
return nil
}
// GenerateHumanReport is a basic report generator
// TODO: This is super basic for now, need to do things like sort based on Confidence, add other formats on output, etc...
// Also add other niceties like time, Summary of what was analyzed, if no issues were found, etc...
func GenerateHumanReport(log *zap.SugaredLogger, reportFile string, includeSupportData bool, includeInfo bool, includeActions bool, minConfidence int, minImpact int) (err error) {
// Default to stdout if no reportfile is supplied
var writeOut = bufio.NewWriter(os.Stdout)
if len(reportFile) > 0 {
log.Debugf("Generating human report to file: %s", reportFile)
// Open the file for write
fileOut, err := os.Create(reportFile)
if err != nil {
log.Errorf("Failed to create report file %s", reportFile, err)
return err
}
defer fileOut.Close()
writeOut = bufio.NewWriter(fileOut)
} else {
log.Debugf("Generating human report to stdout")
}
	// Lock the report data while generating the report itself; defer the unlock
	// so the early error returns below don't leave the mutex held
	reportMutex.Lock()
	defer reportMutex.Unlock()
	// Copy the map; assigning it directly would alias allSourcesAnalyzed and the
	// delete() below would mutate the shared state
	sourcesWithoutIssues := make(map[string]string, len(allSourcesAnalyzed))
	for k, v := range allSourcesAnalyzed {
		sourcesWithoutIssues[k] = v
	}
for source, reportIssues := range reports {
log.Debugf("Will report on %d issues that were reported for %s", len(reportIssues), source)
// We need to filter and sort the list of Issues that will be reported
// TODO: Need to sort them as well eventually
actuallyReported := filterReportIssues(log, reportIssues, includeInfo, minConfidence, minImpact)
if len(actuallyReported) == 0 {
log.Debugf("No issues to report for source: %s")
continue
}
// Print the Source as it has issues
delete(sourcesWithoutIssues, source)
_, err = fmt.Fprintf(writeOut, "\n\nDetected %d issues for %s:\n\n", len(actuallyReported), source)
if err != nil {
return err
}
for _, issue := range actuallyReported {
// Print the Issue out
_, err = fmt.Fprintf(writeOut, "\n\tISSUE (%s)\n\t\tsummary: %s\n", issue.Type, issue.Summary)
if err != nil {
return err
}
_, err = fmt.Fprintf(writeOut, "\t\tconfidence: %d\n", issue.Confidence)
if err != nil {
return err
}
_, err = fmt.Fprintf(writeOut, "\t\timpact: %d\n", issue.Impact)
if err != nil {
return err
}
if len(issue.Actions) > 0 && includeActions {
log.Debugf("Output actions")
_, err = fmt.Fprintf(writeOut, "\t\tactions:\n")
if err != nil {
return err
}
for _, action := range issue.Actions {
_, err = fmt.Fprintf(writeOut, "\t\t\taction: %s\n", action.Summary)
if err != nil {
return err
}
if len(action.Steps) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\t\tSteps:\n")
if err != nil {
return err
}
for i, step := range action.Steps {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t\tStep %d: %s\n", i+1, step)
if err != nil {
return err
}
}
}
if len(action.Links) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\t\tLinks:\n")
if err != nil {
return err
}
for _, link := range action.Links {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t\t%s\n", link)
if err != nil {
return err
}
}
}
}
}
if len(issue.SupportingData) > 0 && includeSupportData {
log.Debugf("Output supporting data")
_, err = fmt.Fprintf(writeOut, "\t\tsupportingData:\n")
if err != nil {
return err
}
for _, data := range issue.SupportingData {
if len(data.Messages) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\tmessages:\n")
if err != nil {
return err
}
for _, message := range data.Messages {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t%s\n", message)
if err != nil {
return err
}
}
}
if len(data.TextMatches) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\tsearch matches:\n")
if err != nil {
return err
}
for _, match := range data.TextMatches {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t%s:%d: %s\n", match.FileName, match.FileLine, match.MatchedText)
if err != nil {
return err
}
}
}
if len(data.JSONPaths) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\trelated json:\n")
if err != nil {
return err
}
for _, path := range data.JSONPaths {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t%s: %s\n", path.File, path.Path)
if err != nil {
return err
}
}
}
if len(data.RelatedFiles) > 0 {
_, err = fmt.Fprintf(writeOut, "\t\t\trelated files:\n")
if err != nil {
return err
}
for _, fileName := range data.RelatedFiles {
_, err = fmt.Fprintf(writeOut, "\t\t\t\t%s\n", fileName)
if err != nil {
return err
}
}
}
}
}
}
}
	if includeInfo {
		if len(sourcesWithoutIssues) > 0 {
			_, err = fmt.Fprintf(writeOut, "\n\n")
			if err != nil {
				return err
			}
		}
		for _, source := range sourcesWithoutIssues {
			_, err = fmt.Fprintf(writeOut, "INFO: No issues detected or to report for %s\n", source)
			if err != nil {
				return err
			}
		}
	}
log.Debugf("Flushing output")
err = writeOut.Flush()
if err != nil {
log.Errorf("Failed to flush writer for file %s", reportFile, err)
return err
}
return nil
}
// AddSourceAnalyzed tells the report which sources have been analyzed. This way it knows
// the entire set of sources which were analyzed (not just the ones which had issues detected)
func AddSourceAnalyzed(source string) {
reportMutex.Lock()
allSourcesAnalyzed[source] = source
reportMutex.Unlock()
}
// GetAllSourcesFilteredIssues is only being exported for the unit tests so they can inspect issues found in a report
func GetAllSourcesFilteredIssues(log *zap.SugaredLogger, includeInfo bool, minConfidence int, minImpact int) (filtered []Issue) {
reportMutex.Lock()
for _, reportIssues := range reports {
subFiltered := filterReportIssues(log, reportIssues, includeInfo, minConfidence, minImpact)
if len(subFiltered) > 0 {
filtered = append(filtered, subFiltered...)
}
}
reportMutex.Unlock()
return filtered
}
func filterReportIssues(log *zap.SugaredLogger, reportIssues []Issue, includeInfo bool, minConfidence int, minImpact int) (filtered []Issue) {
filtered = make([]Issue, 0, len(reportIssues))
for _, issue := range reportIssues {
// Skip issues that are Informational or lower Confidence that we want
if issue.Informational && !includeInfo || issue.Confidence < minConfidence || issue.Impact < minImpact {
log.Debugf("Skipping issue %s based on informational/confidence/impact settings", issue.Summary)
continue
}
filtered = append(filtered, issue)
}
return filtered
}
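
// A minimal usage sketch (assuming a zap.SugaredLogger 'log' and a populated
// Issue value; an empty report file name writes the report to stdout):
//
//	AddSourceAnalyzed("cluster-snapshot")
//	if err := ContributeIssue(log, issue); err != nil {
//		log.Errorf("contributing issue failed: %v", err)
//	}
//	_ = GenerateHumanReport(log, "", true, true, true, 0, 0)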
|
// CreateStorage creates a storage access class.
func (inst DataModel) CreateStorage() (Storage, error) {
var storage ref
ret := C.CCall_libmcdata_datamodel_createstorage(inst.wrapperRef.LibraryHandle, inst.Ref, &storage)
if ret != 0 {
return Storage{}, makeError(uint32(ret))
}
return inst.wrapperRef.NewStorage(storage), nil
}
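
// A minimal usage sketch (assuming an initialized DataModel value 'model'):
//
//	storage, err := model.CreateStorage()
//	if err != nil {
//		// handle the wrapped library error code
//	}
 |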
import math


def _check_allocation(weights, asset_class_name):
    s = sum(weights.values())
    if not math.isclose(s, 1, rel_tol=1e-09, abs_tol=0.0):
        raise Exception('Error: {} allocation of \'{}\' is not 100%!!!'
                        .format(asset_class_name, s))
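
# A minimal usage sketch (hypothetical weights; raises unless they sum to 1):
#
#   _check_allocation({'stocks': 0.6, 'bonds': 0.4}, 'retirement portfolio')
 |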
/*
* These functions are used early on before PCI scanning is done
* and all of the pci_dev and pci_bus structures have been created.
*/
static struct pci_dev *fake_pci_dev(struct pci_channel *hose,
int top_bus, int busnr, int devfn)
{
static struct pci_dev dev;
static struct pci_bus bus;
dev.bus = &bus;
dev.sysdata = hose;
dev.devfn = devfn;
bus.number = busnr;
bus.ops = hose->pci_ops;
if(busnr != top_bus)
bus.parent = &bus;
else
bus.parent = NULL;
return &dev;
} |
/**
* <p>Shared Diffie-Hellman instance to minimize key exchange overhead. New key
* request data should be generated after every successful key exchange.</p>
*
* <p>This class is thread-safe.</p>
*
* @author Wesley Miaw <[email protected]>
*/
public class DiffieHellmanManager {
/**
* @param params Diffie-Hellman parameters.
* @param paramId the ID of the Diffie-Hellman parameters to use.
*/
public DiffieHellmanManager(DiffieHellmanParameters params, final String paramId) {
this.params = params;
this.paramId = paramId;
}
/**
* <p>Return the current Diffie-Hellman key exchange request data. If no
* request data exists new data is generated.</p>
*
* @return the Diffie-Hellman request data.
* @throws MslKeyExchangeException if there is an error accessing the
* Diffie-Hellman parameters.
* @see #clearRequest()
*/
public synchronized RequestData getRequestData() throws MslKeyExchangeException {
// Generate new request data if necessary.
if (request == null) {
final DHParameterSpec paramSpec = params.getParameterSpec(paramId);
final KeyPairGenerator generator;
try {
generator = KeyPairGenerator.getInstance("DH");
generator.initialize(paramSpec);
} catch (final NoSuchAlgorithmException e) {
throw new MslInternalException("Diffie-Hellman algorithm not found.", e);
} catch (final InvalidAlgorithmParameterException e) {
throw new MslInternalException("Diffie-Hellman algorithm parameters rejected by Diffie-Hellman key agreement.", e);
}
final KeyPair requestKeyPair = generator.generateKeyPair();
final BigInteger publicKey = ((DHPublicKey)requestKeyPair.getPublic()).getY();
final DHPrivateKey privateKey = (DHPrivateKey)requestKeyPair.getPrivate();
            request = new RequestData(paramId, publicKey, privateKey);
}
return request;
}
/**
* <p>Clear the current Diffie-Hellman key exchange request data. The next
* call to {@link #getRequestData()} will generate new request data.</p>
*
* @see #getRequestData()
*/
public synchronized void clearRequest() {
request = null;
}
/** The Diffie-Hellman parameters. */
private final DiffieHellmanParameters params;
/** The Diffie-Hellman parameters ID to use. */
private final String paramId;
/** The current request data. */
private RequestData request;
}
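
// A minimal usage sketch (assuming a DiffieHellmanParameters instance
// 'params' and a parameter ID it recognizes):
//
//   DiffieHellmanManager manager = new DiffieHellmanManager(params, "1");
//   RequestData request = manager.getRequestData();
//   // ...after a successful key exchange, force fresh request data:
//   manager.clearRequest();
 |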
"""Entry-point for Galaxy specific functionality in Planemo."""
from __future__ import absolute_import
from .config import galaxy_config
from .run import (
run_galaxy_command,
setup_venv,
)
from .serve import serve as galaxy_serve
from .serve import shed_serve
__all__ = (
"galaxy_config",
"setup_venv",
"run_galaxy_command",
"galaxy_serve",
"shed_serve",
)
|
<gh_stars>0
import matplotlib.pyplot as plt
# Define the figure and axes
fig, ax = plt.subplots()
# Create the values for the table
table_data=[
["1", 30, 34],
["2", 20, 223],
["3", 33, 2354],
["4", 25, 234],
["5", 12, 929]
]
# Create the table
table = ax.table(cellText=table_data, loc='center', colLabels=['SD', 'ID', 'Score'])
# Style the table
table.set_fontsize(14)
table.scale(1,4)
ax.axis('off')
# Show the table
plt.show()
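
# A minimal extension sketch: persist the rendered table to an image file
# instead of (or in addition to) showing it interactively:
# fig.savefig('table.png', bbox_inches='tight', dpi=150)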
|
<gh_stars>1-10
import type { MultiSelectStylesNames } from '@mantine/core';
import { Input } from './Input.styles-api';
import { InputWrapper } from './InputWrapper.styles-api';
const InputStyles = { ...Input };
delete InputStyles.rightSection;
export const MultiSelect: Record<MultiSelectStylesNames, string> = {
wrapper: 'Wrapper around input and dropdown',
dropdown: 'Dropdown element',
item: 'Item element, rendered inside dropdown',
hovered: 'Hovered item modifier, added to item when it is selected with arrows or hovered',
disabled: 'Disabled item modifier',
nothingFound: 'Nothing found label',
values: 'Values wrapper',
value: 'Value element',
searchInput: 'Search input, rendered after all values',
defaultValue: 'Default value component wrapper',
defaultValueRemove: 'Default value remove control',
separator: 'Divider wrapper',
separatorLabel: 'Divider Label',
...InputStyles,
...InputWrapper,
};
|
Simulation of self-erase discharge waveforms in plasma display panels
We use a two-dimensional self-consistent fluid model to simulate the operation of a plasma display panel cell when different sustaining voltage waveforms are applied to its electrodes. The discharge path is much longer when a self-erase discharge waveform is used instead of the standard waveform. The longer discharge path results in higher efficiency. |
<gh_stars>0
import { Component, OnInit, ChangeDetectorRef } from '@angular/core';
import { CompanyService } from '../services/company.service';
import { Observable } from 'rxjs';
import { Company } from '../models';
import { tap } from 'rxjs/operators';
import { Store } from '@ngrx/store';
import { CompanySelectors, CompanyState } from '../reducers';
import { CompanyActions } from '../actions';
/**
 * async pipes with ngIf
 * https://toddmotto.com/angular-ngif-async-pipe
*/
@Component({
selector: 'app-companies',
templateUrl: './companies.container.html'
})
export class CompaniesContainerComponent implements OnInit {
// companies$: Observable<Company[]> = this.companiesService.getItems()
companies$: Observable<Company[]> = this.store.select(CompanySelectors.selectAllCompanies)
.pipe(
tap(items => console.log(`received updated items: ${JSON.stringify(items)}`))
);
currentCompany$: Observable<Company> = this.store.select(CompanySelectors.selectCurrentCompany)
.pipe(
tap(item => console.log(`selected company: ${JSON.stringify(item)}`))
);
constructor(
private store: Store<CompanyState>,
// private companiesService: CompanyService,
// private cd: ChangeDetectorRef
) { }
ngOnInit() {
this.store.dispatch(new CompanyActions.LoadCompanies());
}
updateCompany(company: Company): void {
console.log(`updated company: ${JSON.stringify(company)}`);
this.store.dispatch(new CompanyActions.UpdateCompany({
company: company
}));
// this.companiesService.updateItem(company).subscribe((result) => {
// // this.cd.markForCheck();
// console.log(`update was successful:${result}`);
// });
}
addCompany(company: Company): void {
console.log(`added company: ${JSON.stringify(company)}`);
this.store.dispatch(new CompanyActions.AddCompany({
company: company
}));
// this.companiesService.addItem(company).subscribe((result) => {
// console.log(`add was successful:${result}`);
// });
}
selectCompany(id: number): void {
console.log(`selected company id: ${id}`);
this.store.dispatch(new CompanyActions.SelectCompany({
id: id
}));
}
}
|
#include <algorithm>
#include <iostream>
#include <vector>
using namespace std;
class Solution {
public:
string largestTimeFromDigits(vector<int>& arr) {
string s = "HH:MM";
sort(arr.rbegin(), arr.rend());
for (int i = 0; i < 4; ++i) {
if (arr[i] > 2) continue;
s[0] = arr[i] + '0';
for (int j = 0; j < 4; ++j) {
int h = arr[i] * 10 + arr[j];
if (h > 23 || i == j) continue;
s[1] = arr[j] + '0';
for (int p = 0; p < 4; ++p) {
if (p == i || p == j || arr[p] > 5) continue;
s[3] = arr[p] + '0';
for (int q = 0; q < 4; ++q) {
if (q == i || q == j || q == p) continue;
s[4] = arr[q] + '0';
return s;
}
}
}
}
return "";
}
};
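
// A minimal usage sketch exercising the greedy search above:
int main() {
    vector<int> digits{1, 2, 3, 4};
    cout << Solution().largestTimeFromDigits(digits) << endl;  // prints "23:41"
    return 0;
}
 |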
from copy import deepcopy
n, x = map(int, input().split())
a = list(map(int, input().split()))
ans = sum(a)
a1 = deepcopy(a)
for i in range(1, n):
cur = x * i
a2 = deepcopy(a1)
for j in range(n):
a1[n - j - 1] = min(a2[n - j - 1], a[n - i - j - 1])
cur += a1[n - j - 1]
ans = min(ans, cur)
print(ans) |
// Popd changes out of the current directory to the previous directory.
func (s *Script) popd(_ string) error {
if len(s.dirs) > 1 {
s.dirs = s.dirs[1:]
}
return nil
} |
# !/usr/bin/python
# -*- coding: utf-8 -*-
# Created on 2020-03-22
# Class: Courses Spider
# Comment: Initiate requests, specify fields to be scraped and stored to item model
import scrapy
import re
import sys
from ..items import CourseGetterItem
class CoursesSpider(scrapy.Spider):
'''
Initiate requests
Specify fields to be scraped and stored to item model
'''
name = 'courses'
def __init__(self, address='', *args, **kwargs):
'''
Start crawl requests to "https://www.coursera.org/courses"
Args:
self: instance of the class
address: custom url, specified in command line arg
'''
# process the entered url
if address == '':
print("Custom url is not specified or invalid. System will use default url instead.")
self.start_urls = ["https://www.coursera.org/courses?query=&indices%5Bprod_all_products_term_optimization%5D%5Bpage%5D=" + str(i)+ "&indices%5Bprod_all_products_term_optimization%5D%5Bconfigure%5D%5BclickAnalytics%5D=true&indices%5Bprod_all_products_term_optimization%5D%5Bconfigure%5D%5BruleContexts%5D%5B0%5D=en&indices%5Bprod_all_products_term_optimization%5D%5Bconfigure%5D%5BhitsPerPage%5D=10&configure%5BclickAnalytics%5D=true" for i in range(1,101)]
else:
self.start_urls = ['{!s}'.format(address)]
super(CoursesSpider, self).__init__(*args, **kwargs)
def parse(self, response):
'''
        Write response fields to the item model
Args:
self: instance of the class
response: response object obtained from the request
'''
items = CourseGetterItem()
# specify the location of the fields to be scraped on the web page
titles = response.xpath('//h2[@class="color-primary-text card-title headline-1-text"]/text()').getall()
partners = response.css("span.partner-name::text").getall()
ratings = response.css("span.ratings-text::text").getall()
count = response.xpath('//span[@class="ratings-count"]/span/text()').getall()
enrollment = response.css("span.enrollment-number::text").getall()
level = response.css("span.difficulty::text").getall()
# specify the field names in item model
items['title'] = titles
items['partner'] = partners
items['rating'] = ratings
items['count'] = count
items['enrollment'] = enrollment
items['level'] = level
yield items
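
# A minimal usage sketch (run from the Scrapy project root; -a forwards the
# optional custom url to the spider's constructor):
#
#   scrapy crawl courses -o courses.json
#   scrapy crawl courses -a address='https://www.coursera.org/courses'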
|
/* Return true if the current function should treat register REGNO
as call-saved. */
static bool
mips_cfun_call_saved_reg_p (unsigned int regno)
{
if (global_regs[regno])
return false;
if (cfun->machine->interrupt_handler_p
&& mips_interrupt_extra_call_saved_reg_p (regno))
return true;
return (regno == GLOBAL_POINTER_REGNUM
? TARGET_CALL_SAVED_GP
: !call_really_used_regs[regno]);
} |
TRANSFER TRACKER STATUS: Rumor
A Nigerian striker with a Bundesliga pedigree has drawn MLS interest, according to a story from online outlet Score Nigeria.
Anthony Ujah has cancelled his contract with Chinese side Liaoning Whowin and returned to Germany with his family. But clubs "Like LA Galaxy or Seattle Sounders" have shown interest in the forward, who before moving to China enjoyed consecutive double-digit goal seasons in the Bundesliga, with Cologne in 2014-15 and Werder Bremen the following year.
Most of the Galaxy's troubles in their miserable 2017 campaign came on the defensive end. But they would still benefit from a player of Ujah's caliber up front as they try to rebound in 2018 while potentially parting ways with Gyasi Zardes.
The Sounders have also shown a willingness to add quality depth even when they appear set at a position, and they could need some attacking help depending on Clint Dempsey's future.
import warnings

import torch
from torch.autograd import Variable


def safe_var(entity, **kwargs):
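    """Wrap ``entity`` in a Variable if it is a raw Tensor; return Variables unchanged."""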
warnings.warn("As of >=pytorch0.4.0 this is no longer necessary",
DeprecationWarning)
if isinstance(entity, Variable):
return entity
elif isinstance(entity, torch._C._TensorBase):
return Variable(entity, **kwargs)
else:
raise Exception("Can't cast %s to a Variable" %
entity.__class__.__name__) |
#!/usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import sys
from os.path import isdir, abspath, dirname, join
from os import _exit
# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from tools.utils import args_error
from tools.options import get_default_options_parser
from tools.options import extract_mcus
from tools.build_api import get_config
from tools.config import Config
from tools.utils import argparse_filestring_type
try:
import tools.private_settings as ps
except ImportError:
ps = object()
if __name__ == '__main__':
# Parse Options
parser = get_default_options_parser(add_clean=False, add_options=False)
parser.add_argument("--source", dest="source_dir", type=argparse_filestring_type, required=True,
default=[], help="The source (input) directory", action="append")
parser.add_argument("--prefix", dest="prefix", action="append",
default=[], help="Restrict listing to parameters that have this prefix")
parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="Verbose diagnostic output")
options = parser.parse_args()
# Target
    if options.mcu is None:
args_error(parser, "argument -m/--mcu is required")
target = extract_mcus(parser, options)[0]
# Toolchain
if options.tool is None:
args_error(parser, "argument -t/--toolchain is required")
toolchain = options.tool[0]
options.prefix = options.prefix or [""]
try:
params, macros, features = get_config(options.source_dir, target, toolchain)
if not params and not macros:
print("No configuration data available.")
_exit(0)
if params:
print("Configuration parameters")
print("------------------------")
for p in sorted(params):
for s in options.prefix:
if p.startswith(s):
print(str(params[p]) if not options.verbose else params[p].get_verbose_description())
break
print("")
print("Macros")
print("------")
if macros:
print('Defined with "macros":', Config.config_macros_to_macros(macros))
print("Generated from configuration parameters:", Config.parameters_to_macros(params))
except KeyboardInterrupt as e:
print("\n[CTRL+c] exit")
except Exception as e:
if options.verbose:
import traceback
traceback.print_exc(file=sys.stdout)
else:
print("[ERROR] %s" % str(e))
sys.exit(1)
|
package exercise27;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Objects;
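/**
 * Finds a value in a matrix whose rows and columns are each sorted in
 * ascending order: binary-search the main diagonal of the current
 * sub-rectangle, then recurse into the top-right and bottom-left blocks
 * that may still contain the target.
 */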
public class SortedMatrixFinder {
private static final Logger logger = LoggerFactory.getLogger(SortedMatrixFinder.class);
    public Result find(double target, double[][] nums) {
logger.debug("Enter with target: {}, nums: {}", target, Arrays.deepToString(nums));
Result found = find(target, nums, 0, nums.length, 0, nums[0].length);
logger.debug("found {}", found);
logger.debug("");
return found;
}
public Result find(double target, double[][] nums,
int dim1Start, int dim1End, int dim2Start, int dim2End) {
logger.debug("Enter with dim1Start: {}, dim1End: {}, dim2Start: {}, dim2End: {}",
dim1Start, dim1End, dim2Start, dim2End);
        int dim1Len = dim1End - dim1Start;
        int dim2Len = dim2End - dim2Start;
        // Guard against empty sub-ranges produced by the recursive splits.
        if (dim1Len <= 0 || dim2Len <= 0) {
            return Result.notFound;
        }
        int dimLen;
if (dim1Len == 1) {
int start = dim2Start;
int end = dim2End;
int mid;
while (start < end) {
mid = (end + start) / 2;
if (Double.compare(nums[dim1Start][mid], target) < 0) {
start = mid + 1;
} else if (Double.compare(nums[dim1Start][mid], target) > 0) {
end = mid;
} else {
return new Result(true, dim1Start, mid);
}
}
return Result.notFound;
} else if (dim2Len == 1) {
int start = dim1Start;
int end = dim1End;
int mid;
while (start < end) {
mid = (end + start) / 2;
if (Double.compare(nums[mid][dim2Start], target) < 0) {
start = mid + 1;
} else if (Double.compare(nums[mid][dim2Start], target) > 0) {
end = mid;
} else {
return new Result(true, mid, dim2Start);
}
}
return Result.notFound;
}
if (dim1Len <= dim2Len) {
dimLen = dim1Len;
} else {
dimLen = dim2Len;
}
logger.debug("dimLen: {}", dimLen);
int start = 0;
int end = dimLen;
int mid;
while (start < end) {
mid = (end + start) / 2;
if (Double.compare(nums[dim1Start + mid][dim2Start + mid], target) < 0) {
start = mid + 1;
} else if (Double.compare(nums[dim1Start + mid][dim2Start + mid], target) > 0) {
end = mid;
} else {
return new Result(true, dim1Start + mid, dim2Start + mid);
}
}
        // start == dimLen means the target exceeds every diagonal element of
        // this sub-rectangle; indexing at start would then run past the range.
        if (start == dimLen
                || Double.compare(nums[dim1Start + start][dim2Start + start], target) > 0) {
            if (start == 0) {
                return Result.notFound;
            }
            start -= 1;
        }
        // The target can now only lie in the top-right or bottom-left block
        // on either side of the diagonal split.
        Result result = find(target, nums,
                dim1Start, dim1Start + start + 1, dim2Start + start + 1, dim2End);
if (result.isFound()) {
return result;
}
result = find(target, nums,
dim1Start + start + 1, dim1End, dim2Start, dim2Start + start + 1);
return result;
}
public static class Result {
boolean found;
int dim1;
int dim2;
public static final Result notFound = new Result(false, 0, 0);
public Result(boolean found, int dim1, int dim2) {
this.found = found;
this.dim1 = dim1;
this.dim2 = dim2;
}
public boolean isFound() {
return found;
}
public int getDim1() {
return dim1;
}
public int getDim2() {
return dim2;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result result = (Result) o;
return found == result.found &&
dim1 == result.dim1 &&
dim2 == result.dim2;
}
@Override
public int hashCode() {
return Objects.hash(found, dim1, dim2);
}
@Override
public String toString() {
return "Result{" +
"found=" + found +
", dim1=" + dim1 +
", dim2=" + dim2 +
'}';
}
}
}
|