a = raw_input()
b = raw_input()
can_be_solved = True
aa = []
bb = []
for i in a.split(' '):
    aa.append(int(i))
for i in b.split(' '):
    bb.append(int(i))
av = aa[2]
while av < aa[1]:
    if len(bb) == 0:
        can_be_solved = False
        break
    maximum = max(bb)
    bb.remove(maximum)
    av += maximum
    av -= 1
print aa[0] - len(bb) if can_be_solved else -1
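# For reference, a sketch of the same greedy idea rewritten for Python 3 (the snippet
# above targets Python 2); it mirrors the logic only, not any problem statement, so the
# variable roles are inferred rather than given.
def solve():
    aa = [int(x) for x in input().split()]
    bb = [int(x) for x in input().split()]
    av = aa[2]
    while av < aa[1]:
        if not bb:
            return -1
        best = max(bb)
        bb.remove(best)
        av += best - 1  # each pick nets (value - 1), as in the loop above
    return aa[0] - len(bb)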
|
/// Free an existing allocation, return how much was removed, if any.
pub fn free_allocation(&mut self, process: ProcessUid, address: usize) -> Option<usize> {
// Before we reduce memory, let's check if we've previously hit a peak:
self.check_if_new_peak();
if let Some(removed) = self
.current_allocations
.entry(process)
.or_default()
.remove(&address)
{
self.remove_memory_usage(removed.callstack_id, removed.size());
Some(removed.size())
} else {
// This allocation doesn't exist; often this will be something
// allocated before Fil tracking was started, but it might also be a
// bug.
#[cfg(not(feature = "fil4prod"))]
if *crate::util::DEBUG_MODE {
self.failed_deallocations += 1;
eprintln!(
"=fil-profile= Your program attempted to free an allocation at an address we don't know about:"
);
eprintln!("=| {:?}", backtrace::Backtrace::new());
}
None
}
} |
def parse_autxt(self):
    self.autxtauids = dict()
    for i, line in enumerate(self.autxtlines):
        if '.reserved.' not in line:
            continue
        pluginid, dot, aukey = line.rpartition('.reserved.')[0].partition('org.lockss.au.')[2].partition('.')
        auid = '%s&%s' % (pluginid, aukey)
        if auid not in self.options.auids:
            continue
        if '.reserved.disabled=' in line:
            self.autxtauids.setdefault(auid, -1)
            continue
        if '.reserved.repository=' in line:
            self.autxtauids[auid] = i
    errors = list()
    for auid in self.options.auids:
        if auid not in self.autxtauids:
            errors.append(auid)
    if len(errors) > 0:
        print 'AUIDs not found in au.txt:'
        for auid in errors:
            print auid
        if not self.options.warn_if_missing:
            sys.exit('%d error%s; exiting' % (len(errors), '' if len(errors) == 1 else 's'))
    for auid in self.options.auids:
        if self.autxtauids.get(auid) == -1:
            self.autxtauids[auid] = len(self.autxtlines)
            self.autxtlines.append('org.lockss.au.%s.%s.reserved.repository=local\\:%s' % (auid.partition('&')[0], auid.partition('&')[2], self.options.defrepo)) |
/**
* Created by rnkrsoft.com on 2019/9/19.
*/
public class Bopomofo4jTest {
@Test
public void testPinyin1() throws Exception {
{
String py = Bopomofo4j.pinyin("在这里输入你要转换的中文,然后点下面APM(Actions Per Minute)是一个在游戏领域常见的概念", ToneType.WITHOUT_TONE, null, null, null);
Assert.assertEquals("zai zhe li shu ru ni yao zhuan huan de zhong wen, ran hou dian xia mianAPM(Actions Per Minute) shi yi ge zai you xi ling yu chang jian de gai nian", py);
}
{
String py = Bopomofo4j.pinyin("在这里输入你要转换的中文,然后点下面APM(Actions Per Minute)是一个在游戏领域常见的概念", ToneType.WITH_VOWEL_TONE, null, null, null);
Assert.assertEquals("zài zhè lǐ shū rù nǐ yào zhuǎn huàn de zhōng wén, rán hòu diǎn xià miànAPM(Actions Per Minute) shì yī gè zài yóu xì lǐng yù cháng jiàn de gài niàn", py);
}
}
@Test
public void testPinyin() throws Exception {
// Bopomofo4j.local();
System.setProperty("bopomofo4j.temp.dir", "./target/temp");
// System.setProperty("bopomofo4j.sandbox.url", "https://oss.sonatype.org/service/local/repositories/comrnkrsoft-1081/content/com/rnkrsoft/bopomofo4j/bopomofo4j/1.0.0/bopomofo4j-1.0.0.jar");
for (int i = 0; i < 10000; i++) {
String py1 = Bopomofo4j.pinyin("I am chinese!1234", ToneType.WITHOUT_TONE, null, null, null);
Assert.assertEquals("I am chinese!1234", py1);
String py2 = Bopomofo4j.pinyin("I am chinese!1234中国人", ToneType.WITH_VOWEL_TONE, null, null, null);
Assert.assertEquals("I am chinese!1234 zhōng guó rén", py2);
String py3 = Bopomofo4j.pinyin("I am chinese!1234我们是患难与共的兄弟", ToneType.WITH_NUMBER_TONE, null, null, null);
Assert.assertEquals("I am chinese!1234 wo3 men0 shi4 huan0 nan0 yu3 gong0 de0 xiong0 di4", py3);
}
}
@Test
public void testCht2chs() throws Exception {
Bopomofo4j.local();
String chs = Bopomofo4j.cht2chs("APM(Actions Per Minute)是一個在遊戲領域常見的概念,主要在Starcraft等實時對戰遊戲中用到。一般職業選手的APM能高達300甚至更多。本測試采用一個簡單的模型來測試你的APM:從50到1,快速點擊相應的圓圈,第一次點擊時開始計時。您有充足的時間準備,所以請不要著急,觀察好之後再出手!");
Assert.assertEquals("APM(Actions Per Minute)是一个在游戏领域常见的概念,主要在Starcraft等实时对战游戏中用到。一般职业选手的APM能高达300甚至更多。本测试采用一个简单的模型来测试你的APM:从50到1,快速点击相应的圆圈,第一次点击时开始计时。您有充足的时间准备,所以请不要着急,观察好之后再出手!", chs);
System.out.println(chs);
}
@Test
public void testChs2cht() throws Exception {
Bopomofo4j.local();
String cht = Bopomofo4j.chs2cht("APM(Actions Per Minute)是一个在游戏领域常见的概念,主要在Starcraft等实时对战游戏中用到。一般职业选手的APM能高达300甚至更多。本测试采用一个简单的模型来测试你的APM:从50到1,快速点击相应的圆圈,第一次点击时开始计时。您有充足的时间准备,所以请不要着急,观察好之后再出手!");
Assert.assertEquals("APM(Actions Per Minute)是一個在遊戲領域常見的概念,主要在Starcraft等實時對戰遊戲中用到。一般職業選手的APM能高達300甚至更多。本測試采用一個簡單的模型來測試你的APM:從50到1,快速點擊相應的圓圈,第一次點擊時開始計時。您有充足的時間準備,所以請不要著急,觀察好之後再出手!", cht);
System.out.println(cht);
}
} |
/*
* Copyright 2005-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openwms.wms.movements.impl;
import org.openwms.core.SpringProfiles;
import org.openwms.wms.movements.commands.MovementMO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import org.springframework.transaction.event.TransactionPhase;
import org.springframework.transaction.event.TransactionalEventListener;
import static org.openwms.wms.movements.events.api.MovementEvent.Type.CANCELLED;
import static org.openwms.wms.movements.events.api.MovementEvent.Type.COMPLETED;
import static org.openwms.wms.movements.events.api.MovementEvent.Type.CREATED;
import static org.openwms.wms.movements.events.api.MovementEvent.Type.MOVED;
/**
* A MovementEventPropagator is active with the {@value SpringProfiles#ASYNCHRONOUS_PROFILE} profile and propagates internal events to the
* outer world over the AMQP protocol.
*
* @author <NAME>
*/
@Profile(SpringProfiles.ASYNCHRONOUS_PROFILE)
@Component
class MovementEventPropagator {
private static final Logger LOGGER = LoggerFactory.getLogger(MovementEventPropagator.class);
private final String exchangeName;
private final AmqpTemplate amqpTemplate;
MovementEventPropagator(
@Value("${owms.movements.exchange-name}") String exchangeName,
AmqpTemplate amqpTemplate) {
this.exchangeName = exchangeName;
this.amqpTemplate = amqpTemplate;
}
@TransactionalEventListener(phase = TransactionPhase.AFTER_COMMIT)
public void onEvent(MovementEvent event) {
var movement = event.getSource();
var vo = MovementMO.newBuilder()
.pKey(movement.getPersistentKey())
.transportUnitBK(movement.getTransportUnitBk().getValue())
.initiator(movement.getInitiator())
.target(movement.getTargetLocationGroup())
.build();
switch (event.getType()) {
case CREATED:
LOGGER.info("Movement has been CREATED [{}]", movement);
amqpTemplate.convertAndSend(exchangeName, "movement.event.created",
org.openwms.wms.movements.events.api.MovementEvent.newBuilder()
.type(CREATED)
.movement(vo)
.build()
);
break;
case CANCELLED:
LOGGER.info("Movement has been CANCELLED [{}]", movement);
amqpTemplate.convertAndSend(exchangeName, "movement.event.cancelled",
org.openwms.wms.movements.events.api.MovementEvent.newBuilder()
.type(CANCELLED)
.movement(vo)
.build()
);
break;
case COMPLETED:
LOGGER.info("Movement has been COMPLETED [{}]", movement);
amqpTemplate.convertAndSend(exchangeName, "movement.event.completed",
org.openwms.wms.movements.events.api.MovementEvent.newBuilder()
.type(COMPLETED)
.movement(vo)
.build()
);
break;
case MOVED:
LOGGER.info("Movement has been MOVED [{}]", movement);
amqpTemplate.convertAndSend(exchangeName, "movement.event.moved",
org.openwms.wms.movements.events.api.MovementEvent.newBuilder()
.type(MOVED)
.movement(vo)
.build()
);
break;
default:
LOGGER.warn("MovementEvent of type [{}] is currently not supported", event.getType());
}
}
}
|
def read_uniprot(verbose, file, uniprot_to_transporters):
    # gzip, sys, and COLUMN_DELIMITER are expected to come from the enclosing module.
    uniref_to_uniprot = {}
    try:
        file_handle = gzip.open(file)
        line = file_handle.readline()
    except EnvironmentError:
        sys.exit("Unable to read file: " + file)
    count = 0
    while line:
        data = line.rstrip().split(COLUMN_DELIMITER)
        count += 1
        if len(data) == 2:
            uniprot, uniref = data
            if uniprot in uniprot_to_transporters:
                if uniref not in uniref_to_uniprot:
                    uniref_to_uniprot[uniref] = set()
                uniref_to_uniprot[uniref].add(uniprot)
        if verbose and count % 100000 == 0:
            print("Read " + str(count) + " lines")
        line = file_handle.readline()
    file_handle.close()
    return uniref_to_uniprot |
// Validate ensures the CreateOrderMsg is valid
func (m CreateOrderMsg) Validate() error {
var errs error
errs = errors.AppendField(errs, "Metadata", m.Metadata.Validate())
errs = errors.AppendField(errs, "TraderID", m.Trader.Validate())
errs = errors.AppendField(errs, "OrderBookID", validateID(m.OrderBookID))
if m.Offer == nil {
errs = errors.AppendField(errs, "Offer", errors.ErrEmpty)
} else if err := m.Offer.Validate(); err != nil {
errs = errors.AppendField(errs, "Offer", err)
} else if !m.Offer.IsPositive() {
errs = errors.Append(errs,
errors.Field("Offer", errors.ErrInput, "offer must be positive"))
}
if err := m.Price.Validate(); err != nil {
errs = errors.AppendField(errs, "Price", err)
} else if !m.Price.IsPositive() {
errs = errors.Append(errs,
errors.Field("Price", errors.ErrInput, "price must be positive"))
}
return errs
} |
'use strict';
import * as path from 'path';
import * as fsutils from './Utilities/FsUtils';
import * as os from 'os';
import * as dns from 'dns';
import { IConfig, RunMode } from './IConfig';
import { argv as clArgs } from 'yargs';
export let config: IConfig = undefined;
const configFilePath: string = path.resolve( './config.json' );
function initConfig() {
config = fsutils.readJsonFile(configFilePath) as IConfig;
config.testContext = {
testName: clArgs['teststorun'],
testConfigName: clArgs['testconfig'],
browser: clArgs['browser'],
thresholdInMilliseconds: config.testContext.thresholdInMilliseconds,
testLogRoot: config.testContext.testLogRoot,
testRunOptions: clArgs['testrunoptions'],
timeoutInMinutes: clArgs['timeoutinminutes'],
reportMailFrom: config.testContext.reportMailFrom,
reportMailcc: config.testContext.reportMailcc
};
if (config.runContext.mode.toString() === 'full') {
config.runContext.mode = RunMode.full;
} else if (config.runContext.mode.toString() === 'generateLoad') {
config.runContext.mode = RunMode.generateLoad;
}
const hostName = os.hostname();
dns.lookup(hostName, function(err, ip) {
console.log('IP: ' + ip);
(<any>dns).lookupService(ip, 0, function (err, hostname, service) {
if (err) {
console.log('dns lookupService error: ', err);
} else {
config.enviroment.testRunEnviroment = hostname;
}
});
});
}
initConfig(); |
/**
this function should be called before the app exits to stop
the service
*/
void NTService::Stop(void)
{
SetStatus(SERVICE_STOP_PENDING,NO_ERROR, 0, 1, 60000);
StopService();
SetStatus(SERVICE_STOPPED, NO_ERROR, 0, 1, 1000);
} |
// Print all successfully completed jobs
func printCompletedJobs(js []*libferry.Job) {
header := []string{
"Status",
"Completed",
"Duration",
"Execution time",
"Description",
}
table := tablewriter.NewWriter(os.Stdout)
table.SetHeader(header)
table.SetBorder(false)
i := 0
for _, j := range js {
if i >= maxPrintJobs && !allJobs {
break
}
i++
table.Append([]string{
"success",
j.Timing.End.Format("2006-01-02 15:04:05"),
j.TotalTime().String(),
j.ExecutionTime().String(),
j.Description,
})
}
table.Render()
} |
package main
import (
"github.com/chippydip/go-sc2ai/api"
"github.com/chippydip/go-sc2ai/botutil"
"github.com/chippydip/go-sc2ai/client"
"github.com/chippydip/go-sc2ai/runner"
)
func main() {
// Play a random map against a medium difficulty computer
runner.SetComputer(api.Race_Random, api.Difficulty_Medium, api.AIBuild_RandomBuild)
// Create the agent and then start the game
botutil.SetGameVersion()
agent := client.AgentFunc(runAgent)
runner.RunAgent(client.NewParticipant(api.Race_Protoss, agent, "ProbeRush"))
}
|
// NewGenerator create default implementation of `jobsolver.TCGenerator`
func NewGenerator() jobsolver.TCGenerator {
return &defaultGenerator{
sandbox: sandbox.New(),
compiler: compiler.New(),
}
} |
package com.codepath.rmulla.gridimagesearch.models;
import java.io.Serializable;
/**
* Created by rmulla on 9/24/15.
*/
public class ImageFilter implements Serializable{
public String imageSize;
public String colorFilter;
public String imageType;
public String siteFilter;
}
|
def _check_input_parameters(Hc, gamma, meta_classifier):
    if not isinstance(Hc, (float, int)):
        raise ValueError('Parameter Hc should be a number. Currently Hc = {}'.format(type(Hc)))
    if Hc < 0.5:
        raise ValueError('Parameter Hc should be at least 0.5. Currently Hc = {}'.format(Hc))
    if not isinstance(gamma, float):
        raise ValueError('Parameter gamma should be a float. Currently gamma = {}'.format(type(gamma)))
    if gamma < 0.5:
        raise ValueError('Parameter gamma should be at least 0.5. Currently gamma = {}'.format(gamma))
    if meta_classifier is not None and "predict_proba" not in dir(meta_classifier):
        raise ValueError("The meta-classifier should output probability estimates") |
def run(sniffer_instance=None, wait_time=0.5, clear=True, args=(), debug=False):
    if sniffer_instance is None:
        sniffer_instance = ScentSniffer()
    if debug:
        scanner = Scanner(
            sniffer_instance.watch_paths,
            scent=sniffer_instance.scent, logger=sys.stdout)
    else:
        scanner = Scanner(
            sniffer_instance.watch_paths, scent=sniffer_instance.scent)
    sniffer_instance.set_up(tuple(args), clear, debug)
    sniffer_instance.observe_scanner(scanner)
    scanner.loop(wait_time) |
/**
* The Film class is responsible for writing
* the final image of the ray tracer to an image file (PNG by default)
* @author Abdullah Emad
* @version 1.0
*/
public class Film {
private BufferedImage image;
private int nCommited;
private boolean[][] pixelCommited;
private String outputFileName = "out.png";
/**
*
* @param screenDimensions: The dimensions of the screen
*/
public Film(ScreenDimensions screenDimensions) {
int width = screenDimensions.getWidth();
int height = screenDimensions.getHeight();
image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
nCommited = 0;
pixelCommited = new boolean[height][width];
}
/**
*
* @param screenDimensions the dimensions of the screen
* @param outputFileName the output filename
*/
public Film(ScreenDimensions screenDimensions, String outputFileName){
this(screenDimensions);
this.outputFileName = outputFileName;
}
/**
* Commits a given color to the pixel specified by the sample
* @param sample A sample representing the x y coordinate of the pixel on the screen
* @param color The color to be written to this pixel
*/
public void commit(Sample sample, Color color) {
int i = sample.getX();
int j = sample.getY();
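// Pack the colour into a 32-bit ARGB pixel: full alpha in the top byte, then 8 bits
// each for red, green and blue scaled from the (presumably [0, 1]) colour components.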
int c = 0xFF << 24 | ((int)(((long)0xFF) * color.getR())) << 16 | ((int)(((long)0xFF) * color.getG())) << 8 |
((int)(((long)0xFF) * color.getB()));
image.setRGB(i, j, c);
if(!pixelCommited[j][i]){
++nCommited;
pixelCommited[j][i] = true;
}
}
/**
* Writes the Film to a PNG image file at the configured output path ("out.png" by default)
* @throws Exception throws an Exception if the image was not fully committed or an IOException occurs
*/
public void writeImage() throws Exception {
if(nCommited != (image.getWidth() * image.getHeight())){
throw new IncompleteImageException();
}
boolean ret = ImageIO.write(image, "png", new File(outputFileName));
if(!ret){
throw new IOException();
}
}
} |
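// A hypothetical usage sketch for the Film class above. The ScreenDimensions, Sample and
// Color constructors used here are assumptions (only their getters appear in the snippet);
// the output file name is arbitrary.
class FilmUsageSketch {
    static void renderSolidRed() throws Exception {
        ScreenDimensions dims = new ScreenDimensions(640, 480);
        Film film = new Film(dims, "render.png");
        for (int y = 0; y < dims.getHeight(); y++) {
            for (int x = 0; x < dims.getWidth(); x++) {
                // commit() takes the sample's x/y pixel coordinates and a colour
                film.commit(new Sample(x, y), new Color(1.0, 0.0, 0.0));
            }
        }
        film.writeImage(); // only succeeds once every pixel has been committed
    }
}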
// returns minimum capacity to hold
// size characters and count table
// entries, including alignment.
std::size_t
headers::
bytes_needed(
std::size_t size,
std::size_t count) noexcept
{
return align_up(size +
count * sizeof(
detail::fitem));
} |
<filename>src/utils/Assertion.ts
import { Assertion } from '../models/QuestionDetailResponse';
export const checkAssertion = (results: any[], code: string, assertions: Assertion[]) => {
const failedAssertions: string[] = [];
assertions.forEach((assertion) => {
switch (assertion.assertion) {
case "'+' in code":
if (code.search(/\+/) === -1) {
failedAssertions.push(assertion.id);
}
break;
case 'add(0, 0) === undefined':
if (results.includes(undefined)) {
failedAssertions.push(assertion.id);
}
break;
default:
break;
}
});
return failedAssertions;
};
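// A hypothetical usage sketch for checkAssertion above; the Assertion object shape is
// inferred from how it is read in the switch statement, so treat the literals as assumptions.
const failed = checkAssertion(
    [undefined],                             // results: add(0, 0) returned undefined
    'function add(a, b) { return a - b; }',  // code: contains no "+"
    [
        { id: 'uses-plus', assertion: "'+' in code" } as Assertion,
        { id: 'returns-value', assertion: 'add(0, 0) === undefined' } as Assertion,
    ],
);
// failed === ['uses-plus', 'returns-value']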
|
Intracranial pressure changes during infusions of verapamil as compared with sodium nitroprusside.
Sodium nitroprusside and nitroglycerine increase intracranial pressure, and thus may compromise cerebral perfusion or even cause herniation of brain tissue in patients with an already elevated intracranial pressure.1-4 In addition, cyanide toxicity, tachyphylaxis, and rebound hypertension have all been observed following sodium nitroprusside infusion.5-7 Verapamil, a calcium channel blocking agent, is effective in reducing blood pressure without some of the adverse effects of sodium nitroprusside.8 This study was designed to compare the changes in intracranial pressure and heart rate during verapamil- and sodium nitroprusside-induced hypotension in normal cats as well as in cats with artificially elevated intracranial pressure. |
/**
* Referenced from: Dr. Marzieh Ahmadzadeh,
* ALSU Dragon Book (by: Alfred V. Aho,
* Monica S. Lam,
* Ravi Sethi,
* Jeffrey D. Ullman)<br/><br/>
* Symbol Object
* @author Michael Valdron
* @version March 6, 2017
*
*/
private static class Symbol {
public String name;
public String type;
public String value;
public Symbol(String n) {
name = new String(n);
type = null;
value = null;
}
public Symbol(String n, String t) {
name = new String(n);
type = new String(t);
value = null;
}
public Symbol(String n, String t, String v) {
name = new String(n);
type = new String(t);
value = new String(v);
}
} |
<filename>libs/laya/particle/shader/value/ParticleShaderValue.ts
import { ParticleShader } from "../ParticleShader"
import { Value2D } from "../../../webgl/shader/d2/value/Value2D";
import { RenderState2D } from "../../../webgl/utils/RenderState2D";
import { ShaderValue } from "../../../webgl/shader/ShaderValue";
/**
* @internal
*/
export class ParticleShaderValue extends Value2D {
private static pShader: ParticleShader = null; //new ParticleShader();
/*
public var a_CornerTextureCoordinate:Array=[4, WebGLContext.FLOAT, false, 116, 0];
public var a_Position:Array=[3, WebGLContext.FLOAT, false, 116, 16];
public var a_Velocity:Array=[3, WebGLContext.FLOAT, false, 116, 28];
public var a_StartColor:Array=[4, WebGLContext.FLOAT, false, 116, 40];
public var a_EndColor:Array=[4, WebGLContext.FLOAT, false, 116, 56];
public var a_SizeRotation:Array=[3, WebGLContext.FLOAT, false, 116, 72];
public var a_Radius:Array = [2, WebGLContext.FLOAT, false, 116, 84];
public var a_Radian:Array=[4, WebGLContext.FLOAT, false, 116, 92];
public var a_AgeAddScale:Array=[1, WebGLContext.FLOAT, false, 116, 108];
public var a_Time:Array=[1, WebGLContext.FLOAT, false, 116, 112];
*/
u_CurrentTime: number;
u_Duration: number;
u_Gravity: Float32Array; //v3
u_EndVelocity: number;
u_texture: any;
constructor() {
super(0, 0);
if (!ParticleShaderValue.pShader) {
ParticleShaderValue.pShader = new ParticleShader();
}
/* moved into ParticleShader:
this._attribLocation = ['a_CornerTextureCoordinate', 0, 'a_Position', 1, 'a_Velocity', 2, 'a_StartColor', 3,
'a_EndColor',4,'a_SizeRotation',5,'a_Radius',6,'a_Radian',7,'a_AgeAddScale',8,'a_Time',9];
*/
}
/**
* @override
*/
upload(): void {
var size: any[] = this.size;
size[0] = RenderState2D.width;
size[1] = RenderState2D.height;
this.alpha = this.ALPHA * RenderState2D.worldAlpha;
ParticleShaderValue.pShader.upload((<ShaderValue>this));
}
}
|
package com.williballenthin.HexView;
import javax.swing.*;
import javax.swing.border.BevelBorder;
import javax.swing.event.CaretEvent;
import javax.swing.event.CaretListener;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultHighlighter;
import javax.swing.text.JTextComponent;
import java.awt.*;
import java.nio.ByteBuffer;
/**
* HexView is a standard three-paned hex editor widget that displays binary data.
*
* Note that this view does not do any intelligent paging of the data. Expect it to build three strings
* whose combined length is proportional to the size of the given ByteBuffer, so it is probably not a good fit for large files.
*/
public class HexView extends JPanel implements CaretListener {
private final static int DEFAULT_BYTES_PER_LINE = 0x10;
private final static char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
private final int _bytesPerLine;
private final ByteBuffer _buf;
private final JTextComponent _offsetView;
private final JTextComponent _hexView;
private final JTextComponent _asciiView;
private final JLabel _statusLabel;
private final Color _highlightColor;
private final DefaultHighlighter.DefaultHighlightPainter _highlighterPainter;
/**
* Uses the default 0x10 bytes per line.
*
* @param buf The binary data to display within this hex view.
*/
public HexView(ByteBuffer buf) {
this(buf, DEFAULT_BYTES_PER_LINE);
}
/**
* @param buf The binary data to display within this hex view.
* @param bytesPerLine The number of bytes to display per line.
*/
public HexView(ByteBuffer buf, int bytesPerLine) {
super(new BorderLayout());
this._buf = buf;
this._bytesPerLine = bytesPerLine;
Font font = new Font("Monospaced", Font.PLAIN, 12);
this._offsetView = new JTextArea();
this._hexView = new JTextArea();
this._asciiView = new JTextArea();
JPanel _statusView = new JPanel();
// status bar
_statusView.setBorder(new BevelBorder(BevelBorder.LOWERED));
this.add(_statusView, BorderLayout.SOUTH);
_statusView.setPreferredSize(new Dimension(this.getWidth(), 18));
_statusView.setLayout(new BoxLayout(_statusView, BoxLayout.X_AXIS));
this._statusLabel = new JLabel("");
this._statusLabel.setHorizontalAlignment(SwingConstants.LEFT);
_statusView.add(this._statusLabel);
// right panes are split
JSplitPane _splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, this._hexView, this._asciiView);
_splitPane.setResizeWeight(0.5);
_splitPane.setOneTouchExpandable(true);
_splitPane.setContinuousLayout(true);
// three panes sitting together
JPanel panes = new JPanel(new BorderLayout());
panes.add(this._offsetView, BorderLayout.WEST);
panes.add(_splitPane, BorderLayout.CENTER);
JScrollPane scroller = new JScrollPane(panes);
this.add(scroller, BorderLayout.CENTER);
_offsetView.setFont(font);
_hexView.setFont(font);
_asciiView.setFont(font);
StringBuilder offsetSB = new StringBuilder();
StringBuilder hexSB = new StringBuilder();
StringBuilder asciiSB = new StringBuilder();
buf.position(0x0);
for (int i = 0; i < buf.limit(); i++) {
if (i % this._bytesPerLine == 0x0) {
offsetSB.append(String.format("0x%x \n", i));
}
byte b = buf.get();
char[] hex = new char[3];
hex[0] = HEX_DIGITS[(b >>> 4) & 0x0F];
hex[1] = HEX_DIGITS[b & 0x0F];
hex[2] = ' ';
hexSB.append(hex);
if (b >= ' ' && b <= '~') {
asciiSB.append((char)b);
} else {
asciiSB.append('.');
}
if (i % this._bytesPerLine == this._bytesPerLine - 1) {
hexSB.append("\n");
asciiSB.append("\n");
}
}
this._offsetView.setText(offsetSB.toString());
this._hexView.setText(hexSB.toString());
this._asciiView.setText(asciiSB.toString());
this._hexView.addCaretListener(this);
this._asciiView.addCaretListener(this);
this._asciiView.setSelectedTextColor(this._asciiView.getForeground());
this._hexView.setSelectedTextColor(this._asciiView.getForeground());
this._highlightColor = this._hexView.getSelectionColor();
this._highlighterPainter = new DefaultHighlighter.DefaultHighlightPainter(this._highlightColor);
}
/**
* clearHighlight removes any colors applied to the text views.
*/
private void clearHighlight() {
this._asciiView.getHighlighter().removeAllHighlights();
this._hexView.getHighlighter().removeAllHighlights();
}
/**
* setHighlight colors the given byte range.
* @param startByte The starting byte index of the selection.
* @param endByte The ending byte index of the selection.
*/
private void setHighlight(int startByte, int endByte) {
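// Translate byte offsets into text offsets: each full row of bytes contributes one extra
// newline character, and in the hex pane every byte is rendered as three characters
// (two hex digits plus a space).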
int startRows = (startByte - (startByte % this._bytesPerLine)) / this._bytesPerLine;
int endRows = (endByte - (endByte % this._bytesPerLine)) / this._bytesPerLine;
this.clearHighlight();
try {
this._asciiView.getHighlighter().addHighlight(startByte + startRows, endByte + endRows, this._highlighterPainter);
this._hexView.getHighlighter().addHighlight((startByte * 3) + startRows, (endByte * 3) + endRows, this._highlighterPainter);
} catch (BadLocationException e1) {
System.out.println("bad location");
}
}
/**
* setSelection sets the given byte range as "selected", which from a GUI perspective means the
* bytes are highlighted, and the status bar updated.
* @param startByte The starting byte index of the selection.
* @param endByte The ending byte index of the selection.
*/
private void setSelection(int startByte, int endByte) {
this.setHighlight(startByte, endByte);
if (startByte != endByte) {
/**
* @param 1 Start
* @param 2 End
* @param 3 Len
*/
String statusTemplate = "Selection: %1$d to %2$d (len: %3$d) [0x%1$x to 0x%2$x (len: 0x%3$x)]";
this._statusLabel.setText(String.format(statusTemplate, startByte, endByte, endByte - startByte));
} else {
/**
* @param 1 Start
*/
String statusTemplate = "Position: %1$d [0x%1$x]";
this._statusLabel.setText(String.format(statusTemplate, startByte));
}
}
// these flags are used to ensure we don't end up in a circular event loop where
// one component fires an event on the other, who volley's it back.
private int _hexLastSelectionStart = 0;
private int _hexLastSelectionEnd = 0;
private int _asciiLastSelectionStart = 0;
private int _asciiLastSelectionEnd = 0;
@Override
public void caretUpdate(CaretEvent e) {
if (e.getMark() == e.getDot()) {
this.clearHighlight();
}
if (e.getSource() == this._asciiView) {
int startByte = e.getMark();
int endByte = e.getDot();
if (startByte > endByte) {
int t = endByte;
endByte = startByte;
startByte = t;
}
// the number of line endings before the start,end points
int startRows = (startByte - (startByte % this._bytesPerLine)) / this._bytesPerLine;
int endRows = (endByte - (endByte % this._bytesPerLine)) / this._bytesPerLine;
// the byte index of the start,end points in the ASCII view
startByte = startByte - startRows;
endByte = endByte - endRows;
// avoid the loop
if (_asciiLastSelectionStart == startByte && _asciiLastSelectionEnd == endByte) {
return;
}
_asciiLastSelectionStart = startByte;
_asciiLastSelectionEnd = endByte;
this.setSelection(startByte, endByte);
} else if (e.getSource() == this._hexView) {
int startByte = e.getMark();
int endByte = e.getDot();
if (startByte > endByte) {
int t = endByte;
endByte = startByte;
startByte = t;
}
// the number of line endings before the start,end points
int startRows = (startByte - (startByte % this._bytesPerLine)) / (3 * this._bytesPerLine);
int endRows = (endByte - (endByte % this._bytesPerLine)) / (3 * this._bytesPerLine);
// the byte index of the start,end points in the ASCII view
startByte = startByte - startRows;
startByte = startByte / 3;
endByte = endByte - endRows;
endByte = endByte / 3;
if (_hexLastSelectionStart == startByte && _hexLastSelectionEnd == endByte) {
return;
}
_hexLastSelectionStart = startByte;
_hexLastSelectionEnd = endByte;
this.setSelection(startByte, endByte);
} else {
System.out.println("from unknown");
}
}
}
|
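// A hypothetical usage sketch for the HexView widget above; the frame handling and sample
// bytes are assumptions, only HexView's own constructor comes from the class itself.
class HexViewUsageSketch {
    static void show() {
        byte[] sample = "Hello, hex world!".getBytes();
        javax.swing.JFrame frame = new javax.swing.JFrame("Hex viewer");
        frame.getContentPane().add(new HexView(java.nio.ByteBuffer.wrap(sample)));
        frame.pack();
        frame.setVisible(true);
    }
}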
use std::io::BufRead;
use std::str::FromStr;
use quick_xml::events::BytesStart;
use quick_xml::Reader;
use crate::error::Error;
use crate::error::InvalidValue;
use crate::parser::utils::attributes_to_hashmap;
use crate::parser::utils::decode_attribute;
use crate::parser::utils::get_evidences;
use crate::parser::FromXml;
#[derive(Debug, Clone)]
/// Describes the location where a feature can be found within a sequence.
pub enum FeatureLocation {
Range(Position, Position),
Position(Position),
}
impl FromXml for FeatureLocation {
fn from_xml<B: BufRead>(
event: &BytesStart,
reader: &mut Reader<B>,
buffer: &mut Vec<u8>,
) -> Result<Self, Error> {
debug_assert_eq!(event.local_name(), b"location");
let mut optbegin: Option<Position> = None;
let mut optend: Option<Position> = None;
let mut optposition: Option<Position> = None;
parse_inner! {event, reader, buffer,
e @ b"begin" => {
let pos = Position::from_xml(&e, reader, buffer)?;
if optbegin.replace(pos).is_some() {
return Err(Error::DuplicateElement("begin", "location"));
}
},
e @ b"end" => {
let pos = Position::from_xml(&e, reader, buffer)?;
if optend.replace(pos).is_some() {
return Err(Error::DuplicateElement("end", "location"));
}
},
e @ b"position" => {
let pos = Position::from_xml(&e, reader, buffer)?;
if optposition.replace(pos).is_some() {
return Err(Error::DuplicateElement("position", "location"));
}
}
}
if let Some(pos) = optposition {
if optbegin.is_some() {
Err(Error::DuplicateElement("begin", "location"))
} else if optend.is_some() {
Err(Error::DuplicateElement("end", "location"))
} else {
Ok(FeatureLocation::Position(pos))
}
} else {
let begin = optbegin.ok_or(Error::MissingElement("begin", "location"))?;
let end = optend.ok_or(Error::MissingElement("end", "location"))?;
Ok(FeatureLocation::Range(begin, end))
}
}
}
// ---------------------------------------------------------------------------
#[derive(Debug, Clone)]
pub struct Position {
pub pos: Option<usize>,
pub status: Status,
pub evidence: Vec<usize>,
}
impl FromXml for Position {
fn from_xml<B: BufRead>(
event: &BytesStart,
reader: &mut Reader<B>,
buffer: &mut Vec<u8>,
) -> Result<Self, Error> {
debug_assert!(
event.local_name() == b"begin"
|| event.local_name() == b"end"
|| event.local_name() == b"position"
);
let attr = attributes_to_hashmap(event)?;
let status = match decode_attribute(event, reader, "status", "position") {
Ok(status) => status,
Err(Error::MissingAttribute(_, _)) => Status::default(),
Err(other) => return Err(other),
};
let evidence = get_evidences(reader, &attr)?;
let pos = attr
.get(&b"position"[..])
.map(|x| x.unescape_and_decode_value(reader))
.transpose()?
.map(|x| usize::from_str(&x))
.transpose()?;
reader.read_to_end(event.local_name(), buffer)?;
Ok(Position {
pos,
status,
evidence,
})
}
}
// ---------------------------------------------------------------------------
#[derive(Debug, Clone)]
pub enum Status {
Certain,
Uncertain,
LessThan,
GreaterThan,
Unknown,
}
impl Default for Status {
fn default() -> Self {
Status::Certain
}
}
impl FromStr for Status {
type Err = InvalidValue;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"certain" => Ok(Status::Certain),
"uncertain" => Ok(Status::Uncertain),
"less than" => Ok(Status::LessThan),
"greater than" => Ok(Status::GreaterThan),
"unknown" => Ok(Status::Unknown),
other => Err(InvalidValue::from(other)),
}
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package model;
/**
*
* @author msi
*/
public class Lloc {
int lloc;
String lloc_bloc;
String lloc_passadis;
String lloc_lleixa;
public Lloc(int lloc, String lloc_bloc, String lloc_passadis, String lloc_lleixa) {
this.lloc = lloc;
this.lloc_bloc = lloc_bloc;
this.lloc_passadis = lloc_passadis;
this.lloc_lleixa = lloc_lleixa;
}
public int getLloc() {
return lloc;
}
public String getLloc_bloc() {
return lloc_bloc;
}
public String getLloc_passadis() {
return lloc_passadis;
}
public String getLloc_lleixa() {
return lloc_lleixa;
}
public void setLloc(int lloc) {
this.lloc = lloc;
}
public void setLloc_bloc(String lloc_bloc) {
this.lloc_bloc = lloc_bloc;
}
public void setLloc_passadis(String lloc_passadis) {
this.lloc_passadis = lloc_passadis;
}
public void setLloc_lleixa(String lloc_lleixa) {
this.lloc_lleixa = lloc_lleixa;
}
public Lloc() {
}
}
|
/**
* Wrapper to ensure task executes on the runtime thread
*
* @param locationProviders list of location providers to enable by registering
* the providers with the OS
*/
public void enableLocationProviders(final HashMap<String, LocationProviderProxy> locationProviders)
{
if (!TiApplication.isUIThread()) {
runInUiThread(new CommandNoReturn() {
@Override
public void execute() {
enableLocationProviders(locationProviders);
}
}, false);
return;
}
doEnableLocationProviders(locationProviders);
} |
<reponame>liuhc8/Aperture<gh_stars>10-100
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import org.apache.commons.cli.*;
public class ApertureMain {
public final static String APERTURE_VERSION="1.2";
public static int lCode,lPos,rCode,rPos;
public static boolean debug=false;
//public static Integer sync=new Integer(0);
private void readClassify(String r1,String r2,String index,String workDir,String projectName,
int r1BarStart,int r2BarStart,int r1BarLen,int r2BarLen,int r1InsStart,int r2InsStart,int nthreads,int mergeK,
int minIndelLen,double similarity,double maxBpScore,String[] commands)
throws FileNotFoundException, IllegalBioFileException, IllegalThreadNumberException,
IOException, InterruptedException, ExecutionException, ClassNotFoundException, IllegalPositionException
{
Path r1Path=Paths.get(r1);
ensureFastqFileExist(r1Path);
Path r2Path=Paths.get(r2);
ensureFastqFileExist(r2Path);
Path ttPath=Paths.get(index+".tt");
Path kcPath=Paths.get(index+".km");
Path longkcPath=Paths.get(index+".long.km");
Path spacedkcPath=Paths.get(index+".spaced.km");
Path ciPath=Paths.get(index+".ci");
ensureIndexFileExist(ttPath);
ensureIndexFileExist(kcPath);
ensureIndexFileExist(longkcPath);
ensureIndexFileExist(spacedkcPath);
ensureIndexFileExist(ciPath);
Path outDir=Paths.get(workDir);
tryCreateWorkDir(outDir);
Path outVcfPath=Paths.get(workDir,projectName+".sv.vcf.gz");
Path mergedDataPath=Paths.get(workDir,projectName+".bp.dat");
Path[] tempDataList=createTempDataPaths(nthreads,workDir,projectName);
Path[] tempIndexList=createTempIndexPaths(nthreads,workDir,projectName);
long time0=System.currentTimeMillis();
System.out.print("Loading Reference...");
ReadClassifier classifier=new ReadClassifier(r1Path,r2Path,kcPath,longkcPath,spacedkcPath,nthreads,tempDataList,tempIndexList,
r1BarStart,r2BarStart,r1BarLen,r2BarLen,r1InsStart,r2InsStart);
classifier.loadRef();
System.out.println("Done!");
long time1=System.currentTimeMillis();
long interval1=(time1-time0)/1000;
System.out.print("K-mer Based Searching...");
int[] blockCntList=classifier.classify();
classifier.clean();
classifier=null;
System.out.println("Done!");
long time2=System.currentTimeMillis();
long interval2=(time2-time1)/1000;
System.out.print("Sorting Candidates...");
ResultsSorter sorter=new ResultsSorter(tempDataList,tempIndexList,nthreads,mergedDataPath,blockCntList);
BpIndexList finalIndexlist=sorter.sortResults();
tryDeleteFiles(tempDataList);
tryDeleteFiles(tempIndexList);
tempDataList=null;
tempIndexList=null;
sorter=null;
System.out.println("Done!");
long time3=System.currentTimeMillis();
long interval3=(time3-time2)/1000;
System.out.print("Clustering Breakpoint Candidates...");
BpClusterManager mergeManager=new BpClusterManager(ttPath,workDir,projectName,nthreads,mergeK,similarity,mergedDataPath);
SVCollection svCollection=mergeManager.mergeBreakpoints(finalIndexlist);
tryDeleteAFile(mergedDataPath);
mergedDataPath=null;
mergeManager=null;
svCollection.merge();
Object[] vcfRecordMatrix=svCollection.transAndFilter(r1BarLen>0&&r2BarLen>0,minIndelLen,maxBpScore);
svCollection.clean();
svCollection=null;
System.out.println("Done!");
long time4=System.currentTimeMillis();
long interval4=(time4-time3)/1000;
System.out.print("Filtering And Saving...");
// fw.write("Loading Reference: "+Long.toString(interval1)+"s\n");
// fw.write("Classifying Reads: "+Long.toString(interval2)+"s\n");
// fw.write("Merging Reads: "+Long.toString(interval3)+"s\n");
// fw.write("Merging Breakpoints: "+Long.toString(interval4)+"s\n");
// fw.write("threads: "+Integer.toString(nthreads)+"s\n");
VCFWriter vcfWriter=new VCFWriter(ciPath,outVcfPath,projectName,commands,vcfRecordMatrix);
try {
vcfWriter.sortAndWriteVcf();
vcfWriter.flush();
}finally {
vcfWriter.close();
}
vcfWriter=null;
for(int i=0,len=vcfRecordMatrix.length;i<len;++i) {
ArrayList<VCFRecord> vcfList=(ArrayList<VCFRecord>)vcfRecordMatrix[i];
vcfList.clear();
vcfRecordMatrix[i]=null;
}
vcfRecordMatrix=null;
System.out.println("Done!");
}
private void databaseBuild(String fasta,String vcf,String save,int nWorkers,int k,int longk,int spacedk,int jump,int minSegLen,int maxSegLen,int geneCodeLen) throws IllegalPositionException, IllegalBioFileException, IOException, InterruptedException {
Path fastaPath=Paths.get(fasta);
Path faiPath=Paths.get(fasta+".fai");
Path vcfPath=Paths.get(vcf);
ensureIndexFileExist(fastaPath);
ensureIndexFileExist(faiPath);
ensureIndexFileExist(vcfPath);
Path ttPath=Paths.get(save+".tt");
Path kcPath=Paths.get(save+".km");
Path longkcPath=Paths.get(save+".long.km");
Path spacedkcPath=Paths.get(save+".spaced.km");
Path ciPath=Paths.get(save+".ci");
DatabaseBuilder builder=new DatabaseBuilder(fastaPath,faiPath,vcfPath,ttPath,kcPath,longkcPath,spacedkcPath,ciPath,nWorkers,k,longk,spacedk,jump,minSegLen,maxSegLen,geneCodeLen);
builder.buildDatabase();
}
public static void main(String[] args) {
System.out.println("Aperture (Version: "+APERTURE_VERSION+")");
ApertureMain aperture=new ApertureMain();
if(args.length==0) {
aperture.showApertureHelp();
}else if(args[0].equals("index")) {
try {
aperture.parseArgForIndex(args,aperture);
}catch(Exception e){
e.printStackTrace();
}
aperture.showArgs(args);
}else if(args[0].equals("call")) {
try {
aperture.parseArgForCall(args,aperture);
}catch(Exception e) {
e.printStackTrace();
}
aperture.showArgs(args);
}else {
System.out.println("Invalid command: " + args[0]+" !");
aperture.showApertureHelp();
}
}
private void showApertureHelp() {
System.out.println("Aperture Help");
System.out.println("Description: Alignment-free detection of structural variations and viral integrations in circulating tumor DNA");
System.out.println("Contact: <NAME> <<EMAIL>>");
System.out.println("");
System.out.println("Usage: java -jar aperture.jar <command> <arguments>");
System.out.println("");
System.out.println("Commands:");
System.out.println("");
System.out.println(" index Build index for Aperture");
System.out.println(" call Discover SVs and viral integrations");
System.out.println("");
}
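// Example invocations of the two commands described above (file names and values are
// hypothetical; the options are the ones registered in parseArgForIndex/parseArgForCall):
//   java -jar aperture.jar index -R hg19.fa -V dbsnp_common.vcf -O ./index/hg19 -T 8
//   java -jar aperture.jar call -1 sample_R1.fq.gz -2 sample_R2.fq.gz -I ./index/hg19 \
//        -D ./out -P sample -1BS 0 -2BS 0 -1BL 8 -2BL 8 -1S 8 -2S 8 -T 8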
private void parseArgForIndex(String[] args,ApertureMain aperture) throws ParseException {
Options options=new Options();
Option opt = new Option("h", "help", false, "Show help message");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("R", "reference", true, "Genome FASTA file with fai index");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("V", "vcf", true, "Common SNPs from dbSNP database in VCF format");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("O", "out", true, "Output path");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("T", "threads", true, "Number of threads");
opt.setType(Integer.TYPE);
opt.setRequired(true);
options.addOption(opt);
CommandLine commandLine = null;
CommandLineParser parser = new DefaultParser();
HelpFormatter hf = new HelpFormatter();
hf.setWidth(150);
try {
commandLine = parser.parse(options, args);
if (commandLine.hasOption('h') || commandLine.hasOption("help")) {
hf.printHelp("java -jar aperture.jar index -R <genome.fa> -V <snp.vcf> -O <out> -T <threads>", options, false);
}else {
try {
long time1=System.currentTimeMillis();
aperture.databaseBuild(commandLine.getOptionValue("R"),commandLine.getOptionValue("V"),commandLine.getOptionValue("O"),Integer.parseInt(commandLine.getOptionValue("T")),23,41,83,3,30000,65000,5);
long time2=System.currentTimeMillis();
long interval=(time2-time1)/1000;
System.out.println("Index building workflow successfully completed.");
System.out.println("Elapsed time: "+interval+"s");
}catch(Exception e){
e.printStackTrace();
}
}
}catch(ParseException e) {
System.out.println( "Unexpected exception:" + e.getMessage() );
hf.printHelp("java -jar aperture.jar index -R <genome.fa> -V <snp.vcf> -O <out> -T <threads>", options, false);
}
}
private void parseArgForCall(String[] args,ApertureMain aperture) throws ParseException {
Options options=new Options();
Option opt = new Option("H", "help", false, "Show help message");
opt.setRequired(false);
options.addOption(opt);
if(debug) {
opt = new Option("LC", true, "debug:left code");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("LP", true, "debug:left pos");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("RC", true, "debug:right code");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("RP", true, "debug:right pos");
opt.setRequired(false);
options.addOption(opt);
}
opt = new Option("1", "r1", true, "Path of R1.fq.gz");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("2", "r2", true, "Path of R2.fq.gz");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("I", "index", true, "Path of Aperture index files");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("D", "dir", true, "Output path");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("P", "project", true, "Project name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("1BS", "r1BarStart", true, "Barcode start index in R1 (0-based)");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("2BS", "r2BarStart", true, "Barcode start index in R2 (0-based)");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("1BL", "r1BarLen", true, "Length of barcode in R1");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("2BL", "r2BarLen", true, "Length of barcode in R2");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("1S", "r1InsStart", true, "ctDNA fragment start index in R1 (0-based)");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("2S", "r2InsStart", true, "ctDNA fragment start index in R2 (0-based)");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("T", "threads", true, "Number of threads");
opt.setType(Integer.TYPE);
opt.setRequired(true);
options.addOption(opt);
CommandLine commandLine = null;
CommandLineParser parser = new DefaultParser();
HelpFormatter hf = new HelpFormatter();
hf.setWidth(150);
try {
commandLine = parser.parse(options, args);
if (commandLine.hasOption('h') || commandLine.hasOption("help")) {
hf.printHelp("java -jar aperture.jar call", options, true);
}else {
if(debug) {
if (commandLine.hasOption("LC") && commandLine.hasOption("RC")) {
lCode=Integer.parseInt(commandLine.getOptionValue("LC"));
rCode=Integer.parseInt(commandLine.getOptionValue("RC"));
lPos=Integer.parseInt(commandLine.getOptionValue("LP"));
rPos=Integer.parseInt(commandLine.getOptionValue("RP"));
}
}
try {
String indexPath=commandLine.getOptionValue("I");
int r1BarStart=Integer.parseInt(commandLine.getOptionValue("1BS"));
int r2BarStart=Integer.parseInt(commandLine.getOptionValue("2BS"));
int r1BarLen=Integer.parseInt(commandLine.getOptionValue("1BL"));
int r2BarLen=Integer.parseInt(commandLine.getOptionValue("2BL"));
int r1InsStart=Integer.parseInt(commandLine.getOptionValue("1S"));
int r2InsStart=Integer.parseInt(commandLine.getOptionValue("2S"));
int threads=Integer.parseInt(commandLine.getOptionValue("T"));
long time1=System.currentTimeMillis();
aperture.readClassify(commandLine.getOptionValue("1"),commandLine.getOptionValue("2"),indexPath,
commandLine.getOptionValue("D"),commandLine.getOptionValue("P"),
r1BarStart,r2BarStart,r1BarLen,r2BarLen,r1InsStart,r2InsStart,threads,11,50,0.25,3.0,args);
long time2=System.currentTimeMillis();
long interval=(time2-time1)/1000;
System.out.println("SV calling workflow successfully completed.");
System.out.println("Elapsed time: "+interval+"s");
}catch(Exception e){
e.printStackTrace();
}
}
}catch(ParseException e) {
System.out.println( "Unexpected exception:" + e.getMessage() );
hf.printHelp("java -jar aperture.jar call", options, true);
}
}
private void showArgs(String[] args) {
System.out.print("Args: java -jar aperture.jar ");
for(String arg:args) {
System.out.print(arg+" ");
}
System.out.println();
}
private void ensureIndexFileExist(Path path) throws FileNotFoundException {
if(!Files.exists(path)) {
throw new FileNotFoundException("Index file: "+path.toString()+" cannot be found!");
}
}
private void ensureFastqFileExist(Path path) throws FileNotFoundException {
if(!Files.exists(path)) {
throw new FileNotFoundException("FastQ file: "+path.toString()+" cannot be found!");
}
}
private Path[] createTempDataPaths(int threads,String workDir,String projectName) {
Path[] pathList=new Path[threads];
for(int i=0;i<threads;++i) {
pathList[i]=Paths.get(workDir, projectName + "." + i + ".tmp");
}
return pathList;
}
private Path[] createTempIndexPaths(int threads,String workDir,String projectName) {
Path[] pathList=new Path[threads];
for(int i=0;i<threads;++i) {
pathList[i]=Paths.get(workDir, projectName + "." + i + ".tmp.idx");
}
return pathList;
}
private void tryCreateWorkDir(Path dir) throws IOException {
try{
Files.createDirectory(dir);
System.out.println("Working directory: "+dir.toString()+" is created");
}catch(FileAlreadyExistsException e) {
if(Files.isDirectory(dir)) {
System.out.println("Working directory: "+dir.toString()+" already exists");
}else {
throw e;
}
}
}
private void tryDeleteFiles(Path[] pathList) {
for(int i=0,len=pathList.length;i<len;++i) {
if(pathList[i]!=null) {
try {
Files.delete(pathList[i]);
}catch(IOException e) {
e.printStackTrace();
}finally {
pathList[i]=null;
}
}
}
}
private void tryDeleteAFile(Path path) {
try {
Files.delete(path);
}catch(IOException e) {
e.printStackTrace();
}
}
}
|
/**
* @author: Alexander Zagniotov
* Created: 4/25/13 11:16 PM
*/
public class NullHandlingStrategy implements AdminResponseHandlingStrategy {
@Override
public void handle(final HttpServletRequest request, final HttpServletResponseWithGetStatus wrapper, final StubbedDataManager stubbedDataManager) throws IOException {
wrapper.setStatus(HttpStatus.NOT_IMPLEMENTED_501);
wrapper.getWriter().println(String.format("Method %s is not implemented on URI %s", request.getMethod(), request.getRequestURI()));
}
} |
<commit_msg>Make xmvn-resolve print resolved artifact files
<commit_before>/*-
* Copyright (c) 2012 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fedoraproject.maven.tools.resolver;
import java.util.StringTokenizer;
import org.fedoraproject.maven.resolver.DefaultResolver;
import org.fedoraproject.maven.resolver.Resolver;
import org.fedoraproject.maven.resolver.SystemResolver;
public class ResolverCli
{
public static void main( String[] args )
{
Resolver resolver = new DefaultResolver();
for ( String arg : args )
{
StringTokenizer tok = new StringTokenizer( arg, ":" );
String groupId = tok.nextToken();
String artifactId = tok.nextToken();
String version = tok.hasMoreTokens() ? tok.nextToken() : "SYSTEM";
String extension = tok.hasMoreTokens() ? tok.nextToken() : "pom";
resolver.resolve( groupId, artifactId, version, extension );
}
SystemResolver.printInvolvedPackages();
}
}
<commit_after>/*-
* Copyright (c) 2012 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fedoraproject.maven.tools.resolver;
import java.io.File;
import java.util.StringTokenizer;
import org.fedoraproject.maven.resolver.DefaultResolver;
import org.fedoraproject.maven.resolver.Resolver;
import org.fedoraproject.maven.resolver.SystemResolver;
public class ResolverCli
{
public static void main( String[] args )
{
Resolver resolver = new DefaultResolver();
for ( String arg : args )
{
StringTokenizer tok = new StringTokenizer( arg, ":" );
String groupId = tok.nextToken();
String artifactId = tok.nextToken();
String version = tok.hasMoreTokens() ? tok.nextToken() : "SYSTEM";
String extension = tok.hasMoreTokens() ? tok.nextToken() : "pom";
File file = resolver.resolve( groupId, artifactId, version, extension );
System.out.println( file );
}
SystemResolver.printInvolvedPackages();
}
}
|
<filename>render/engine/template/view.go<gh_stars>0
// Copyright 2014 <NAME>. All rights reserved.
// Use of this source code is governed by a MIT style
// license that can be found in the LICENSE file.
package template
import (
"html/template"
"io"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"strings"
"sync"
)
type htmlEngine struct {
sync.Once
*template.Template
funcMap template.FuncMap
reload bool
viewDir string
ext string
}
func New(viewDir, ext string, reload bool) *htmlEngine {
if len(viewDir) == 0 || len(ext) == 0 {
panic("viewDir or ext cannot be empty")
}
html := &htmlEngine{
reload: reload,
funcMap: template.FuncMap{},
ext: ext,
}
var err error
html.viewDir, err = filepath.Abs(viewDir)
if err != nil {
panic(err)
}
html.Template = template.New(html.viewDir)
return html
}
func (t *htmlEngine) walk() {
t.Do(func() {
if err := filepath.Walk(t.viewDir, func(targetPath string, info os.FileInfo, err error) error {
if info != nil && !info.IsDir() && strings.HasSuffix(info.Name(), t.ext) {
relPath, err := filepath.Rel(t.viewDir, targetPath)
if err != nil {
return err
}
buf, err := ioutil.ReadFile(targetPath)
if err != nil {
panic(err)
}
_, err = t.Template.New(strings.Replace(relPath, "\\", "/", -1)).Funcs(t.funcMap).Parse(string(buf))
if err != nil {
panic(err)
}
}
return nil
}); err != nil {
panic(err)
}
})
}
func (t *htmlEngine) Ext() string {
return t.ext
}
func (t *htmlEngine) AddFunc(funcName string, funcEntry interface{}) {
if reflect.ValueOf(funcEntry).Kind() == reflect.Func {
t.funcMap[funcName] = funcEntry
}
}
func (t *htmlEngine) HTML(writer io.Writer, name string, binding map[string]interface{}) error {
if t.reload {
funcs := t.funcMap
t = New(t.viewDir, t.ext, t.reload)
t.funcMap = funcs
}
t.walk()
return t.Template.ExecuteTemplate(writer, filepath.ToSlash(name), binding)
}
|
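// A minimal usage sketch for the engine above, assuming it is imported as "tmpl"
// (the import path and the presence of a ./views/index.html template are assumptions).
package main

import (
	"bytes"
	"log"
	"strings"

	tmpl "example.com/render/engine/template" // hypothetical import path
)

func main() {
	engine := tmpl.New("./views", ".html", true) // reload templates on every render
	engine.AddFunc("upper", strings.ToUpper)

	var buf bytes.Buffer
	if err := engine.HTML(&buf, "index.html", map[string]interface{}{"Title": "Hello"}); err != nil {
		log.Fatal(err)
	}
	log.Println(buf.String())
}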
# -*- coding: utf-8 -*-
# Copyright (c) 2019 SMHI, Swedish Meteorological and Hydrological Institute
# License: MIT License (see LICENSE.txt or http://opensource.org/licenses/mit).
import codecs
import datetime
import logging
import logging.config
import os
import re
import time
import numpy as np
import sharkpylib
from sharkpylib import mappinglib
from sharkpylib.file import txt_reader
from sharkpylib.file.file_handlers import Directory
from sharkpylib.file.file_handlers import ListDirectory
from sharkpylib.file.file_handlers import MappingDirectory
from sharkpylib.qc.mask_areas import MaskAreasDirectory
try:
    import pandas as pd
except ImportError:
    pass
import sys
parent_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if parent_directory not in sys.path:
sys.path.append(parent_directory)
from sharkpylib import gismo
class TavastlandException(Exception):
"""
Blueprint for error message.
code is for external mapping of exceptions. For example if a GUI wants to
handle the error text for different languages.
"""
code = None
message = ''
def __init__(self, message='', code=''):
self.message = '{}: {}'.format(self.message, message)
if code:
self.code = code
class TavastlandExceptionCorrupedFile(TavastlandException):
"""
"""
code = ''
message = 'Corrupted file'
class TavastlandExceptionNoCO2data(TavastlandException):
"""
"""
code = ''
message = ''
class TavastlandExceptionNoMatchWhenMerging(TavastlandException):
"""
"""
code = ''
message = ''
class File(object):
def __init__(self, file_path='', **kwargs):
self._set_logger(kwargs.get('logger'))
self.file_path = file_path
self.file_directory = os.path.dirname(self.file_path)
self.file_name = os.path.basename(self.file_path)
self.file_id = self.file_name
self.df = pd.DataFrame()
self.time_start = None
self.time_end = None
self.data_loaded = None
self.time_in_file_name_formats = ['TP_%Y%m%d%H%M%S.mit']
self._add_file_path_time()
self.time_frozen_between = []
if kwargs.get('load_file'):
self.load_file()
def _set_logger(self, logger):
if logger:
self.logger = logger
else:
logging.config.fileConfig('logging.conf')
self.logger = logging.getLogger('timedrotating')
def _len_header_equals_len_data(self, file_path):
with open(file_path) as fid:
for r, line in enumerate(fid):
split_line = line.split('\t')
if r==0:
header = split_line
else:
if len(header) == len(split_line):
return True
return False
def _add_file_path_time(self):
self.file_path_time = None
self.file_path_year = None
self.file_path_possible_years = []
for time_format in self.time_in_file_name_formats:
try:
time_object = datetime.datetime.strptime(self.file_name, time_format)
self.file_path_time = time_object
break
except ValueError:
# logger.debug('No time in file path for file: {}'.format(self.file_path))
pass
# Find year
result = re.findall('\d{4}', self.file_name)
if result:
self.file_path_year = int(result[0])
self.file_path_possible_years = [self.file_path_year-1, self.file_path_year, self.file_path_year+1]
def _delete_columns(self):
if 'Date' in self.df.columns:
self.df.drop(['Date'], axis=1, inplace=True)
# Time is removed in method _add_columns
elif 'PC Date' in self.df.columns:
self.df.drop(['PC Date', 'PC Time'], axis=1, inplace=True)
if 'Lat' in self.df.columns:
self.df.drop(['Lat', 'Lon'], axis=1, inplace=True)
elif 'latitude' in self.df.columns:
self.df.drop(['latitude', 'longitude'], axis=1, inplace=True)
def _add_columns(self):
# Time
if 'Date' in self.df.columns:
time_str = self.df['Date'] + ' ' + self.df['Time'].copy()
self.df.drop('Time', axis=1, inplace=True)
self.df['time'] = pd.to_datetime(time_str, format='%d.%m.%Y %H:%M:%S')
elif 'PC Date' in self.df.columns:
time_str = self.df['PC Date'] + ' ' + self.df['PC Time']
self.df['time'] = pd.to_datetime(time_str, format='%d/%m/%y %H:%M:%S')
# Position
if 'Lat' in self.df.columns:
self.df['lat'] = self.df['Lat'].apply(as_float)
self.df['lon'] = self.df['Lon'].apply(as_float)
elif 'latitude' in self.df.columns:
self.df['lat'] = self.df['latitude'].apply(as_float)
self.df['lon'] = self.df['longitude'].apply(as_float)
else:
self.df['lat'] = np.nan
self.df['lon'] = np.nan
self.df['source_file'] = self.file_name
def _remove_duplicates(self):
# print('REMOVE DUPLICATES', self.file_id)
# First save missing periodes
dub_boolean = self.df.duplicated('time', keep=False)
between = []
missing_period = []
for i, t0, b0, t1, b1 in zip(self.df.index[:-1], self.df['time'].values[:-1], dub_boolean.values[:-1],
self.df['time'].values[1:], dub_boolean.values[1:]):
if i == 0 and b0:
missing_period.append('?')
if b1 and not b0:
# t0s = pd.to_datetime(t0).strftime('%Y%m%d%H%M%S')
# missing_period.append(t0s)
missing_period.append(t0)
elif b0 and not b1:
# t1s = pd.to_datetime(t1).strftime('%Y%m%d%H%M%S')
# missing_period.append(t1s)
missing_period.append(t1)
# print(missing_period)
if len(missing_period) == 2:
between.append(missing_period)
# between.append('-'.join(missing_period))
missing_period = []
if missing_period:
missing_period.append('?')
between.append(missing_period)
# between.append('-'.join(missing_period))
# print('between:', len(between))
self.time_frozen_between = between
# Now drop all duplicates
self.df.drop_duplicates('time', keep=False, inplace=True)
def valid_data_line(self, line):
if 'DD.MM.YYYY' in line:
# print('DD.MM.YYYY', self.file_path)
return False
if not line.strip():
# print('BLANK', self.file_path)
return False
return True
def load_file(self, **kwargs):
if not os.path.exists(self.file_path):
raise FileNotFoundError
header = []
data = []
with codecs.open(self.file_path, encoding=kwargs.get('encoding', 'cp1252')) as fid:
for row, line in enumerate(fid):
split_line = line.strip('\n\r').split(kwargs.get('sep', '\t'))
split_line = [item.strip() for item in split_line]
if row == 1 and header:
if len(header) != len(split_line):
header = header[:len(split_line)]
if not header:
header = split_line
else:
if len(header) != len(split_line):
                        self.logger.warning('Row {} does not match header length in file: {}'.format(row, self.file_path))
                        self.data_loaded = False
                        raise TavastlandExceptionCorrupedFile
if not self.valid_data_line(line):
self.logger.warning('Removing invalid line {} from file: {}'.format(row, self.file_path))
continue
data.append(split_line)
self.original_columns = header[:]
self.df = pd.DataFrame(data, columns=header)
self._add_columns()
self._remove_duplicates()
self.filter_data()
self._delete_columns()
self.data_loaded = True
return True
def filter_data(self):
"""
Filters the data from unwanted lines etc.
:return:
"""
        combined_keep_boolean = pd.Series([True]*len(self.df), index=self.df.index)
keep_boolean = ~self.df[self.original_columns[0]].str.contains('DD.MM.YYYY')
combined_keep_boolean = combined_keep_boolean & keep_boolean
keep_boolean = ~self.df[self.original_columns[0]].str.contains('.1904')
combined_keep_boolean = combined_keep_boolean & keep_boolean
keep_boolean = self.df['time'] <= datetime.datetime.now()
combined_keep_boolean = combined_keep_boolean & keep_boolean
removed = self.df.loc[~combined_keep_boolean]
if len(removed):
self.logger.warning('{} lines removed from file {}'.format(len(removed), self.file_path))
self.df = self.df.loc[combined_keep_boolean, :]
def clean_file(self, export_directory):
"""
Loads file (including filter data) and saves to the export directory.
:return
"""
# print(export_directory)
if export_directory == self.file_directory:
raise TavastlandException('Cannot export to the same directory!')
if not os.path.exists(export_directory):
os.makedirs(export_directory)
if self.data_loaded is None:
self.load_file()
export_file_path = os.path.join(export_directory, self.file_name)
self.df[self.original_columns].to_csv(export_file_path, index=False, sep='\t')
def get_df(self):
if self.data_loaded is None:
self.load_file()
return self.df
def get_time_range(self):
def get_time(line):
            date = re.findall(r'\d{2}\.\d{2}\.\d{4}', line)
            time = re.findall(r'\d{2}:\d{2}:\d{2}', line)
            if date and time:
                return datetime.datetime.strptime(date[0] + time[0], '%d.%m.%Y%H:%M:%S')
            date = re.findall(r'\d{2}/\d{2}/\d{2}', line)
            time = re.findall(r'\d{2}:\d{2}:\d{2}', line)
            if date and time:
                return datetime.datetime.strptime(date[0] + time[0], '%d/%m/%y%H:%M:%S')
self.time_start = None
self.time_end = None
if self.data_loaded:
self.time_start = self.df.time.values[0]
self.time_end = self.df.time.values[-1]
return self.time_start, self.time_end
else:
with codecs.open(self.file_path) as fid:
for r, line in enumerate(fid):
if self.valid_data_line(line):
if r == 0:
continue
elif not self.time_start:
time = get_time(line)
self.time_start = time
self.time_end = get_time(line)
return self.time_start, self.time_end
def in_time_range(self, datetime_object):
if not self.time_start:
self.get_time_range()
return (datetime_object >= self.time_start) & (datetime_object <= self.time_end)
    def check_if_valid_file_name(self, file_name):
"""
External method.
Returns True if file_name follows the structure(s) described in method.
:param file_name:
:return:
"""
raise NotImplementedError
def warnings(self):
"""
        Returns a list of strange things found in the file. Strange things can be handled.
:return: list with description of the warnings.
"""
raise NotImplementedError
def get_file_errors(self):
"""
Returns a list of errors in file if any. Errors are obvious faults that can not be handled.
:return list with description of the errors.
"""
raise NotImplementedError
def _get_file_errors(self):
error_list = []
if not self._len_header_equals_len_data(self.file_path):
text = 'Header is not the same length as data in file: {}.'.format(self.file_name)
error_list.append(text)
return error_list
class MITfile(File):
def __init__(self, file_path='', **kwargs):
File.__init__(self, file_path, **kwargs)
def check_if_valid_file_name(self, file_name):
"""
External method.
Returns True if file_name follows the structure(s) described in method.
:param file_name:
:return:
"""
if not file_name.endswith('.mit'):
return False
return True
def warnings(self):
"""
        Returns a list of strange things found in the file. Strange things can be handled.
:return: list with description of the warnings.
"""
raise NotImplementedError
def get_file_errors(self):
"""
Returns a list of errors in file if any. Errors are obvious faults that can not be handled.
:return list with description of the errors.
"""
error_list = self._get_file_errors()
# Check time
start, end = self.get_time_range()
d = datetime.datetime(1980, 1, 1)
this_year = datetime.datetime.now().year
if not all([start, end]):
text = 'Could not find time in file {}.'.format(self.file_name)
error_list.append(text)
else:
if start < d:
                text = 'Start date is too early in file {}. Before {}'.format(self.file_name, d.strftime('%Y%m%d'))
error_list.append(text)
# continue
if start > end:
text = 'Start time > end time in file {}.'.format(self.file_name)
error_list.append(text)
# continue
if any([start.year > this_year, end.year > this_year]):
text = 'Start year or end year is later than current year in file {}.'.format(self.file_name)
error_list.append(text)
# continue
if any([start.year == 1904, end.year == 1904]):
text = 'Start year or end year is 1904 in file {}.'.format(self.file_name)
self.logger.info(text)
error_list.append(text)
if error_list:
self.logger.info('; '.join(error_list))
return error_list
class CO2file(File):
def __init__(self, file_path='', **kwargs):
File.__init__(self, file_path, **kwargs)
def check_if_valid_file_name(self, file_name):
"""
External method.
Returns True if file_name follows the structure(s) described in method.
:param file_name:
:return:
"""
if not file_name.endswith('dat.txt'):
return False
return True
def warnings(self):
"""
        Returns a list of strange things found in the file. Strange things can be handled.
:return: list with description of the warnings.
"""
raise NotImplementedError
def get_file_errors(self):
"""
Returns a list of errors in file if any. Errors are obvious faults that can not be handled.
:return list with description of the errors.
"""
error_list = self._get_file_errors()
# Check time
start, end = self.get_time_range()
d = datetime.datetime(1980, 1, 1)
this_year = datetime.datetime.now().year
if not all([start, end]):
text = 'Could not find time in file {}.'.format(self.file_name)
error_list.append(text)
if error_list:
self.logger.info('; '.join(error_list))
return error_list
class FileHandler(object):
def __init__(self, **kwargs):
self._set_logger(kwargs.get('logger'))
self.logger.debug('Starting FileHandler for Tavastland')
self.directories = {}
self.directories['mit'] = kwargs.get('mit_directory', None)
self.directories['co2'] = kwargs.get('co2_directory', None)
self.export_directory = kwargs.get('export_directory', None)
self.save_directory = None
self.current_merge_data = pd.DataFrame()
self.df_header = ['file_id', 'file_path', 'time_start', 'time_end']
self.export_time_format_str = '%Y%m%d%H%M%S'
self.package_prefix = 'ferrybox-tavastland'
self.objects = dict()
self.dfs = dict()
self.files_with_errors = dict()
self.corruped_files = dict()
self.metadata = []
self.metadata_added = {}
self.time_frozen_between = {}
list_dir_object = ListDirectory()
self.exclude_co2_types = list_dir_object.get_file_object('list_tavastland_exclude_types.txt', comment='#').get()
self.reset_time_range()
self.reset_data()
self.set_time_delta(seconds=30)
for file_type, directory in self.directories.items():
if directory:
self.set_file_directory(file_type, directory)
def _set_logger(self, logger):
if logger:
self.logger = logger
print('SETTING LOGGER', self.logger.name)
else:
logging.config.fileConfig('logging.conf')
self.logger = logging.getLogger('timedrotating')
def set_export_directory(self, directory):
"""
Sets the export directory.
:param directory:
:return:
"""
self.export_directory = directory
def set_file_directory(self, file_type, directory):
"""
Saves path to files with the given directory for the given file_type
:param file_type:
:return:
"""
this_year = datetime.datetime.now().year
if file_type == 'mit':
File_type_class = MITfile
file_type_object = MITfile(logger=self.logger)
        elif file_type == 'co2':
            File_type_class = CO2file
            file_type_object = CO2file(logger=self.logger)
        else:
            raise ValueError('Unknown file_type: {}'.format(file_type))
self.files_with_errors[file_type] = []
self.corruped_files[file_type] = []
self.objects[file_type] = dict()
data_lines = []
for root, dirs, files in os.walk(directory):
for name in files:
if not file_type_object.check_if_valid_file_name(name):
continue
file_path = os.path.join(root, name)
file_object = File_type_class(file_path, logger=self.logger)
start, end = file_object.get_time_range()
errors = file_object.get_file_errors()
if errors:
print('name', name)
print('errors', errors)
errors_dict = {name: errors}
self.files_with_errors[file_type].append(errors_dict)
data_lines.append([name, file_path, start, end])
self.objects[file_type][name] = file_object
if not data_lines:
raise TavastlandException('No valid {}-files found!'.format(file_type))
self.dfs[file_type] = pd.DataFrame(data_lines, columns=self.df_header)
self.dfs[file_type].sort_values('time_start', inplace=True)
def get_file_id(self, time=None, file_type='mit'):
"""
        Returns the file_id of the file (of the given file_type) that matches the given time.
:param time: datetime_object
:return:
"""
if time:
result = self.dfs[file_type].loc[(self.dfs[file_type]['time_start'] <= time) &
(time <= self.dfs[file_type]['time_end']), 'file_id'].values
if len(result) > 1:
self.logger.debug('Several files matches time stamp: {}\n{}'.format(time, '\n'.join(list(result))))
raise TavastlandException('Several files matches time stamp {}: \n{}'.format(time, '\n'.join(list(result))))
elif len(result) == 0:
return None
else:
return result[0]
else:
raise AttributeError('Missing input parameter "time"')
def get_previous_file_id(self, file_id=None, time_stamp=None, file_type='mit'):
"""
Returns the previous file_id
:param file_id:
:return:
"""
df = self.dfs.get(file_type)
if file_id:
if file_id in df['file_id'].values:
index = df.index[df['file_id'] == file_id][0]
if index == 0:
return None
else:
return df.at[index-1, 'file_id']
else:
return None
elif time_stamp:
end_time_boolean = df['time_end'] < time_stamp
matching_file_id_list = df.loc[end_time_boolean]['file_id'].values
# print('='*20)
# print('matching_file_id_list')
# print(matching_file_id_list)
# print(type(matching_file_id_list))
if any(matching_file_id_list):
return matching_file_id_list[-1]
else:
return None
def set_time_range(self, time_start=None, time_end=None, time=None, file_id=None, file_type='mit'):
"""
Selects/sets the period to work with. You can select data by giving start and end time or by file_id.
Also option to find file_id by time stamp (looking at mit_file) given in time. All time objects ar of type
datetime.datetime.
:param time_start:
:param time_end:
:param time:
:param file_name:
:return:
"""
if time:
file_id = self.get_file_id(time=time, file_type=file_type)
if file_id:
for file_type in self.objects:
if file_id in self.objects[file_type]:
time_start, time_end = self.objects[file_type][file_id].get_time_range()
break
else:
            raise ValueError('Could not find file_id {}'.format(file_id))
self.reset_time_range()
self.current_time_start = time_start
self.current_time_end = time_end
def set_time_delta(self, **kwargs):
"""
Sets the timedelta allowed for matching data.
:param kwargs:
:return:
"""
self.time_delta = pd.Timedelta(**kwargs)
def reset_time_range(self):
self.current_time_start = None
self.current_time_end = None
self.reset_data()
def load_data(self):
"""
        Loads data in the time range. The time range is set in method set_time_range.
:return:
"""
t0 = time.time()
if not all([self.current_time_start, self.current_time_end]):
            raise Exception('Time range is not set. Call set_time_range first.')
self.reset_data()
# Load files within time range
self.current_data['mit'] = self.get_data_within_time_range('mit', self.current_time_start, self.current_time_end)
self.current_data['co2'] = self.get_data_within_time_range('co2', self.current_time_start, self.current_time_end)
# Reset index
self.current_data['mit'] = self.current_data['mit'].reset_index(drop=True)
self.current_data['co2'] = self.current_data['co2'].reset_index(drop=True)
# print('Load data')
# print('mit', len(self.current_data['mit']))
# print('co2', len(self.current_data['co2']))
# print('Loaded in: {}'.format(time.time()-t0))
def reset_data(self):
self.current_data = {}
self.current_merge_data = pd.DataFrame()
self.pCO2_constants = {}
self.std_val_list = []
self.std_co2_list = []
self.std_latest_time = None
self.time_frozen_between = {}
def clean_files(self, export_directory, file_list=False):
if not self.current_data:
raise TavastlandException
if not file_list:
file_list = []
for key, value in self.objects.items():
for file_name in value:
if self.objects[key][file_name].data_loaded:
file_list.append(file_name)
# Clean files and save in subdirectories
for key in self.objects:
directory = os.path.join(export_directory, 'cleaned_files', key)
for file_name in file_list:
if file_name in self.objects[key]:
self.objects[key][file_name].clean_file(directory)
def get_data_within_time_range(self, file_type, time_start, time_end):
"""
        Extracts data within the time range from mit or co2 files. Expands the time limits with self.time_delta first.
:param file_type: mit or co2
:param time_start:
:param time_end:
:return:
"""
# print('get_data_within_time_range')
self.time_frozen_between[file_type] = []
object_dict = self.objects.get(file_type)
file_id_list = self.get_file_ids_within_time_range(file_type, time_start, time_end)
ts = np.datetime64(time_start)
te = np.datetime64(time_end)
df = pd.DataFrame()
for file_id in file_id_list:
            if file_id in self.get_files_with_errors(file_type):
self.logger.warning('Discarding file {}. File has errors!'.format(file_id))
continue
object = object_dict.get(file_id)
try:
object_df = object.get_df()
except TavastlandExceptionCorrupedFile:
self.corruped_files[file_type].append(file_id)
self.logger.warning('Discarding file {}. File has errors!'.format(file_id))
continue
df = df.append(object_df)
# print('file_id', file_id)
# print('object.time_frozen_between', object.time_frozen_between)
for t in object.time_frozen_between:
# print(t, time_start, time_end)
add = False
# print(t[0], time_start)
# print(type(t[0]), type(time_start))
if t[0] != '?' and t[0] >= ts:
add = True
elif t[1] != '?' and t[1] <= te:
add = True
if add:
self.time_frozen_between[file_type].append(t)
if not len(df):
raise TavastlandExceptionNoCO2data('No data in time range {} - {}'.format(time_start, time_end))
else:
df.sort_values('time', inplace=True)
# Add file type to header
df.columns = ['{}_{}'.format(file_type, item) for item in df.columns]
df['time'] = df['{}_time'.format(file_type)]
# Strip dates
if file_type == 'co2':
time_start = time_start - self.time_delta
time_end = time_end + self.time_delta
time_boolean = (df.time >= time_start) & (df.time <= time_end)
df = df.loc[time_boolean]
df.sort_values(by='time', inplace=True)
return df
def get_file_ids_within_time_range(self, file_type, time_start, time_end):
"""
Returns a list of the matching file_id:s found in self.dfs
:param file_type:
:param time_start:
:param time_end:
:return:
"""
df = self.dfs.get(file_type)
ts = time_start - self.time_delta
te = time_end + self.time_delta
boolean = (df['time_end'] >= ts) & (df['time_end'] <= te)
# | (df['time_start'] <= ts) & (df['time_start'] <= te)
# if not any(boolean):
# boolean = (df['time_end'] >= ts) & (df['time_end'] <= te)
return sorted(df.loc[boolean, 'file_id'])
def get_files_with_errors(self, file_type):
"""
Returns a list with all files that has errors in them.
:param file_type:
:return:
"""
file_list = []
for file_name_dict in self.files_with_errors[file_type]:
file_list.append(list(file_name_dict.keys())[0])
return file_list
def merge_data(self):
"""
Merges the dataframes in self.current_data.
:return:
"""
missing_data = []
for file_type, df in self.current_data.items():
if not len(df):
missing_data.append(file_type)
if missing_data:
raise Exception('Missing data from the following sources: {}'.format(', '.join(missing_data)))
# We do not want same co2 merging to several lines in mit.
# Therefore we start by merging co2 and mit with the given tolerance.
co2_merge = pd.merge_asof(self.current_data['co2'], self.current_data['mit'],
on='time',
tolerance=self.time_delta,
direction='nearest')
# In this df we only want to keep lines that has mit_time
co2_merge = co2_merge[~pd.isna(co2_merge['mit_time'])]
# co2_merge.sort_values('time', inplace=True)
# Now we merge (outer join) the original mit-dataframe with the one we just created.
# This will create a df that only has one match of co2 for each mit (if matching).
self.current_merge_data = pd.merge(self.current_data['mit'],
co2_merge,
left_on='mit_time',
right_on='mit_time',
suffixes=('', '_remove'),
how='outer')
remove_columns = [col for col in self.current_merge_data.columns if col.endswith('_remove')]
self.current_merge_data.drop(remove_columns, axis=1, inplace=True)
self.current_merge_data = self.current_merge_data.reset_index(drop=True)
# Add time par
self.current_merge_data['time'] = self.current_merge_data['mit_time']
# Add position par
self.current_merge_data['lat'] = self.current_merge_data['mit_lat']
self.current_merge_data['lon'] = self.current_merge_data['mit_lon']
self.mit_columns = [col for col in self.current_merge_data.columns if col.startswith('mit_')]
self.co2_columns = [col for col in self.current_merge_data.columns if col.startswith('co2_')]
# Add diffs
self.current_merge_data['diff_time'] = abs(self.current_merge_data['co2_time'] - \
self.current_merge_data['mit_time']).astype('timedelta64[s]')
self.current_merge_data['diff_lat'] = self.current_merge_data['co2_lat'] - \
self.current_merge_data['mit_lat']
self.current_merge_data['diff_lon'] = self.current_merge_data['co2_lon'] - \
self.current_merge_data['mit_lon']
self.diff_columns = [col for col in self.current_merge_data.columns if col.startswith('diff_')]
if self.current_merge_data['diff_time'].isnull().values.all():
raise TavastlandExceptionNoMatchWhenMerging('No match in data between {} and {} '
'with time tolerance {} seconds'.format(self.current_time_start,
self.current_time_end,
self.time_delta.seconds))
self._sort_merge_data_columns()
# Add merge comment
if not self.metadata_added.get('time_tolerance'):
self.metadata = [f'COMMENT_MERGE;{self._get_time_string()};Data merged with time tolerance '
f'{self.time_delta.seconds} seconds.']
self.metadata_added['time_tolerance'] = True
def _sort_merge_data_columns(self):
columns = sorted(self.current_merge_data.columns)
columns.pop(columns.index('time'))
columns.pop(columns.index('lat'))
columns.pop(columns.index('lon'))
new_columns = ['time', 'lat', 'lon'] + columns
self.current_merge_data = self.current_merge_data[new_columns]
self.current_merge_data.fillna('', inplace=True)
def _mapp_columns(self, df=None):
if df is None:
df = self.current_merge_data
mapping_dir_object = MappingDirectory()
mapping = mapping_dir_object.get_file_object('mapping_tavastland.txt', from_col='co2_merged_file', to_col='nodc')
df.columns = mapping.get_mapped_list(df.columns)
def _remove_types(self):
boolean = self.current_merge_data['co2_Type'].isin(self.exclude_co2_types)
self.current_merge_data.loc[boolean, self.co2_columns] = ''
def old_remove_areas(self, file_path):
"""
        Remove areas listed in file_path. file_path should be of type gismo.qc.qc_trajectory.
Maybe this class should be located in a more general place.
:param file_path:
:return:
"""
area_object = gismo.qc.qc_trajectory.FlagAreasFile(file_path)
areas = area_object.get_areas()
df = self.current_merge_data
masked_areas = []
combined_boolean = df['time'] == ''
for name, area in areas.items():
lat_min = area.get('lat_min')
lat_max = area.get('lat_max')
lon_min = area.get('lon_min')
lon_max = area.get('lon_max')
boolean = (df['lat'].astype(float) >= lat_min) & \
(df['lat'].astype(float) <= lat_max) & \
(df['lon'].astype(float) >= lon_min) & \
(df['lon'].astype(float) <= lon_max)
            if boolean.any():
masked_areas.append(name)
combined_boolean = combined_boolean | boolean
# Remove areas
self.current_merge_data = self.current_merge_data.loc[~combined_boolean, :]
return masked_areas
def get_nr_rows(self, file_type):
return len(self.current_data[file_type])
def get_min_and_max_time(self):
"""
Returns the minimum and maximum time found looking in both time_start and time_end and all file_types.
:return:
"""
time_list = []
for df in self.dfs.values():
time_list.extend(list(df['time_start']))
time_list.extend(list(df['time_end']))
return min(time_list), max(time_list)
def get_merge_data(self):
"""
Returns merge data limited by time range
:return:
"""
boolean = (self.current_merge_data['time'] >= self.current_time_start) & \
(self.current_merge_data['time'] <= self.current_time_end)
return self.current_merge_data.loc[boolean, :].copy()
def old_map_header_like_iocftp(self):
"""
:return:
"""
mappings = mappinglib.MappingDirectory()
mapping_object = mappings.get_mapping_object('mapping_tavastland',
from_col='merged_file',
to_col='IOCFTP_tavastland')
new_header = []
for col in self.current_merge_data.columns:
new_header.append(mapping_object.get(col))
self.current_merge_data.columns = new_header
def old_map_header_like_internal(self):
"""
:return:
"""
mappings = mappinglib.MappingDirectory()
mapping_object = mappings.get_mapping_object('mapping_tavastland',
from_col='IOCFTP_tavastland',
to_col='internal')
new_header = []
for col in self.current_merge_data.columns:
new_header.append(mapping_object.get(col))
self.current_merge_data.columns = new_header
def calculate_pCO2(self):
"""
Calculates pCO2 on self.current_merge_data
:return:
"""
self.current_merge_data['calc_k'] = np.nan
self.current_merge_data['calc_m'] = np.nan
self.current_merge_data['calc_Pequ'] = np.nan
self.current_merge_data['calc_pCO2 dry air'] = np.nan
self.current_merge_data['calc_xCO2'] = np.nan
self.current_merge_data['calc_pCO2'] = np.nan
items = ['calc_k', 'calc_m', 'calc_xCO2', 'calc_Pequ', 'calc_pCO2 dry air', 'calc_time_since_latest_std']
for i in self.current_merge_data.index:
values = self._get_pCO2_data_from_row(self.current_merge_data.iloc[i])
for key in items:
self.current_merge_data.at[i, key] = values.get(key, np.nan)
# self.current_merge_data.at[i, 'calc_k'] = values.get('calc_k', np.nan)
# self.current_merge_data.at[i, 'calc_m'] = values.get('calc_m', np.nan)
# self.current_merge_data.at[i, 'calc_Pequ'] = values.get('calc_Pequ', np.nan)
# self.current_merge_data.at[i, 'calc_pCO2 dry air'] = values.get('calc_pCO2 dry air', np.nan)
# self.current_merge_data.at[i, 'calc_xCO2'] = values.get('calc_xCO2', np.nan)
self._calculate_pCO2()
self._sort_merge_data_columns()
self._remove_types()
# self._mapp_columns()
def _calculate_pCO2(self):
salinity_par = 'mit_Sosal'
temp_par = 'mit_Soxtemp'
equ_temp_par = 'co2_equ temp'
# Tequ = self.current_merge_data['co2_equ temp'].astype(float) + 273.15 # temp in Kelvin
        Tequ = np.array([as_float(item) for item in self.current_merge_data[equ_temp_par]]) + 273.15  # temp in Kelvin
self.current_merge_data['calc_Tequ'] = Tequ
Pequ = self.current_merge_data['calc_Pequ']
# Pequ = self.current_merge_data['co2_equ press'].astype(float) + self.current_merge_data['co2_licor press'].astype(float)
# Pequ is not in the same order as the previous calculated self.current_merge_data['calc_Pequ'] (has * 1e-3)
VP_H2O = np.exp(24.4543 - 67.4509 * 100 / Tequ -
4.8489 * np.log(Tequ / 100) -
0.000544 * self.current_merge_data[salinity_par].astype(float))
self.current_merge_data['calc_VP_H2O'] = VP_H2O
pCO2 = self.current_merge_data['calc_xCO2'] * (Pequ / 1013.25 - VP_H2O) * np.exp(
0.0423 * (self.current_merge_data[temp_par].astype(float) + 273.15 - Tequ))
fCO2 = pCO2 * np.exp(((-1636.75 + 12.0408 * Tequ - 0.0327957 * Tequ ** 2 + 3.16528 * 1e-5 * Tequ ** 3)
+ 2 * (1 - self.current_merge_data['calc_xCO2'] * 1e-6) ** 2 * (
57.7 - 0.118 * Tequ)) * Pequ / 1013.25 / (82.0575 * Tequ))
self.current_merge_data['calc_pCO2'] = pCO2
self.current_merge_data['calc_fCO2 SST'] = fCO2
def _get_pCO2_data_from_row(self, series):
"""
Calculates xCO2 etc. for row or saves information needed to calculate pCO2.
:param row_series: pandas.Series (row in df)
:return:
"""
return_dict = {'calc_k': np.nan,
'calc_m': np.nan,
'calc_xCO2': np.nan,
'calc_Pequ': np.nan,
'calc_pCO2 dry air': np.nan,
'calc_time_since_latest_std': np.nan}
type_value = series['co2_Type']
if type(type_value) == float and np.isnan(type_value):
return return_dict
co2_time = series['co2_time']
co2_value = as_float(series['co2_CO2 um/m'])
std_value = as_float(series['co2_std val'])
# print('co2_equ press in series', 'co2_equ press' in series) False
# print('co2_licor press in series', 'co2_licor press' in series) True
equ_press_value = as_float(series['co2_equ press'])
# equ_press_value = as_float(series['calc_Pequ'])
licor_press_value = as_float(series['co2_licor press'])
# print('-'*30)
# print('SERIES')
# print(series['co2_time'])
# print(series['co2_source_file'])
# print(series['mit_time'])
# print(series['mit_source_file'])
if not type_value:
return dict()
# Added by Johannes 2020-04-29
if not hasattr(self, 'co2_time_list'):
self.co2_time_list = []
if not hasattr(self, 'std_val_list'):
self.std_val_list = []
if not hasattr(self, 'std_co2_list'):
self.std_co2_list = []
if 'STD' in type_value:
if is_std(type_value):
if co2_time in self.co2_time_list:
return dict()
# print('¤'*40)
# print('STD', type_value)
# print(self.std_val_list)
# This row should be saved for regression calculation
self.co2_time_list.append(co2_time)
self.std_val_list.append(std_value)
self.std_co2_list.append(co2_value)
self.std_latest_time = series['time']
# print('STD: self.std_latest_time', self.std_latest_time)
return dict()
else:
return dict()
else:
# Calculate/save constants if data is available
if self.std_val_list:
# print('self.std_latest_time', self.std_latest_time)
# print()
# print('¤'*40)
# for t, st, co in zip(self.co2_time_list, self.std_val_list, self.std_co2_list):
# print(t, st, co)
# print('-'*40)
self._set_constants(self.std_val_list, self.std_co2_list, file_id=self.get_file_id(time=series['time'],
file_type='co2'))
# # Reset lists
# self.std_val_list = []
# self.std_co2_list = []
if not self.pCO2_constants:
self._set_constants_for_timestamp(series['time'])
# return {'calc_pCO2 dry air': co2_value,
# 'calc_xCO2': co2_value}
# Reset lists
self.co2_time_list = []
self.std_val_list = []
self.std_co2_list = []
# Make calculations
k = self.pCO2_constants['calc_k'] # k in y = kx + m
m = self.pCO2_constants['calc_m'] # m in y = kx + m
x = (co2_value - m) / k # x in y = kx + m
xCO2 = co2_value + (1 - k) * x + m
# value = measured Value + correction (correction = diff between y = x and y = kx + m)
Pequ = (equ_press_value + licor_press_value)
# pressure due to EQU press and licor press
pCO2_dry_air = xCO2 * Pequ * 1e-3
# Check time since latest standard gas
time_since_latest_std = np.nan
if self.std_latest_time:
time_since_latest_std = int(abs((self.std_latest_time - series['time']).total_seconds()))
return_dict = {'calc_k': k,
'calc_m': m,
'calc_xCO2': xCO2,
'calc_Pequ': Pequ,
'calc_pCO2 dry air': pCO2_dry_air,
'calc_time_since_latest_std': time_since_latest_std}
return return_dict
def _set_constants(self, std_val_list=[], std_co2_list=[], file_id='', **kwargs):
"""
Returns the constants from the regression calculated from standard gases.
:return:
"""
# if len(std_val_list) < 3:
# return
try:
# print('std_val_list', std_val_list, len(std_val_list)/3.)
# print('std_co2_list', std_co2_list, len(std_co2_list)/3.)
adapt = np.polyfit(np.array(std_val_list), np.array(std_co2_list), 1)
except:
# print('='*30)
# print(file_id)
# for val, co2 in zip(std_val_list, std_co2_list):
# print(val, co2, type(val), type(co2))
raise
self.pCO2_constants = dict(calc_k=adapt[0],
calc_m=adapt[1],
file_id=file_id)
def _set_constants_for_timestamp(self, time_stamp):
"""
Search in file or previous files to find closest STD rows. Sets constants and saves self.std_latest_time.
:return:
"""
data = self.get_std_basis_for_timestamp(time_stamp)
self._set_constants(**data)
self.std_latest_time = data.get('std_latest_time')
def get_std_basis_for_timestamp(self, time_object):
"""
        Finds information on the most recent std gases
:param time_object:
:return:
"""
index_list = []
file_id = self.get_file_id(time=time_object, file_type='co2')
if not file_id:
# Cannot find file id for the given time stamp. Need to find the latest file id.
file_id = self.get_previous_file_id(time_stamp=time_object, file_type='co2')
if not file_id:
raise TavastlandExceptionNoCO2data('No CO2 file found for time {} or earlier!'.format(time_object))
while file_id and not index_list:
# print('=' * 40)
# print('looking for get_std_basis_for_timestamp for time: {}'.format(time_object))
# print('in file_id:', file_id)
obj = self.objects['co2'][file_id]
try:
df = obj.get_df()
except TavastlandExceptionCorrupedFile:
continue
df = df.loc[df['time'] <= time_object]
for i in list(df.index)[::-1]:
value = df.at[i, 'Type']
if 'STD' in value:
if is_std(value):
index_list.append(i)
elif index_list:
break
if not index_list:
# No STD values found
# print('-', file_id)
file_id = self.get_previous_file_id(file_id=file_id, file_type='co2')
# print('-', file_id)
index_list.reverse()
# print(index_list)
std_latest_time = df.at[index_list[-1], 'time']
std_df = df.iloc[index_list, :]
std_val_list = [as_float(item) for item in std_df['std val']]
std_co2_list = [as_float(item) for item in std_df['CO2 um/m']]
return_dict = dict(file_id=file_id,
std_latest_time=std_latest_time,
std_val_list=std_val_list,
std_co2_list=std_co2_list)
return return_dict
def get_types_in_merge_data(self):
"""
Returns a list of types in loaded merged data
:return:
"""
merge_data = self.get_merge_data()
all_types = sorted(set(merge_data['co2_Type']))
if '' in all_types:
all_types.pop(all_types.index(''))
return all_types
def save_data(self, directory=None, overwrite=False, **kwargs):
self.save_dir = self._get_export_directory(directory)
if os.path.exists(self.save_dir):
if not overwrite:
raise FileExistsError('One or more files exists. Set overwrite=True to overwrite package')
if not os.path.exists(self.save_dir):
os.makedirs(self.save_dir)
processed_file_path = self._save_merge_data(directory=self.save_dir, **kwargs)
raw_mit_file_path = self._save_mit_data(directory=self.save_dir, **kwargs)
raw_co2_file_path = self._save_co2_data(directory=self.save_dir, **kwargs)
# Add comment to metadata
if not self.metadata_added.get('merged_files'):
mit_file_name = os.path.basename(raw_mit_file_path)
co2_file_name = os.path.basename(raw_co2_file_path)
time_string = self._get_time_string()
self.metadata.append(';'.join(['COMMENT_MERGE', time_string, f'Data merged are in files: {mit_file_name} and {co2_file_name}']))
self.metadata_added['merged_files'] = True
# Add "time frozen" comment to metadata
if not self.metadata_added.get('frozen_time'):
self._add_frozen_time_comment()
self.metadata_added['frozen_time'] = True
# Write metadata file
merge_file_base = os.path.basename(processed_file_path).split('.')[0]
metadata_file_path = os.path.join(self.save_dir, f'metadata_{merge_file_base}.txt')
self._save_metadata(metadata_file_path)
return self.save_dir
def _add_frozen_time_comment(self):
for file_type, between in self.time_frozen_between.items():
if not between:
continue
time_string = self._get_time_string()
between_list = []
for (f, t) in between:
if f != '?':
f = pd.to_datetime(f).strftime('%Y%m%d%H%M%S')
if t != '?':
t = pd.to_datetime(t).strftime('%Y%m%d%H%M%S')
between_list.append(f'{f}-{t}')
between_str = ','.join(between_list)
self.metadata.append(';'.join(
['COMMENT_MERGE', time_string, f'In {file_type}-files time was frozen between: {between_str}']))
def _get_time_string(self):
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
def _get_save_dir_name(self):
"""
        Returns the directory name to save data in. The directory name is based on platform and time span.
:return:
"""
return '{}_{}'.format(self.package_prefix,
self._get_file_time_string())
def _get_file_time_string(self):
return '{}_{}'.format(self.current_time_start.strftime(
self.export_time_format_str),
self.current_time_end.strftime(
self.export_time_format_str))
def _save_metadata(self, file_path):
with open(file_path, 'w') as fid:
for item in self.metadata:
fid.write(f'{item}\n')
def _save_mit_data(self, directory=None, **kwargs):
"""
        Saves mit data to file. The scope is the time span used for merging, i.e. the time span +/- the time delta.
:param directory:
:param kwargs:
:return:
"""
df = self.current_data['mit']
time_list = df['time'].values
file_path = os.path.join(directory,
'mit_{}_{}.txt'.format(pd.to_datetime(time_list[0]).strftime(self.export_time_format_str),
pd.to_datetime(time_list[-1]).strftime(self.export_time_format_str)))
df.to_csv(file_path, sep='\t', index=False)
return file_path
def _save_co2_data(self, directory=None, **kwargs):
"""
        Saves co2 data to file. The scope is the time span used for merging, i.e. the time span +/- the time delta.
:param directory:
:param kwargs:
:return:
"""
df = self.current_data['co2']
time_list = df['time'].values
file_path = os.path.join(directory,
'co2_{}_{}.txt'.format(pd.to_datetime(time_list[0]).strftime(self.export_time_format_str),
pd.to_datetime(time_list[-1]).strftime(self.export_time_format_str)))
df.to_csv(file_path, sep='\t', index=False)
return file_path
def _save_merge_data(self, directory=None, **kwargs):
if not os.path.exists(directory):
# Added by Johannes 2020-04-29
os.makedirs(directory)
file_path = os.path.join(directory, 'merge_{}_{}.txt'.format(self.package_prefix,
self._get_file_time_string()))
kw = dict(sep='\t',
index=False)
merge_data = self.get_merge_data()
self._mapp_columns(merge_data)
# @Johannes 2020-04-29
self._set_decimals(merge_data)
if kwargs.get('co2_types'):
boolean = merge_data['co2_Type'].isin(kwargs.get('co2_types'))
merge_data.loc[boolean].to_csv(file_path, **kw)
else:
merge_data.to_csv(file_path, **kw)
return file_path
@staticmethod
def _set_decimals(df):
"""
@Johannes
Temporary solution for setting number of decimals.
        Rather than using df.apply(), we enhance performance with numpy.vectorize().
        :param df: pd.DataFrame
        :return: None. Values in df are changed in place.
"""
def vectorize(x):
if x:
return round(x, 3)
else:
return x
parameters = ['calc_Pequ', 'calc_Tequ', 'calc_VP_H2O', 'calc_fCO2 SST', 'calc_k', 'calc_m',
'calc_pCO2', 'calc_pCO2 dry air', 'calc_time_since_latest_std', 'calc_xCO2']
for parameter in parameters:
if parameter in df:
if df[parameter].any():
df[parameter] = df[parameter].apply(as_float)
df[parameter] = np.vectorize(vectorize)(df[parameter])
def _get_export_directory(self, directory=None):
"""
Returns the export directory and creates it if non existing.
:param directory:
:return:
"""
if not directory:
directory = self.export_directory
if not directory:
raise AttributeError('No export directory found or given')
exp_directory = os.path.join(directory, self._get_save_dir_name())
self.save_directory = exp_directory
return exp_directory
class ManageTavastlandFiles(object):
def __init__(self, directory, **kwargs):
self._set_logger(kwargs.get('logger'))
self.directory = directory
self.files_id = 'tavastland'
        self.match_format = r'\d{14}_\d{14}'
self.mapping_files = MappingDirectory()
self.mapping_file = self.mapping_files.get_path('mapping_tavastland.txt')
self.col_files = 'nodc'
self.col_qc0 = 'iocftp_number'
self._load_directory()
def _set_logger(self, logger):
if logger:
self.logger = logger
else:
logging.config.fileConfig('logging.conf')
self.logger = logging.getLogger('timedrotating')
def _load_directory(self):
self.dir_object = Directory(self.directory, match_string=self.files_id, match_format=self.match_format)
def get_file_list(self):
return self.dir_object.get_list()
def list_files(self):
print('Files in directory:')
for file in self.get_file_list():
print(f' {file}')
def _get_merge_file_path(self):
for fname in self.get_file_list():
if fname.startswith('merge_'):
return self.dir_object.get_path(file_id=fname)
def flag_areas(self):
mask_areas_file_id = 'mask_areas_tavastland.txt'
mask_files = MaskAreasDirectory()
mask_obj = mask_files.get_file_object(mask_areas_file_id)
data_file_path = self._get_merge_file_path()
df = txt_reader.load_txt_df(data_file_path)
mapping_files = sharkpylib.file.file_handlers.MappingDirectory()
mapping_obj = mapping_files.get_file_object('mapping_tavastland.txt', from_col='co2_merged_file', to_col='nodc')
lat_list = [float(value) for value in df[mapping_obj.get('lat', 'lat')]]
lon_list = [float(value) for value in df[mapping_obj.get('lon', 'lon')]]
boolean = mask_obj.get_masked_boolean(lat_list, lon_list)
# Loop q columns
for col in df.columns:
if col.startswith('Q_'):
                df.loc[boolean, col] = '4'
df.to_csv(data_file_path, sep='\t', index=False)
return data_file_path
def create_qc0_file(self):
qc0_file_path = mappinglib.create_file_for_qc0(file_path=self._get_merge_file_path(),
mapping_file_path=self.mapping_file,
file_col=self.col_files,
qc0_col=self.col_qc0,
save_file=True)
return qc0_file_path
def add_nodc_qc_columns(self):
merge_file_path = self._get_merge_file_path()
mappinglib.add_nodc_qc_columns_to_df(file_path=merge_file_path,
save_file=True)
return merge_file_path
def add_qc0_info_to_nodc_column_file(self):
merge_file_path = self._get_merge_file_path()
mappinglib.merge_data_from_qc0(main_file_path=merge_file_path,
mapping_file_path=self.mapping_file,
file_col=self.col_files,
qc0_col=self.col_qc0,
save_file=True)
return merge_file_path
def as_float(item):
try:
return float(item)
    except (ValueError, TypeError):
return np.nan
def is_std(item):
if not item.startswith('STD'):
return False
# Drain is acceptable as STD: 2019-11-20
# if 'DRAIN' in item:
# return False
if item[-1] in ['z', 's']:
return False
return True
if __name__ == '__main__':
file_handler = FileHandler()
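    # Hypothetical end-to-end sketch of how the handler is meant to be used.
    # Directory paths and times below are placeholders, not real data:
    #
    # file_handler = FileHandler(mit_directory='<mit_dir>',
    #                            co2_directory='<co2_dir>',
    #                            export_directory='<export_dir>')
    # file_handler.set_time_range(time_start=datetime.datetime(2020, 1, 1),
    #                             time_end=datetime.datetime(2020, 1, 2))
    # file_handler.load_data()
    # file_handler.merge_data()
    # file_handler.calculate_pCO2()
    # file_handler.save_data(overwrite=True)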
|
package argocd
import (
"context"
"github.com/projectsyn/lieutenant-api/pkg/api"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/klog"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
var (
argoLabels = map[string]string{
"app.kubernetes.io/part-of": "argocd",
"argocd.argoproj.io/instance": "argocd",
}
argoSSHSecretName = "argo-ssh-key"
argoSSHPublicKey = "sshPublicKey"
argoSSHPrivateKey = "sshPrivateKey"
argoSSHConfigMapName = "argocd-ssh-known-hosts-cm"
argoTLSConfigMapName = "argocd-tls-certs-cm"
argoRbacConfigMapName = "argocd-rbac-cm"
argoConfigMapName = "argocd-cm"
argoSecretName = "argocd-secret"
argoRbacName = "argocd-application-controller"
argoRootAppName = "root"
argoProjectName = "syn"
argoAppsPath = "manifests/apps/"
)
// Apply reconciles the Argo CD deployments
func Apply(ctx context.Context, config *rest.Config, namespace, argoImage, redisArgoImage string, apiClient *api.Client, cluster *api.Cluster) error {
clientset, err := kubernetes.NewForConfig(config)
if err != nil {
return err
}
deployments, err := clientset.AppsV1().Deployments(namespace).List(ctx, metav1.ListOptions{
LabelSelector: "app.kubernetes.io/part-of=argocd",
})
if err != nil {
return err
}
expectedDeploymentCount := 3
foundDeploymentCount := len(deployments.Items)
statefulsets, err := clientset.AppsV1().StatefulSets(namespace).List(ctx, metav1.ListOptions{
LabelSelector: "app.kubernetes.io/part-of=argocd",
})
if err != nil {
return err
}
expectedStatefulSetCount := 1
foundStatefulSetCount := len(statefulsets.Items)
if foundDeploymentCount == expectedDeploymentCount && foundStatefulSetCount == expectedStatefulSetCount {
// Found expected deployments, found expected statefulsets, skip
return nil
}
klog.Infof("Found %d of expected %d deployments, found %d of expected %d statefulsets, bootstrapping now", foundDeploymentCount, expectedDeploymentCount, foundStatefulSetCount, expectedStatefulSetCount)
return bootstrapArgo(ctx, clientset, config, namespace, argoImage, redisArgoImage, apiClient, cluster)
}
func bootstrapArgo(ctx context.Context, clientset *kubernetes.Clientset, config *rest.Config, namespace, argoImage, redisArgoImage string, apiClient *api.Client, cluster *api.Cluster) error {
if err := createArgoCDConfigMaps(ctx, cluster, clientset, namespace); err != nil {
return err
}
if err := createArgoCRDs(ctx, config); err != nil {
return err
}
if err := createRedisDeployment(ctx, clientset, namespace, argoImage, redisArgoImage); err != nil {
return err
}
if err := createRepoServerDeployment(ctx, clientset, namespace, argoImage); err != nil {
return err
}
if err := createServerDeployment(ctx, clientset, namespace, argoImage); err != nil {
return err
}
if err := createArgoProject(ctx, cluster, config, namespace); err != nil {
return err
}
if err := createArgoApp(ctx, cluster, config, namespace); err != nil {
return err
}
if err := createApplicationControllerStatefulSet(ctx, clientset, namespace, argoImage); err != nil {
return err
}
return nil
}
|
Quantum transport simulation of two-dimensional GaSb transistors
Owing to the high carrier mobility, two-dimensional (2D) gallium antimonide (GaSb) is a promising channel material for field-effect transistors (FETs) in the post-silicon era. We investigated the ballistic performance of 2D GaSb metal–oxide–semiconductor FETs with a 10 nm gate length by ab initio quantum transport simulation. Because of the wider bandgap and better gate-control ability, the performance of the 10-nm monolayer (ML) GaSb FETs is generally superior to that of the bilayer counterparts, including an on-current that is three to four orders of magnitude larger. Via hydrogenation, the delay time and power consumption can be further improved by up to 35% and 57%, respectively, thanks to the expanded bandgap. The 10-nm ML GaSb FETs can almost meet the International Technology Roadmap for Semiconductors (ITRS) requirements for high-performance applications in terms of the on-state current, intrinsic delay time, and power-delay product. |
def zip(i):
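    """
    (Descriptive sketch, added for clarity.) Forward a 'zip' request to the
    repo module: reinterpret 'repo_uoa'/'cid' as the repo to archive, point
    'module_uoa' at the repo module and pass the call on via access().
    """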
if i.get('data_uoa', '') != '':
del(i['data_uoa'])
ruoa = i.get('repo_uoa', '')
if ruoa != '':
if ruoa.find('*') < 0 and ruoa.find('?') < 0:
i['data_uoa'] = ruoa
else:
del(i['repo_uoa'])
i['module_uoa'] = cfg['module_repo_name']
i['data'] = i.get('cid', '')
if i.get('cid', '') != '':
del(i['cid'])
return access(i) |
Traffic on Interstate in Salt Lake City, ahead of the Memorial Day weekend May 23, 2014. (Photo: Rick Bowmer, AP)
Drivers, start your engines.
The 99th running of the iconic Indianapolis 500 roars to a start Sunday, but the nation's highways were already filling up Thursday for Memorial Day weekend.
AAA starts counting travelers for the summer's first holiday weekend on Thursday, and they expect 37.2 million people to attempt the great escape -- travel 50 miles or more from home -- this weekend. That's the biggest Memorial Day weekend mini-migration in a decade. About 33 million of them will drive.
"Following a harsh winter, many Americans are trading in their snow boots for flip flops and making plans to start the season with a vacation getaway," said AAA president Marshall Doney.
Vacationers expecting to beat the rush by leaving Thursday may not find highway solace, at least in metro Washington, D.C.
"Traffic on Washington area highways... is expected to reach its worst point on Thursday afternoon, running counter to a common perception that the Friday before the holiday weekend is the worst time to travel," the region's Transportation Planning Board warned.
In fact, local traffic the Thursday of Memorial Day weekend was actually worse than Friday in each of five years that were studied, the board said. The problem, it seems, is that the pre-holiday Thursday presents the worst of both worlds. Everyone is driving -- either commuting to or from work or heading out for their holiday that day.
Drivers sitting in traffic can take some comfort in the cost of gas. Most drivers will likely pay the lowest Memorial Day gas prices in at least five years. Today's national average price of regular gas is $2.73 per gallon, almost a buck less than the average price a year ago.
The obvious alternative for dodging highway madness is the airport. Uh, but maybe not on Friday. That day, according to the John Heimlich, chief economist for Airlines for America, will be 2015's third busiest day for air travel.
Staycation, anyone?
Contributing: Bart Jansen
Read or Share this story: http://usat.ly/1HkleeA |
Minerva Reefs
The Minerva Reefs (Tongan: Ongo Teleki) are a group of two mostly submerged atolls located in the Pacific Ocean south of Fiji and Tonga.
Name
The reefs were named after the whaleship Minerva, wrecked on what became known as South Minerva after setting out from Sydney in 1829. Many other ships would follow, for example Strathcona, which was sailing north soon after completion in Auckland in 1914. In both cases most of the crew saved themselves in whaleboats or rafts and reached the Lau Islands in Fiji.[citation needed]
History
The reefs were first discovered by Captain John Nicholson of LMS Haweis in December 1818 as reported in the Sydney Gazette 30 January 1819.[1] Captain H. M. Denham of HMS Herald surveyed the reefs in 1854 and renamed them after the Australian whaler Minerva which ran aground on South Minerva Reef on 9 September 1829.[2][3]
Republic of Minerva
Flag of the Republic of Minerva
In 1972, real-estate millionaire Michael Oliver, of the Phoenix Foundation, sought to establish a libertarian country on the reefs. Oliver formed a syndicate, the Ocean Life Research Foundation, which had considerable finances for the project and had offices in New York City and London.[4] In 1971, the organization constructed a steel tower on the reef.[4] The Republic of Minerva issued a declaration of independence on 19 January 1972.[5] Morris Davis was elected as the President of Minerva.[6]
Tonga’s claim to the reef was recognized by the South Pacific Forum in September 1972. A Tongan expedition was sent to enforce the claim, arriving on 18 June 1972. The flag of Tonga was raised on 19 June 1972 on North Minerva and on South Minerva on 21 June 1972.[4][7]
In 1982, a group of Americans led again by Morris Davis tried to occupy the reefs, but were forced off by Tongan troops after three weeks.[citation needed] According to Reason, Minerva has been "more or less reclaimed by the sea".[8]
Territorial dispute
In 2005, Fiji declared that it did not recognize any maritime water claims by Tonga to the Minerva Reefs under the UNCLOS agreements. In November 2005, Fiji lodged a complaint with the International Seabed Authority concerning Tonga's maritime waters claims surrounding Minerva. Tonga lodged a counter claim. In 2010 the Fijian Navy destroyed navigation lights at the entrance to the lagoon. In late May 2011, they again destroyed navigational equipment installed by Tongans. In early June 2011, two Royal Tongan Navy ships were sent to the reef to replace the equipment, and to reassert Tonga's claim to the territory. Fijian Navy ships in the vicinity reportedly withdrew as the Tongans approached.[9][10]
In an effort to settle the dispute, the government of Tonga revealed a proposal in early July 2014 to give the Minerva Reefs to Fiji in exchange for the Lau Group of islands.[11] In a statement to the Tonga Daily News, Lands Minister Lord Maʻafu Tukuiʻaulahi announced that he would make the proposal to Fiji's Minister for Foreign Affairs, Ratu Inoke Kubuabola. Some Tongans have Lauan ancestors and many Lauans have Tongan ancestors; Tonga's Lands Minister is named after Enele Ma'afu, the Tongan Prince who originally claimed parts of Lau for Tonga.[12]
Geography
Minerva Reefs
A yacht anchored at South Minerva.
Area: North Reef diameter about 5.6 kilometres (3.5 mi), South Reef diameter about 4.8 kilometres (3.0 mi). Terrain: two atolls on dormant volcanic seamounts.
Both Minerva Reefs are about 435 kilometres (270 mi) southwest of the Tongatapu Group. The atolls are on a common submarine platform from 549 to 1,097 metres (1,801 to 3,599 ft) below the surface of the sea. North Minerva is circular in shape and has a diameter of about 5.6 kilometres (3.5 mi). There is a small sand bar around the atoll, awash at high tide, with a small entrance into the flat lagoon with a somewhat deep harbor. South Minerva is parted into The East Reef and the West Reef, both circular with a diameter of about 4.8 kilometres (3.0 mi). Remnants of shipwrecks and platforms remain on the atolls, plus functioning navigation beacons.
Geologically, Minerva Reef is of a limestone base formed from uplifted coral formations elevated by now-dormant volcanic activity.
The climate is basically subtropical with a distinct warm period (December–April), during which the temperatures rise above 32 °C (90 °F), and a cooler period (May–November), with temperatures rarely rising above 27 °C (80 °F). The temperature increases from 23 °C to 27 °C (74 °F to 80 °F), and the annual rainfall is from 170 to 297 centimeters (67–117 in.) as one moves from Cardea in the south to the more northerly islands closer to the Equator. The mean daily humidity is 80 percent.
Both North and South Minerva Reefs are used as anchorages by private yachts traveling between New Zealand and Tonga or Fiji.[13] North Minerva (Tongan: Teleki Tokelau) offers the more protected anchorage, with a single, easily negotiated, west-facing pass that offers access to the large, calm lagoon with extensive sandy areas. South Minerva (Tongan: Teleki Tonga) is in shape similar to an infinity symbol, with its eastern lobe partially open to the ocean on the northern side.
Shipwrecks
Sailboat wreck on North Minerva reef
On 7 July 1962, the Tuaikaepau ('Slow But Sure'), a Tongan vessel on its way to New Zealand, struck the reefs.[14] This 15-metre (49 ft) wooden vessel was built in 1902 at the same yard as the Strathcona. The crew and passengers survived by living in the remains of a Japanese freighter, where they remained for three months and several died. Without tools, Captain Tēvita Fifita built a small boat using wood recovered from the ship. With this boat, named Malolelei ('Good Day'), he and several others sailed to Fiji in one week.
Further reading
Interview with Oliver at Stay Free Magazine
|
package influx
type runOption func(*influxDB) error
// WithUsername causes Run to use a given username to connect to DB.
func WithUsername(u string) runOption {
return func(idb *influxDB) error {
idb.username = u
return nil
}
}
// WithPassword causes Run to use a given password to connect to DB.
func WithPassword(p string) runOption {
return func(idb *influxDB) error {
idb.password = p
return nil
}
}
// WithPrecision causes Run to use a given precision when pushing to DB (default is seconds).
func WithPrecision(p string) runOption {
return func(idb *influxDB) error {
idb.precision = p
return nil
}
}
// WithRetentionPolicy causes Run to use a given retention policy when pushing to DB.
func WithRetentionPolicy(rp string) runOption {
return func(idb *influxDB) error {
idb.retentionPolicy = rp
return nil
}
}
// WithWriteConsistency causes Run to use a given write consistency when pushing to DB.
func WithWriteConsistency(wc string) runOption {
return func(idb *influxDB) error {
idb.writeConsistency = wc
return nil
}
}
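// Usage sketch (illustrative only): these options are intended to be passed to
// this package's Run entry point, e.g. something like
//
//	err := Run(cfg,
//	    WithUsername("metrics"),
//	    WithPassword("secret"),
//	    WithPrecision("ms"),
//	    WithRetentionPolicy("two_weeks"),
//	)
//
// The exact Run signature is not shown in this file, so treat the call above
// as an assumption rather than the package's actual API.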
|
#include <bits/stdc++.h>
using namespace std;
#define rep(i, a, b) for(int i = a; i < (b); ++i)
#define trav(a, x) for(auto& a : x)
#define F first
#define S second
#define all(x) x.begin(), x.end()
#define sz(x) (int)(x).size()
typedef long long ll;
typedef pair<int, int> pii;
typedef vector<int> vi;
const int maxn = 412;
string s[maxn];
int h, w, dx[] = {1, -1, 0, 0}, dy[] = {0, 0, 1, -1};
bool vis[maxn][maxn];
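// dfs() walks a component of the graph whose edges join orthogonally adjacent
// cells of different colour, counting '#' cells (cnt_w) and '.' cells (cnt_b);
// main() then sums cnt_w * cnt_b over all such components.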
void dfs(int x, int y, int &cnt_w, int &cnt_b){
if(vis[x][y]) return;
vis[x][y] = true;
if(s[x][y] == '#') cnt_w ++;
else cnt_b++;
rep(i,0,4){
int xx = x + dx[i], yy = y + dy[i];
if(xx < h && xx >=0 && yy < w && yy >= 0 && s[x][y] != s[xx][yy]) dfs(xx, yy, cnt_w, cnt_b);
}
}
int main() {
#ifdef LOCAL_DEFINE
freopen("test.in","r",stdin);
#else
cin.sync_with_stdio(0); cin.tie(0);
#endif
cin >> h >> w;
rep(i,0,h) cin >> s[i];
ll ans = 0;
rep(i,0,h){
rep(j,0,w){
if(!vis[i][j]){
int cnt_w = 0, cnt_b = 0;
dfs(i, j, cnt_w, cnt_b);
ans += 1ll * cnt_w * cnt_b;
}
}
}
cout << ans << '\n';
#ifdef LOCAL_DEFINE
cerr << "Time elapsed: " << 1.0 * (double) clock() / CLOCKS_PER_SEC << " s.\n";
#endif
}
|
/**
* Read the global command completion coalescing ports register.
*/
static int HbaCccPorts_r(PAHCI ahci, uint32_t iReg, uint32_t *pu32Value)
{
Log(("%s: read regHbaCccPorts=%#010x\n", __FUNCTION__, ahci->regHbaCccPorts));
#ifdef LOG_ENABLED
Log(("%s:", __FUNCTION__));
unsigned i;
for (i = 0; i < ahci->cPortsImpl; i++)
{
if ((ahci->regHbaCccPorts >> i) & 0x01)
Log((" P%d", i));
}
Log(("\n"));
#endif
*pu32Value = ahci->regHbaCccPorts;
return VINF_SUCCESS;
} |
The Mayu Tomita Stabbing Incident and the Problem of Protecting Stalking Victims in Japan
Mayu Tomita, a 20-year-old singer-songwriter and former idol, was tragically stabbed in the neck and chest by Tomohiro Iwazaki, 27, shortly before she was to take the stage at Event Space Solid in Musashi-Koganei, Tokyo, on May 21, 2016. Tomita remains in critical condition and Iwazaki has been arrested.
Many of the media outlets that have reported on this incident, including many here in Japan, have been referring to Tomita as a “pop star” or “idol” and her assailant as a “fan”, even comparing it to when Rina Kawaei, Anna Iriyama, and a staff member were slashed during an AKB48 handshake event held in Iwate in 2014. These labels are not entirely accurate, but the mass media is known for exaggerating in order to stir public interest and gain views. Sadly, it seems that other cases where women were stalked and killed have not received the same level of attention. However, we would like to take this time to find out more about who Mayu Tomita is and address the serious issue of stalking in Japan.
First of all, here are some of the details about Mayu Tomita that we have found. Although Tomita (冨田真由) shares the same romanized name as former 5th generation AKB48 Kenkyuusei (冨田麻友), they are not the same person. Tomita was in an idol group named Secret Girls which was part of a Fuji TV drama (2011 – 2012) of the same name. Also in the group were current THE HOOPERS member Nanami Mitsuhashi (aka SENA) and former Idoling!!! members Nao Asahi, Kaede Hashimoto, and Yuna Ito.
Tomita also appeared in the 2010 NHK drama “Ichigo to Senbei” and the first episode of 2011’s “Kamen Rider Forza” as well as several stage plays. She made a brief appearance in the MV for rock band Galileo Galilei’s 1st single “Natsuzora” (release date: June 9, 2010). She began activities as a singer-songwriter during late 2015, performing in small live houses and restaurants.
The problem with stalking in Japan lies with the current anti-stalking laws, which do little to protect the victims. The current laws do not cover cyberstalking via social media. The suspect is simply given a verbal warning upon their first offense and a written warning for their second offense. Tomita had approached police in Musashino on May 9th to have them stop Iwazaki from harassing her, and her mother had contacted Kyoto police (where Iwazaki was reported to have been living) on May 4th. No immediate action was taken by the police, as they did not interpret the messages as threatening and had to confirm whether it was actually Iwazaki who had been sending the messages.
On October 8, 2013, high school student and aspiring actress Saaya Suzuki, 18, was stabbed to death in her home in Mitaka, Tokyo, by ex-boyfriend Charles Thomas Ikenaga, 21. She had received death threats from Ikenaga earlier in the month, and she had even gone to the police station on the morning of her murder after he had been seen outside of her home. Police called Ikenaga's phone three times and, upon receiving no answer, took no further action. It was later learned that the phone did not even belong to him. Ikenaga was sentenced to 22 years in prison as a result, but the Suzuki family will never get their daughter back.
The anti-stalking laws need to be revised to include social media and allow for stricter punishment for offenders, as the number of stalking cases in Japan has been rising in recent years. The other issue when it comes to dealing with stalkers is that the majority of the police force is male while the majority of the victims are female. Is it too much to assume that what one of them would not find threatening could be downright terrifying to the person being stalked?
While this is not exclusive to idols, many celebrities build their fan bases through constant interaction with fans, on social media as well as in person. With so many venues of different sizes scattered all over the place, there are no uniform standards for security. This provides a wide array of unique experiences for fans and performers alike, but it also presents risks. For the most part, security is far less strict than at concerts or events in other countries, as attendees tend to conduct themselves properly. This allows performers and audience members to be closer than they would be under different circumstances. Artists who go into the audience do so with the understanding that no one will try to hurt them.
As seen by the increase in security following the 2014 AKB48 incident, it is possible to provide better safety for performers, but having metal detectors and bag checks at every venue is not realistic given how widely events vary in size. In the case of Mayu Tomita, she was attacked right outside the venue.
Social media has provided many new opportunities for celebrities to promote themselves around the clock without having to negotiate appearances on mainstream media like television or magazines. With current technology, it’s possible to generate excitement with as little as a smartphone by posting updates (Twitter, Facebook, blogs, etc.), uploading pictures to Instagram, or broadcasting and recording video to streaming (YouTube, Ustream, Tweetcast, Periscope, etc.) or sharing sites (YouTube, Nico Nico, Vine, Snapchat, Musical.ly, etc.). While some services allow for reciprocal relationships, for the most part the celebrity assumes the position of the broadcaster with their followers as the audience, creating a parasocial relationship. It’s almost as if “The Truman Show” (1998) has become our new reality. However, the increase in content for people to consume also increases the chances of delusional behavior.
Even before the mobile Internet came along, delusional fans turned violent, as shown when John Lennon was shot to death by Mark David Chapman in New York City on December 8, 1980. Chapman had originally idolized Lennon but grew increasingly disillusioned and angry with the singer-songwriter, leading to the fatal shooting just hours after the two had met, Lennon having shaken Chapman’s hand and signed an album for him. Twenty-four years later in 2004, former Pantera guitarist Dimebag Darrell was shot dead in an Ohio nightclub by Nathan Gale, a former Marine.
It is easy to blame the way that celebrities, Japanese idols in particular, are marketed as the cause of obsessive fan and stalker behavior leading to attacks like the one inflicted on Mayu Tomita. But fans have never been “normal”. Is it normal for Takarazuka fans to line the streets outside of a theater and wait for their favorite musumeyaku or otokoyaku to appear? Was it normal for “Deadheads” to follow The Grateful Dead wherever they went to perform? Is it normal for fans to buy thousands of CDs just to go to an AKB48 handshake event or vote in an election every year?
While there is no clear answer as to why some fans turn into stalkers, mental health issues, social anxiety, frustration, and a false sense of entitlement may all be factors. Satoru Umeta, who was sentenced to 6 years in prison following the Iwate slashing incident, admitted that he had been motivated by jealousy over how much the members of AKB48 made, having been unemployed at the time. Tomita’s attacker had reportedly expressed frustration at not being acknowledged by her and felt as if she was looking down on him. In the case of Saaya Suzuki, it was her ex-boyfriend.
How many more people being stalked have to be attacked before something is done to better protect them? Following the stalking and murder of university student Shiori Iino in 1999, an anti-stalking law was passed making following and harassment via telephone and fax illegal. It was amended in 2013 to cover e-mail harassment, but harassment and stalking via social media have yet to be criminalized. The world continues to change at a rapid pace, and with it come new ways to reach people as well as to hurt them. It is truly unfortunate that young women like Mayu Tomita have to be hurt so badly for the world to take notice. We just hope that it is not a change that she has to pay for with her life.
Author: Kai Okudara (writer, researcher, photographer, foodie, KSDD)
#ifndef IMAGE_PATTERN_HEX_H_
#define IMAGE_PATTERN_HEX_H_

#include <stdint.h> /* required for uint8_t */
const uint8_t image_pattern[7168] =
{
0xf0, 0x0f, 0x00, 0x20, 0x11, 0x1c, 0x00, 0x00, 0x0f, 0x22, 0x00, 0x00, 0x57, 0x22, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xc5, 0x20, 0x00, 0x00,
0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00,
0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00,
0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00,
0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00,
0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0xf8, 0xb5, 0xc0, 0x46,
0xf8, 0xbc, 0x08, 0xbc, 0x9e, 0x46, 0x70, 0x47, 0xf1, 0x1b, 0x00, 0x00, 0xf8, 0xb5, 0xc0, 0x46,
0xf8, 0xbc, 0x08, 0xbc, 0x9e, 0x46, 0x70, 0x47, 0xc9, 0x1b, 0x00, 0x00, 0xf0, 0xb5, 0x9b, 0xb0,
0x02, 0x90, 0x0f, 0x00, 0x04, 0x92, 0x03, 0x93, 0x00, 0x28, 0x05, 0xd0, 0x83, 0x69, 0x05, 0x93,
0x00, 0x2b, 0x01, 0xd1, 0x01, 0xf0, 0xa6, 0xf8, 0x96, 0x4b, 0x9f, 0x42, 0x5c, 0xd1, 0x02, 0x9b,
0x5f, 0x68, 0xbb, 0x89, 0x1b, 0x07, 0x63, 0xd5, 0x3b, 0x69, 0x00, 0x2b, 0x60, 0xd0, 0x00, 0x22,
0x08, 0xab, 0x0d, 0x92, 0x20, 0x32, 0x5a, 0x76, 0x10, 0x32, 0x9a, 0x76, 0x04, 0x9e, 0x33, 0x78,
0x00, 0x2b, 0x01, 0xd0, 0x25, 0x2b, 0x5d, 0xd1, 0x04, 0x9b, 0xf4, 0x1a, 0x0c, 0xd0, 0x23, 0x00,
0x04, 0x9a, 0x39, 0x00, 0x02, 0x98, 0x01, 0xf0, 0xcb, 0xf9, 0x43, 0x1c, 0x00, 0xd1, 0xd6, 0xe0,
0x0d, 0x9b, 0x1c, 0x19, 0x04, 0x93, 0x0d, 0x94, 0x33, 0x78, 0x00, 0x2b, 0x00, 0xd1, 0xce, 0xe0,
0x01, 0x22, 0x00, 0x23, 0x08, 0xac, 0x52, 0x42, 0x62, 0x60, 0x04, 0xa9, 0x54, 0x32, 0x52, 0x18,
0x08, 0x93, 0xe3, 0x60, 0xa3, 0x60, 0x01, 0x36, 0x13, 0x70, 0x31, 0x78, 0x05, 0x22, 0x7a, 0x48,
0x01, 0xf0, 0x7e, 0xfc, 0x75, 0x1c, 0x00, 0x28, 0x36, 0xd1, 0x22, 0x68, 0xd3, 0x06, 0x04, 0xd5,
0x53, 0x23, 0x04, 0xa9, 0x5b, 0x18, 0x20, 0x21, 0x19, 0x70, 0x13, 0x07, 0x04, 0xd5, 0x53, 0x23,
0x04, 0xa9, 0x5b, 0x18, 0x2b, 0x21, 0x19, 0x70, 0x33, 0x78, 0x2a, 0x2b, 0x2d, 0xd0, 0x35, 0x00,
0x00, 0x21, 0x0a, 0x20, 0xe3, 0x68, 0x2a, 0x78, 0x6e, 0x1c, 0x30, 0x3a, 0x09, 0x2a, 0x62, 0xd9,
0x00, 0x29, 0x28, 0xd0, 0xe3, 0x60, 0x26, 0xe0, 0x68, 0x4b, 0x9f, 0x42, 0x02, 0xd1, 0x02, 0x9b,
0x9f, 0x68, 0x9e, 0xe7, 0x66, 0x4b, 0x9f, 0x42, 0x9b, 0xd1, 0x02, 0x9b, 0xdf, 0x68, 0x98, 0xe7,
0x39, 0x00, 0x02, 0x98, 0x00, 0xf0, 0x4a, 0xfc, 0x00, 0x28, 0x98, 0xd0, 0x01, 0x20, 0x40, 0x42,
0x1b, 0xb0, 0xf0, 0xbd, 0x01, 0x36, 0x9a, 0xe7, 0x5b, 0x4b, 0x22, 0x68, 0xc0, 0x1a, 0x01, 0x23,
0x83, 0x40, 0x13, 0x43, 0x23, 0x60, 0x2e, 0x00, 0xb7, 0xe7, 0x03, 0x9e, 0x08, 0xce, 0x00, 0x2b,
0x33, 0xdb, 0xe3, 0x60, 0x03, 0x96, 0x2b, 0x78, 0x2e, 0x2b, 0x09, 0xd1, 0x6b, 0x78, 0x2a, 0x2b,
0x39, 0xd1, 0x03, 0x9e, 0x02, 0x35, 0x08, 0xce, 0x00, 0x2b, 0x31, 0xdb, 0x63, 0x60, 0x03, 0x96,
0x4d, 0x4e, 0x29, 0x78, 0x06, 0x36, 0x03, 0x22, 0x30, 0x00, 0x01, 0xf0, 0x21, 0xfc, 0x00, 0x28,
0x06, 0xd0, 0x40, 0x23, 0x80, 0x1b, 0x83, 0x40, 0x22, 0x68, 0x01, 0x35, 0x13, 0x43, 0x23, 0x60,
0x45, 0x48, 0x29, 0x78, 0x6b, 0x1c, 0x06, 0x22, 0x0a, 0x30, 0x04, 0x93, 0x21, 0x76, 0x01, 0xf0,
0x0f, 0xfc, 0x23, 0x7e, 0x00, 0x2b, 0x29, 0xd0, 0x73, 0x2b, 0x4e, 0xd0, 0x52, 0x25, 0x04, 0xaa,
0xad, 0x18, 0x2b, 0x70, 0x01, 0x23, 0x23, 0x61, 0x55, 0xe0, 0x5b, 0x42, 0xe3, 0x60, 0x02, 0x23,
0x13, 0x43, 0x23, 0x60, 0xc6, 0xe7, 0x43, 0x43, 0x01, 0x21, 0x9b, 0x18, 0x35, 0x00, 0x92, 0xe7,
0x01, 0x23, 0x5b, 0x42, 0xca, 0xe7, 0x00, 0x23, 0x0a, 0x20, 0x1a, 0x00, 0x01, 0x35, 0x63, 0x60,
0x29, 0x78, 0x6e, 0x1c, 0x30, 0x39, 0x09, 0x29, 0x03, 0xd9, 0x00, 0x2b, 0xc0, 0xd0, 0x62, 0x60,
0xbe, 0xe7, 0x42, 0x43, 0x01, 0x23, 0x52, 0x18, 0x35, 0x00, 0xf1, 0xe7, 0x53, 0x25, 0x23, 0x61,
0x04, 0xab, 0xed, 0x18, 0x2b, 0x4b, 0x07, 0xaa, 0x00, 0x93, 0x08, 0xa9, 0x3b, 0x00, 0x02, 0x98,
0x00, 0xf0, 0x4a, 0xfc, 0x43, 0x1c, 0x12, 0xd0, 0x0c, 0x9b, 0x2a, 0x00, 0x39, 0x00, 0x02, 0x98,
0x01, 0xf0, 0xfe, 0xf8, 0x43, 0x1c, 0x0a, 0xd0, 0x08, 0x9b, 0x05, 0x93, 0x9b, 0x07, 0x32, 0xd4,
0x07, 0x9a, 0x0b, 0x9b, 0x93, 0x42, 0x00, 0xda, 0x13, 0x00, 0x5a, 0x1c, 0x2d, 0xd1, 0xbb, 0x89,
0x5b, 0x06, 0x00, 0xd5, 0x72, 0xe7, 0x0d, 0x98, 0x72, 0xe7, 0x03, 0x9e, 0x62, 0x68, 0x20, 0xce,
0x00, 0x21, 0x28, 0x00, 0x01, 0xf0, 0xb4, 0xfb, 0x00, 0x28, 0x01, 0xd0, 0x40, 0x1b, 0x60, 0x60,
0x09, 0x9b, 0x03, 0x96, 0x0c, 0x93, 0x53, 0x23, 0x04, 0xaa, 0x9b, 0x18, 0x00, 0x22, 0x1a, 0x70,
0xc8, 0xe7, 0x29, 0x22, 0x04, 0xa9, 0x01, 0x23, 0x52, 0x18, 0x02, 0x98, 0x39, 0x00, 0x01, 0xf0,
0xcf, 0xf8, 0x43, 0x1c, 0xdb, 0xd0, 0x01, 0x34, 0x0b, 0x9b, 0x07, 0x9a, 0x05, 0x93, 0x9b, 0x1a,
0x9c, 0x42, 0xee, 0xdb, 0xcc, 0xe7, 0x00, 0x24, 0xf6, 0xe7, 0x0d, 0x9a, 0xd3, 0x18, 0x05, 0x92,
0x0d, 0x93, 0xe3, 0xe6, 0x00, 0x02, 0x00, 0x20, 0xa6, 0x22, 0x00, 0x00, 0x20, 0x02, 0x00, 0x20,
0xe0, 0x01, 0x00, 0x20, 0xa1, 0x1c, 0x00, 0x00, 0x40, 0x02, 0x00, 0x20, 0x1c, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x5c, 0x02, 0x00, 0x20, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x7c, 0x02, 0x00, 0x20, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x68, 0x02, 0x00, 0x20,
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6c, 0x02, 0x00, 0x20, 0x04, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x80, 0x01, 0x00, 0x20, 0x80, 0x01, 0x00, 0x20, 0x60, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x20, 0x20, 0x02, 0x00, 0x20,
0xe0, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xe0, 0x01, 0x00, 0x20, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x02, 0x00, 0x20, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x02, 0x00, 0x20,
0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x20, 0x6c, 0x01, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x29, 0x22, 0x00, 0x00, 0xe9, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x38, 0x11, 0x00, 0x00, 0x25, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00,
0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00,
0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00, 0x24, 0x23, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x01, 0x00, 0x41, 0x53,
0x43, 0x49, 0x49, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x41, 0x53,
0x43, 0x49, 0x49, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x70, 0x02, 0x00, 0x20, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x74, 0x02, 0x00, 0x20,
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x20, 0x04, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x78, 0x02, 0x00, 0x20, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0xb5, 0x1e, 0x00, 0x4f, 0x4b, 0x85, 0xb0,
0x1d, 0x68, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x92, 0x00, 0x2d, 0x05, 0xd0, 0xab, 0x69, 0x00, 0x2b,
0x02, 0xd1, 0x28, 0x00, 0x00, 0xf0, 0x1e, 0xfe, 0x49, 0x4b, 0x9c, 0x42, 0x4f, 0xd1, 0x6c, 0x68,
0x00, 0x9b, 0x02, 0x2b, 0x05, 0xd0, 0x01, 0x2b, 0x00, 0xd9, 0x83, 0xe0, 0x00, 0x2e, 0x00, 0xda,
0x80, 0xe0, 0x21, 0x00, 0x28, 0x00, 0x00, 0xf0, 0x6d, 0xfe, 0x61, 0x6b, 0x00, 0x29, 0x08, 0xd0,
0x23, 0x00, 0x44, 0x33, 0x99, 0x42, 0x02, 0xd0, 0x28, 0x00, 0x00, 0xf0, 0xad, 0xfc, 0x00, 0x23,
0x63, 0x63, 0x00, 0x23, 0xa3, 0x61, 0x63, 0x60, 0xa3, 0x89, 0x1b, 0x06, 0x03, 0xd5, 0x21, 0x69,
0x28, 0x00, 0x00, 0xf0, 0xa1, 0xfc, 0xa3, 0x89, 0x36, 0x4a, 0x13, 0x40, 0xa3, 0x81, 0x00, 0x9b,
0x02, 0x2b, 0x59, 0xd0, 0x03, 0xab, 0x02, 0xaa, 0x21, 0x00, 0x28, 0x00, 0x00, 0xf0, 0x76, 0xfe,
0xa3, 0x89, 0x03, 0x43, 0xa3, 0x81, 0x00, 0x2e, 0x23, 0xd1, 0x02, 0x9e, 0x30, 0x00, 0x01, 0xf0,
0x17, 0xfa, 0x07, 0x1e, 0x43, 0xd1, 0x02, 0x9b, 0x01, 0x93, 0xb3, 0x42, 0x39, 0xd1, 0x01, 0x20,
0x40, 0x42, 0x02, 0x23, 0xa2, 0x89, 0x13, 0x43, 0xa3, 0x81, 0x00, 0x23, 0xa3, 0x60, 0x23, 0x00,
0x47, 0x33, 0x23, 0x60, 0x23, 0x61, 0x01, 0x23, 0x63, 0x61, 0x05, 0xb0, 0xf0, 0xbd, 0x22, 0x4b,
0x9c, 0x42, 0x01, 0xd1, 0xac, 0x68, 0xab, 0xe7, 0x20, 0x4b, 0x9c, 0x42, 0xa8, 0xd1, 0xec, 0x68,
0xa6, 0xe7, 0x00, 0x2f, 0xda, 0xd0, 0xab, 0x69, 0x00, 0x2b, 0x02, 0xd1, 0x28, 0x00, 0x00, 0xf0,
0xb9, 0xfd, 0x00, 0x9b, 0x01, 0x2b, 0x03, 0xd1, 0xa3, 0x89, 0x00, 0x9a, 0x1a, 0x43, 0xa2, 0x81,
0x08, 0x20, 0xa3, 0x89, 0x27, 0x60, 0x27, 0x61, 0x66, 0x61, 0x18, 0x40, 0x18, 0xd0, 0x01, 0x20,
0x18, 0x40, 0x00, 0x23, 0x98, 0x42, 0x11, 0xd0, 0x76, 0x42, 0xa3, 0x60, 0xa6, 0x61, 0x18, 0x00,
0xd3, 0xe7, 0x01, 0x98, 0x01, 0xf0, 0xd4, 0xf9, 0x01, 0x9e, 0x07, 0x1e, 0xbf, 0xd0, 0x80, 0x23,
0xa2, 0x89, 0x13, 0x43, 0xa3, 0x81, 0xd6, 0xe7, 0x00, 0x20, 0xba, 0xe7, 0xa6, 0x60, 0xc4, 0xe7,
0xa0, 0x60, 0xc2, 0xe7, 0x01, 0x20, 0x40, 0x42, 0xbf, 0xe7, 0xc0, 0x46, 0x6c, 0x02, 0x00, 0x20,
0x00, 0x02, 0x00, 0x20, 0x5c, 0xf3, 0xff, 0xff, 0x20, 0x02, 0x00, 0x20, 0xe0, 0x01, 0x00, 0x20,
0x00, 0x22, 0x43, 0x08, 0x8b, 0x42, 0x74, 0xd3, 0x03, 0x09, 0x8b, 0x42, 0x5f, 0xd3, 0x03, 0x0a,
0x8b, 0x42, 0x44, 0xd3, 0x03, 0x0b, 0x8b, 0x42, 0x28, 0xd3, 0x03, 0x0c, 0x8b, 0x42, 0x0d, 0xd3,
0xff, 0x22, 0x09, 0x02, 0x12, 0xba, 0x03, 0x0c, 0x8b, 0x42, 0x02, 0xd3, 0x12, 0x12, 0x09, 0x02,
0x65, 0xd0, 0x03, 0x0b, 0x8b, 0x42, 0x19, 0xd3, 0x00, 0xe0, 0x09, 0x0a, 0xc3, 0x0b, 0x8b, 0x42,
0x01, 0xd3, 0xcb, 0x03, 0xc0, 0x1a, 0x52, 0x41, 0x83, 0x0b, 0x8b, 0x42, 0x01, 0xd3, 0x8b, 0x03,
0xc0, 0x1a, 0x52, 0x41, 0x43, 0x0b, 0x8b, 0x42, 0x01, 0xd3, 0x4b, 0x03, 0xc0, 0x1a, 0x52, 0x41,
0x03, 0x0b, 0x8b, 0x42, 0x01, 0xd3, 0x0b, 0x03, 0xc0, 0x1a, 0x52, 0x41, 0xc3, 0x0a, 0x8b, 0x42,
0x01, 0xd3, 0xcb, 0x02, 0xc0, 0x1a, 0x52, 0x41, 0x83, 0x0a, 0x8b, 0x42, 0x01, 0xd3, 0x8b, 0x02,
0xc0, 0x1a, 0x52, 0x41, 0x43, 0x0a, 0x8b, 0x42, 0x01, 0xd3, 0x4b, 0x02, 0xc0, 0x1a, 0x52, 0x41,
0x03, 0x0a, 0x8b, 0x42, 0x01, 0xd3, 0x0b, 0x02, 0xc0, 0x1a, 0x52, 0x41, 0xcd, 0xd2, 0xc3, 0x09,
0x8b, 0x42, 0x01, 0xd3, 0xcb, 0x01, 0xc0, 0x1a, 0x52, 0x41, 0x83, 0x09, 0x8b, 0x42, 0x01, 0xd3,
0x8b, 0x01, 0xc0, 0x1a, 0x52, 0x41, 0x43, 0x09, 0x8b, 0x42, 0x01, 0xd3, 0x4b, 0x01, 0xc0, 0x1a,
0x52, 0x41, 0x03, 0x09, 0x8b, 0x42, 0x01, 0xd3, 0x0b, 0x01, 0xc0, 0x1a, 0x52, 0x41, 0xc3, 0x08,
0x8b, 0x42, 0x01, 0xd3, 0xcb, 0x00, 0xc0, 0x1a, 0x52, 0x41, 0x83, 0x08, 0x8b, 0x42, 0x01, 0xd3,
0x8b, 0x00, 0xc0, 0x1a, 0x52, 0x41, 0x43, 0x08, 0x8b, 0x42, 0x01, 0xd3, 0x4b, 0x00, 0xc0, 0x1a,
0x52, 0x41, 0x41, 0x1a, 0x00, 0xd2, 0x01, 0x46, 0x52, 0x41, 0x10, 0x46, 0x70, 0x47, 0xff, 0xe7,
0x01, 0xb5, 0x00, 0x20, 0x01, 0xf0, 0xac, 0xf9, 0x02, 0xbd, 0xc0, 0x46, 0x00, 0x29, 0xf7, 0xd0,
0x76, 0xe7, 0x70, 0x47, 0xf7, 0xb5, 0x8a, 0x89, 0x05, 0x00, 0x0c, 0x00, 0x13, 0x07, 0x5d, 0xd4,
0x4b, 0x68, 0x00, 0x2b, 0x04, 0xdc, 0x0b, 0x6c, 0x00, 0x2b, 0x01, 0xdc, 0x00, 0x20, 0xfe, 0xbd,
0xe7, 0x6a, 0x00, 0x2f, 0xfa, 0xd0, 0x00, 0x23, 0x2e, 0x68, 0x2b, 0x60, 0x80, 0x23, 0x5b, 0x01,
0x1a, 0x40, 0x34, 0xd0, 0x60, 0x6d, 0xa3, 0x89, 0x5b, 0x07, 0x06, 0xd5, 0x63, 0x68, 0xc0, 0x1a,
0x63, 0x6b, 0x00, 0x2b, 0x01, 0xd0, 0x23, 0x6c, 0xc0, 0x1a, 0x02, 0x00, 0x21, 0x6a, 0x00, 0x23,
0x28, 0x00, 0xe7, 0x6a, 0xb8, 0x47, 0xa1, 0x89, 0x43, 0x1c, 0x06, 0xd1, 0x2b, 0x68, 0x1d, 0x2b,
0x31, 0xd8, 0x2c, 0x4a, 0xda, 0x40, 0xd3, 0x07, 0x2d, 0xd5, 0x00, 0x23, 0x63, 0x60, 0x23, 0x69,
0x23, 0x60, 0xcb, 0x04, 0x05, 0xd5, 0x43, 0x1c, 0x02, 0xd1, 0x2b, 0x68, 0x00, 0x2b, 0x00, 0xd1,
0x60, 0x65, 0x61, 0x6b, 0x2e, 0x60, 0x00, 0x29, 0xc8, 0xd0, 0x23, 0x00, 0x44, 0x33, 0x99, 0x42,
0x02, 0xd0, 0x28, 0x00, 0x00, 0xf0, 0x58, 0xfb, 0x00, 0x20, 0x60, 0x63, 0xbf, 0xe7, 0x01, 0x23,
0x21, 0x6a, 0x28, 0x00, 0xb8, 0x47, 0x43, 0x1c, 0xc5, 0xd1, 0x2b, 0x68, 0x00, 0x2b, 0xc2, 0xd0,
0x1d, 0x2b, 0x01, 0xd0, 0x16, 0x2b, 0x01, 0xd1, 0x2e, 0x60, 0xaf, 0xe7, 0x40, 0x23, 0xa2, 0x89,
0x13, 0x43, 0xa3, 0x81, 0xab, 0xe7, 0x40, 0x23, 0x0b, 0x43, 0xfa, 0xe7, 0x0f, 0x69, 0x00, 0x2f,
0xa4, 0xd0, 0x0b, 0x68, 0x0f, 0x60, 0xdb, 0x1b, 0x01, 0x93, 0x00, 0x23, 0x92, 0x07, 0x00, 0xd1,
0x4b, 0x69, 0xa3, 0x60, 0x01, 0x9b, 0x00, 0x2b, 0x00, 0xdc, 0x97, 0xe7, 0x01, 0x9b, 0x3a, 0x00,
0x21, 0x6a, 0x28, 0x00, 0xa6, 0x6a, 0xb0, 0x47, 0x00, 0x28, 0x06, 0xdc, 0x40, 0x23, 0x01, 0x20,
0xa2, 0x89, 0x40, 0x42, 0x13, 0x43, 0xa3, 0x81, 0x89, 0xe7, 0x01, 0x9b, 0x3f, 0x18, 0x1b, 0x1a,
0x01, 0x93, 0xe7, 0xe7, 0x01, 0x00, 0x40, 0x20, 0x00, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
0x20, 0x20, 0x28, 0x28, 0x28, 0x28, 0x28, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x88, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10,
0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
0x04, 0x04, 0x04, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x10, 0x10, 0x10, 0x10, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x47, 0x36, 0x4b, 0x70, 0xb5,
0x1d, 0x68, 0x06, 0x00, 0x0c, 0x00, 0x00, 0x2d, 0x05, 0xd0, 0xab, 0x69, 0x00, 0x2b, 0x02, 0xd1,
0x28, 0x00, 0x00, 0xf0, 0xdf, 0xfb, 0x31, 0x4b, 0x9c, 0x42, 0x0f, 0xd1, 0x6c, 0x68, 0x0c, 0x23,
0xe2, 0x5e, 0x93, 0xb2, 0x19, 0x07, 0x2d, 0xd4, 0xd9, 0x06, 0x11, 0xd4, 0x09, 0x23, 0x01, 0x20,
0x33, 0x60, 0x37, 0x33, 0x13, 0x43, 0xa3, 0x81, 0x40, 0x42, 0x70, 0xbd, 0x28, 0x4b, 0x9c, 0x42,
0x01, 0xd1, 0xac, 0x68, 0xeb, 0xe7, 0x27, 0x4b, 0x9c, 0x42, 0xe8, 0xd1, 0xec, 0x68, 0xe6, 0xe7,
0x5b, 0x07, 0x13, 0xd5, 0x61, 0x6b, 0x00, 0x29, 0x08, 0xd0, 0x23, 0x00, 0x44, 0x33, 0x99, 0x42,
0x02, 0xd0, 0x30, 0x00, 0x00, 0xf0, 0x60, 0xfa, 0x00, 0x23, 0x63, 0x63, 0x24, 0x22, 0xa3, 0x89,
0x93, 0x43, 0xa3, 0x81, 0x00, 0x23, 0x63, 0x60, 0x23, 0x69, 0x23, 0x60, 0x08, 0x23, 0xa2, 0x89,
0x13, 0x43, 0xa3, 0x81, 0x23, 0x69, 0x00, 0x2b, 0x0b, 0xd1, 0xa0, 0x21, 0x80, 0x22, 0xa3, 0x89,
0x89, 0x00, 0x92, 0x00, 0x0b, 0x40, 0x93, 0x42, 0x03, 0xd0, 0x21, 0x00, 0x30, 0x00, 0x00, 0xf0,
0xf9, 0xf9, 0x01, 0x23, 0xa2, 0x89, 0x13, 0x40, 0x11, 0xd0, 0x00, 0x23, 0xa3, 0x60, 0x63, 0x69,
0x5b, 0x42, 0xa3, 0x61, 0x00, 0x20, 0x23, 0x69, 0x83, 0x42, 0xbe, 0xd1, 0x0c, 0x23, 0xe2, 0x5e,
0x13, 0x06, 0xba, 0xd5, 0x40, 0x23, 0x13, 0x43, 0xa3, 0x81, 0x01, 0x38, 0xb5, 0xe7, 0x92, 0x07,
0x00, 0xd4, 0x63, 0x69, 0xa3, 0x60, 0xed, 0xe7, 0x6c, 0x02, 0x00, 0x20, 0x00, 0x02, 0x00, 0x20,
0x20, 0x02, 0x00, 0x20, 0xe0, 0x01, 0x00, 0x20, 0xf7, 0xb5, 0x15, 0x00, 0x01, 0x93, 0x8a, 0x68,
0x0b, 0x69, 0x00, 0x90, 0x0c, 0x00, 0x93, 0x42, 0x00, 0xda, 0x13, 0x00, 0x22, 0x00, 0x2b, 0x60,
0x43, 0x32, 0x12, 0x78, 0x00, 0x2a, 0x01, 0xd0, 0x01, 0x33, 0x2b, 0x60, 0x23, 0x68, 0x9b, 0x06,
0x02, 0xd5, 0x2b, 0x68, 0x02, 0x33, 0x2b, 0x60, 0x06, 0x27, 0x23, 0x68, 0x1f, 0x40, 0x27, 0xd0,
0x23, 0x00, 0x43, 0x33, 0x1b, 0x78, 0x5a, 0x1e, 0x93, 0x41, 0x22, 0x68, 0x92, 0x06, 0x30, 0xd4,
0x22, 0x00, 0x01, 0x99, 0x43, 0x32, 0x00, 0x98, 0x08, 0x9e, 0xb0, 0x47, 0x43, 0x1c, 0x25, 0xd0,
0x06, 0x23, 0x20, 0x68, 0x2a, 0x68, 0xe1, 0x68, 0x03, 0x40, 0x00, 0x25, 0x04, 0x2b, 0x03, 0xd1,
0x8d, 0x1a, 0xeb, 0x43, 0xdb, 0x17, 0x1d, 0x40, 0xa3, 0x68, 0x22, 0x69, 0x93, 0x42, 0x01, 0xdd,
0x9b, 0x1a, 0xed, 0x18, 0x00, 0x27, 0xbd, 0x42, 0x20, 0xd1, 0x00, 0x20, 0x10, 0xe0, 0x01, 0x37,
0xe3, 0x68, 0x2a, 0x68, 0x9b, 0x1a, 0xbb, 0x42, 0xd2, 0xdd, 0x22, 0x00, 0x01, 0x23, 0x19, 0x32,
0x01, 0x99, 0x00, 0x98, 0x08, 0x9e, 0xb0, 0x47, 0x43, 0x1c, 0xf0, 0xd1, 0x01, 0x20, 0x40, 0x42,
0xfe, 0xbd, 0x30, 0x20, 0xe1, 0x18, 0x43, 0x31, 0x08, 0x70, 0x21, 0x00, 0x5a, 0x1c, 0x45, 0x31,
0x09, 0x78, 0xa2, 0x18, 0x43, 0x32, 0x02, 0x33, 0x11, 0x70, 0xc1, 0xe7, 0x22, 0x00, 0x01, 0x23,
0x1a, 0x32, 0x01, 0x99, 0x00, 0x98, 0x08, 0x9e, 0xb0, 0x47, 0x43, 0x1c, 0xe6, 0xd0, 0x01, 0x37,
0xd1, 0xe7, 0x70, 0x47, 0xf8, 0xb5, 0x05, 0x00, 0x0e, 0x00, 0x14, 0x00, 0x00, 0x28, 0x04, 0xd0,
0x83, 0x69, 0x00, 0x2b, 0x01, 0xd1, 0x00, 0xf0, 0xfd, 0xfa, 0x29, 0x4b, 0x9c, 0x42, 0x34, 0xd1,
0x6c, 0x68, 0xa3, 0x69, 0xa3, 0x60, 0xa3, 0x89, 0x1b, 0x07, 0x38, 0xd5, 0x23, 0x69, 0x00, 0x2b,
0x35, 0xd0, 0x0c, 0x23, 0xe2, 0x5e, 0x80, 0x23, 0x9b, 0x01, 0xf7, 0xb2, 0xf6, 0xb2, 0x1a, 0x42,
0x36, 0xd0, 0x23, 0x68, 0x22, 0x69, 0x98, 0x1a, 0x63, 0x69, 0x83, 0x42, 0x05, 0xdc, 0x21, 0x00,
0x28, 0x00, 0x00, 0xf0, 0x3f, 0xfb, 0x00, 0x28, 0x27, 0xd1, 0xa3, 0x68, 0x01, 0x30, 0x01, 0x3b,
0xa3, 0x60, 0x23, 0x68, 0x5a, 0x1c, 0x22, 0x60, 0x1f, 0x70, 0x63, 0x69, 0x83, 0x42, 0x04, 0xd0,
0xa3, 0x89, 0xdb, 0x07, 0x07, 0xd5, 0x0a, 0x2e, 0x05, 0xd1, 0x21, 0x00, 0x28, 0x00, 0x00, 0xf0,
0x29, 0xfb, 0x00, 0x28, 0x11, 0xd1, 0x30, 0x00, 0xf8, 0xbd, 0x0e, 0x4b, 0x9c, 0x42, 0x01, 0xd1,
0xac, 0x68, 0xc6, 0xe7, 0x0c, 0x4b, 0x9c, 0x42, 0xc3, 0xd1, 0xec, 0x68, 0xc1, 0xe7, 0x21, 0x00,
0x28, 0x00, 0xff, 0xf7, 0xcb, 0xfe, 0x00, 0x28, 0xc3, 0xd0, 0x01, 0x26, 0x76, 0x42, 0xea, 0xe7,
0x13, 0x43, 0xa3, 0x81, 0x22, 0x6e, 0x05, 0x4b, 0x13, 0x40, 0x23, 0x66, 0xc1, 0xe7, 0xc0, 0x46,
0x00, 0x02, 0x00, 0x20, 0x20, 0x02, 0x00, 0x20, 0xe0, 0x01, 0x00, 0x20, 0xff, 0xdf, 0xff, 0xff,
0xf8, 0xb5, 0x03, 0x23, 0xcd, 0x1c, 0x9d, 0x43, 0x08, 0x35, 0x06, 0x00, 0x0c, 0x2d, 0x1f, 0xd2,
0x0c, 0x25, 0xa9, 0x42, 0x1e, 0xd8, 0x30, 0x00, 0xff, 0xf7, 0xa7, 0xfe, 0x25, 0x49, 0x0a, 0x68,
0x14, 0x00, 0x00, 0x2c, 0x1a, 0xd1, 0x24, 0x4f, 0x3b, 0x68, 0x00, 0x2b, 0x04, 0xd1, 0x21, 0x00,
0x30, 0x00, 0x00, 0xf0, 0x03, 0xfe, 0x38, 0x60, 0x29, 0x00, 0x30, 0x00, 0x00, 0xf0, 0xfe, 0xfd,
0x43, 0x1c, 0x2a, 0xd1, 0x0c, 0x23, 0x30, 0x00, 0x33, 0x60, 0x00, 0xf0, 0x31, 0xff, 0x03, 0xe0,
0x00, 0x2d, 0xde, 0xda, 0x0c, 0x23, 0x33, 0x60, 0x00, 0x20, 0xf8, 0xbd, 0x23, 0x68, 0x5b, 0x1b,
0x18, 0xd4, 0x0b, 0x2b, 0x03, 0xd9, 0x23, 0x60, 0xe4, 0x18, 0x25, 0x60, 0x03, 0xe0, 0x63, 0x68,
0xa2, 0x42, 0x0d, 0xd1, 0x0b, 0x60, 0x30, 0x00, 0x00, 0xf0, 0x1a, 0xff, 0x20, 0x00, 0x07, 0x22,
0x0b, 0x30, 0x23, 0x1d, 0x90, 0x43, 0xc2, 0x1a, 0xe7, 0xd0, 0x1b, 0x1a, 0xa3, 0x50, 0xe4, 0xe7,
0x53, 0x60, 0xf0, 0xe7, 0x22, 0x00, 0x64, 0x68, 0xc3, 0xe7, 0x03, 0x23, 0xc4, 0x1c, 0x9c, 0x43,
0xa0, 0x42, 0xe2, 0xd0, 0x21, 0x1a, 0x30, 0x00, 0x00, 0xf0, 0xc8, 0xfd, 0x43, 0x1c, 0xdc, 0xd1,
0xc8, 0xe7, 0xc0, 0x46, 0x70, 0x02, 0x00, 0x20, 0x74, 0x02, 0x00, 0x20, 0x0d, 0x0a, 0x23, 0x23,
0x23, 0x23, 0x23, 0x23, 0x23, 0x20, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x42, 0x6f,
0x6f, 0x74, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x72, 0x20, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23,
0x0d, 0x0a, 0x00, 0x00, 0x25, 0x73, 0x00, 0x00, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x20,
0x50, 0x72, 0x65, 0x73, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x48, 0x6f, 0x6c, 0x64, 0x20, 0x74,
0x68, 0x65, 0x20, 0x53, 0x77, 0x69, 0x74, 0x63, 0x68, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x2d,
0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x20, 0x42, 0x6f, 0x6f, 0x74, 0x6c, 0x6f, 0x61, 0x64,
0x65, 0x72, 0x20, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x0d, 0x0a, 0x00, 0x00, 0x00, 0x00,
0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x20, 0x42, 0x6f, 0x6f, 0x74, 0x6c, 0x6f, 0x61, 0x64,
0x65, 0x72, 0x20, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x65, 0x64, 0x20, 0x23, 0x23, 0x23,
0x23, 0x23, 0x23, 0x23, 0x0d, 0x0a, 0x00, 0x00, 0x70, 0xb5, 0x1f, 0x4b, 0x1b, 0x78, 0x01, 0x2b,
0x14, 0xd0, 0x00, 0x2b, 0x02, 0xd0, 0x02, 0x2b, 0x20, 0xd0, 0x70, 0xbd, 0x1b, 0x4c, 0x25, 0x00,
0x38, 0x35, 0x21, 0x00, 0x28, 0x00, 0x00, 0xf0, 0xa1, 0xfd, 0x21, 0x00, 0x3c, 0x31, 0x28, 0x00,
0x00, 0xf0, 0x9c, 0xfd, 0x14, 0x4b, 0x01, 0x22, 0x1a, 0x70, 0xee, 0xe7, 0x14, 0x4b, 0x1b, 0x6a,
0x5b, 0x04, 0x02, 0xd4, 0x10, 0x4b, 0x02, 0x22, 0x1a, 0x70, 0xfa, 0x20, 0x80, 0x00, 0x00, 0xf0,
0xd5, 0xfb, 0x0f, 0x4b, 0x80, 0x22, 0x52, 0x02, 0xda, 0x61, 0xde, 0xe7, 0x0d, 0x49, 0x0b, 0x48,
0x04, 0x31, 0x38, 0x30, 0x00, 0xf0, 0x82, 0xfd, 0x80, 0x23, 0x9b, 0x05, 0x0a, 0x4a, 0x1a, 0x60,
0x5a, 0x60, 0x9a, 0x60, 0xda, 0x60, 0xbf, 0xf3, 0x4f, 0x8f, 0x08, 0x4b, 0x08, 0x4a, 0xda, 0x60,
0xbf, 0xf3, 0x4f, 0x8f, 0xc0, 0x46, 0xfd, 0xe7, 0x7c, 0x02, 0x00, 0x20, 0x8c, 0x15, 0x00, 0x00,
0x00, 0x44, 0x00, 0x41, 0x0c, 0x16, 0x00, 0x00, 0x4d, 0x43, 0x48, 0x50, 0x00, 0xed, 0x00, 0xe0,
0x04, 0x00, 0xfa, 0x05, 0xf7, 0xb5, 0x02, 0x26, 0x8b, 0x89, 0x05, 0x00, 0x0c, 0x00, 0x33, 0x42,
0x06, 0xd0, 0x0b, 0x00, 0x47, 0x33, 0x0b, 0x60, 0x0b, 0x61, 0x01, 0x23, 0x4b, 0x61, 0xf7, 0xbd,
0x01, 0xab, 0x6a, 0x46, 0x00, 0xf0, 0x1a, 0xfa, 0x00, 0x99, 0x07, 0x00, 0x28, 0x00, 0xff, 0xf7,
0xe7, 0xfe, 0x00, 0x28, 0x0e, 0xd1, 0x0c, 0x22, 0xa3, 0x5e, 0x9a, 0x05, 0xef, 0xd4, 0x03, 0x22,
0x93, 0x43, 0x1e, 0x43, 0x23, 0x00, 0x47, 0x33, 0x23, 0x60, 0x23, 0x61, 0x01, 0x23, 0xa6, 0x81,
0x63, 0x61, 0xe4, 0xe7, 0x0f, 0x4b, 0xab, 0x62, 0x80, 0x23, 0xa2, 0x89, 0x20, 0x60, 0x13, 0x43,
0xa3, 0x81, 0x00, 0x9b, 0x20, 0x61, 0x63, 0x61, 0x01, 0x9b, 0x00, 0x2b, 0x0d, 0xd0, 0x0e, 0x23,
0xe1, 0x5e, 0x28, 0x00, 0x00, 0xf0, 0x18, 0xfd, 0x00, 0x28, 0x06, 0xd0, 0x03, 0x22, 0xa3, 0x89,
0x93, 0x43, 0x1a, 0x00, 0x01, 0x23, 0x13, 0x43, 0xa3, 0x81, 0xa0, 0x89, 0x38, 0x43, 0xa0, 0x81,
0xc5, 0xe7, 0xc0, 0x46, 0xb9, 0x22, 0x00, 0x00, 0x70, 0xb5, 0x05, 0x00, 0x00, 0x29, 0x10, 0xd0,
0x0c, 0x1f, 0x23, 0x68, 0x00, 0x2b, 0x00, 0xda, 0xe4, 0x18, 0x28, 0x00, 0xff, 0xf7, 0x5d, 0xfd,
0x1d, 0x4a, 0x13, 0x68, 0x00, 0x2b, 0x05, 0xd1, 0x63, 0x60, 0x14, 0x60, 0x28, 0x00, 0x00, 0xf0,
0xf7, 0xfd, 0x70, 0xbd, 0xa3, 0x42, 0x09, 0xd9, 0x21, 0x68, 0x60, 0x18, 0x83, 0x42, 0xf3, 0xd1,
0x18, 0x68, 0x5b, 0x68, 0x41, 0x18, 0x21, 0x60, 0xee, 0xe7, 0x13, 0x00, 0x5a, 0x68, 0x00, 0x2a,
0x01, 0xd0, 0xa2, 0x42, 0xf9, 0xd9, 0x19, 0x68, 0x58, 0x18, 0xa0, 0x42, 0x0b, 0xd1, 0x20, 0x68,
0x09, 0x18, 0x58, 0x18, 0x19, 0x60, 0x82, 0x42, 0xe0, 0xd1, 0x10, 0x68, 0x52, 0x68, 0x41, 0x18,
0x19, 0x60, 0x5a, 0x60, 0xda, 0xe7, 0xa0, 0x42, 0x02, 0xd9, 0x0c, 0x23, 0x2b, 0x60, 0xd5, 0xe7,
0x21, 0x68, 0x60, 0x18, 0x82, 0x42, 0x03, 0xd1, 0x10, 0x68, 0x52, 0x68, 0x41, 0x18, 0x21, 0x60,
0x62, 0x60, 0x5c, 0x60, 0xca, 0xe7, 0xc0, 0x46, 0x70, 0x02, 0x00, 0x20, 0x1c, 0x4b, 0x00, 0x22,
0x9a, 0x61, 0x9a, 0x8c, 0x80, 0x21, 0x8a, 0x43, 0x9a, 0x84, 0x1a, 0x00, 0x10, 0x23, 0xd1, 0x68,
0x0b, 0x42, 0xfc, 0xd0, 0x17, 0x4b, 0x1a, 0x68, 0x92, 0x0e, 0x3f, 0x2a, 0x26, 0xd0, 0x16, 0x4b,
0x1b, 0x68, 0x9b, 0x05, 0x9b, 0x0d, 0x92, 0x02, 0x13, 0x43, 0x11, 0x4a, 0x93, 0x62, 0x10, 0x23,
0xd1, 0x68, 0x0b, 0x42, 0xfc, 0xd0, 0x0e, 0x4b, 0x02, 0x22, 0x9a, 0x84, 0x1a, 0x00, 0x10, 0x23,
0xd1, 0x68, 0x0b, 0x42, 0xfc, 0xd0, 0x0d, 0x4b, 0x0d, 0x4a, 0x5a, 0x60, 0x1a, 0x00, 0x53, 0x78,
0x5b, 0xb2, 0x00, 0x2b, 0xfb, 0xdb, 0x09, 0x4b, 0x0a, 0x4a, 0x5a, 0x80, 0x0a, 0x4b, 0x88, 0x22,
0x52, 0x00, 0x1a, 0x62, 0x02, 0x4b, 0x00, 0x22, 0x1a, 0x62, 0x70, 0x47, 0x20, 0x3a, 0xd6, 0xe7,
0x00, 0x08, 0x00, 0x40, 0x24, 0x60, 0x80, 0x00, 0x28, 0x60, 0x80, 0x00, 0x00, 0x0c, 0x00, 0x40,
0x00, 0x07, 0x01, 0x00, 0x10, 0x40, 0x00, 0x00, 0x00, 0x04, 0x00, 0x40, 0xf0, 0xb5, 0xc6, 0x46,
0x1f, 0x4a, 0x20, 0x4b, 0x00, 0xb5, 0x00, 0x21, 0x93, 0x42, 0x03, 0xd2, 0x19, 0x70, 0x01, 0x33,
0x93, 0x42, 0xfb, 0xd1, 0x0b, 0x23, 0x1c, 0x4a, 0x9c, 0x46, 0x08, 0x3b, 0x98, 0x46, 0x13, 0x68,
0x54, 0x68, 0x19, 0x00, 0x00, 0x26, 0x21, 0x43, 0x11, 0xd0, 0x17, 0x00, 0x91, 0x68, 0x0c, 0x37,
0x00, 0x29, 0x0f, 0xd1, 0x1a, 0x19, 0x00, 0x2c, 0x03, 0xd0, 0x1e, 0x70, 0x01, 0x33, 0x93, 0x42,
0xfb, 0xd1, 0x3a, 0x00, 0x13, 0x68, 0x54, 0x68, 0x19, 0x00, 0x21, 0x43, 0xed, 0xd1, 0x04, 0xbc,
0x90, 0x46, 0xf0, 0xbd, 0x61, 0x46, 0xc9, 0x1a, 0x1d, 0x19, 0x51, 0x18, 0x00, 0x2c, 0x05, 0xd0,
0x5a, 0x1c, 0x50, 0x5c, 0x18, 0x70, 0x13, 0x00, 0x95, 0x42, 0xf9, 0xd1, 0x43, 0x46, 0x23, 0x40,
0x5a, 0x1e, 0x93, 0x41, 0xa4, 0x08, 0x1c, 0x19, 0xa4, 0x00, 0x3f, 0x19, 0x3a, 0x00, 0xe1, 0xe7,
0x14, 0x00, 0x00, 0x20, 0x10, 0x00, 0x00, 0x20, 0x38, 0x0b, 0x00, 0x00, 0xf8, 0xb5, 0x1c, 0x4b,
0x07, 0x00, 0x1e, 0x68, 0xb3, 0x69, 0x00, 0x2b, 0x02, 0xd1, 0x30, 0x00, 0x00, 0xf0, 0x72, 0xf8,
0x48, 0x36, 0xb4, 0x68, 0x73, 0x68, 0x01, 0x3b, 0x04, 0xd5, 0x33, 0x68, 0x00, 0x2b, 0x1c, 0xd0,
0x36, 0x68, 0xf6, 0xe7, 0x0c, 0x22, 0xa5, 0x5e, 0x00, 0x2d, 0x14, 0xd1, 0x20, 0x00, 0x11, 0x4b,
0x25, 0x66, 0x25, 0x60, 0x65, 0x60, 0xa5, 0x60, 0xe3, 0x60, 0x25, 0x61, 0x65, 0x61, 0xa5, 0x61,
0x08, 0x22, 0x29, 0x00, 0x58, 0x30, 0x00, 0xf0, 0xb7, 0xfc, 0x65, 0x63, 0xa5, 0x63, 0xa5, 0x64,
0xe5, 0x64, 0x20, 0x00, 0xf8, 0xbd, 0x64, 0x34, 0xdd, 0xe7, 0x04, 0x21, 0x38, 0x00, 0x00, 0xf0,
0xd1, 0xfa, 0x04, 0x1e, 0x30, 0x60, 0xdb, 0xd1, 0x0c, 0x23, 0x3b, 0x60, 0xf1, 0xe7, 0xc0, 0x46,
0x74, 0x23, 0x00, 0x00, 0x01, 0x00, 0xff, 0xff, 0xf0, 0xb5, 0x85, 0xb0, 0x01, 0x90, 0x0e, 0x00,
0x14, 0x00, 0x00, 0xf0, 0x91, 0xfc, 0x01, 0x28, 0x0b, 0xd1, 0x73, 0x1e, 0xfe, 0x2b, 0x08, 0xd8,
0x05, 0x00, 0x02, 0xab, 0x1e, 0x71, 0x00, 0x27, 0xaf, 0x42, 0x12, 0xd1, 0x30, 0x00, 0x05, 0xb0,
0xf0, 0xbd, 0x23, 0x00, 0x32, 0x00, 0x58, 0x33, 0x03, 0xa9, 0x01, 0x98, 0x00, 0xf0, 0xd6, 0xfa,
0x05, 0x00, 0x43, 0x1c, 0xef, 0xd1, 0x40, 0x23, 0xa2, 0x89, 0x13, 0x43, 0xa3, 0x81, 0x06, 0x00,
0xec, 0xe7, 0x03, 0xab, 0xd9, 0x5d, 0xa3, 0x68, 0x01, 0x3b, 0xa3, 0x60, 0x00, 0x2b, 0x04, 0xda,
0xa2, 0x69, 0x93, 0x42, 0x07, 0xdb, 0x0a, 0x29, 0x05, 0xd0, 0x23, 0x68, 0x5a, 0x1c, 0x22, 0x60,
0x19, 0x70, 0x01, 0x37, 0xd8, 0xe7, 0x22, 0x00, 0x01, 0x98, 0xff, 0xf7, 0xfb, 0xfc, 0x43, 0x1c,
0xf7, 0xd1, 0xe4, 0xe7, 0x83, 0x69, 0x13, 0xb5, 0x04, 0x00, 0x00, 0x2b, 0x28, 0xd1, 0x83, 0x64,
0xc3, 0x64, 0x03, 0x65, 0x13, 0x4b, 0x14, 0x4a, 0x1b, 0x68, 0x82, 0x62, 0x01, 0x93, 0x98, 0x42,
0x01, 0xd1, 0x01, 0x23, 0x83, 0x61, 0x20, 0x00, 0xff, 0xf7, 0x70, 0xff, 0x60, 0x60, 0x20, 0x00,
0xff, 0xf7, 0x6c, 0xff, 0xa0, 0x60, 0x20, 0x00, 0xff, 0xf7, 0x68, 0xff, 0x00, 0x22, 0xe0, 0x60,
0x04, 0x21, 0x60, 0x68, 0x00, 0xf0, 0x48, 0xf9, 0x01, 0x22, 0x09, 0x21, 0xa0, 0x68, 0x00, 0xf0,
0x43, 0xf9, 0x02, 0x22, 0x12, 0x21, 0xe0, 0x68, 0x00, 0xf0, 0x3e, 0xf9, 0x01, 0x23, 0xa3, 0x61,
0x13, 0xbd, 0xc0, 0x46, 0x74, 0x23, 0x00, 0x00, 0xb9, 0x22, 0x00, 0x00, 0x70, 0xb5, 0x04, 0x00,
0x0d, 0x00, 0x00, 0x20, 0x00, 0x2c, 0x21, 0xd0, 0x00, 0xf0, 0x36, 0xfa, 0x00, 0x2d, 0x18, 0xd0,
0x00, 0x20, 0x0f, 0x4a, 0x04, 0x21, 0x07, 0x26, 0x05, 0xe0, 0x13, 0x8d, 0x23, 0x80, 0x02, 0x34,
0x01, 0x30, 0x85, 0x42, 0x0e, 0xd0, 0x13, 0x7e, 0x0b, 0x42, 0xfc, 0xd0, 0x53, 0x8b, 0x33, 0x42,
0x08, 0xd1, 0x53, 0x68, 0x33, 0x40, 0x01, 0x2b, 0xef, 0xd0, 0x13, 0x8d, 0x23, 0x70, 0x01, 0x34,
0xee, 0xe7, 0x28, 0x00, 0x28, 0x1a, 0x45, 0x42, 0x68, 0x41, 0xc0, 0xb2, 0x70, 0xbd, 0xc0, 0x46,
0x00, 0x10, 0x00, 0x42, 0x0b, 0x69, 0x70, 0xb5, 0x05, 0x00, 0x0c, 0x00, 0x00, 0x2b, 0x01, 0xd1,
0x00, 0x20, 0x70, 0xbd, 0x00, 0x28, 0x04, 0xd0, 0x83, 0x69, 0x00, 0x2b, 0x01, 0xd1, 0xff, 0xf7,
0x91, 0xff, 0x0b, 0x4b, 0x9c, 0x42, 0x09, 0xd1, 0x6c, 0x68, 0x0c, 0x22, 0xa3, 0x5e, 0x00, 0x2b,
0xee, 0xd0, 0x21, 0x00, 0x28, 0x00, 0xff, 0xf7, 0x8d, 0xfa, 0xea, 0xe7, 0x05, 0x4b, 0x9c, 0x42,
0x01, 0xd1, 0xac, 0x68, 0xf1, 0xe7, 0x04, 0x4b, 0x9c, 0x42, 0xee, 0xd1, 0xec, 0x68, 0xec, 0xe7,
0x00, 0x02, 0x00, 0x20, 0x20, 0x02, 0x00, 0x20, 0xe0, 0x01, 0x00, 0x20, 0x70, 0xb5, 0x0e, 0x00,
0x1d, 0x00, 0x0e, 0x23, 0xc9, 0x5e, 0x90, 0xb0, 0x14, 0x00, 0x00, 0x29, 0x07, 0xda, 0x00, 0x23,
0x2b, 0x60, 0xb3, 0x89, 0x1b, 0x06, 0x11, 0xd4, 0x80, 0x23, 0xdb, 0x00, 0x0f, 0xe0, 0x01, 0xaa,
0x00, 0xf0, 0x00, 0xfb, 0x00, 0x28, 0xf2, 0xdb, 0xf0, 0x22, 0x02, 0x9b, 0x12, 0x02, 0x13, 0x40,
0x05, 0x4a, 0x9b, 0x18, 0x5a, 0x42, 0x53, 0x41, 0x2b, 0x60, 0xed, 0xe7, 0x40, 0x23, 0x00, 0x20,
0x23, 0x60, 0x10, 0xb0, 0x70, 0xbd, 0xc0, 0x46, 0x00, 0xe0, 0xff, 0xff, 0x10, 0xb5, 0x10, 0x4b,
0x06, 0x22, 0x5a, 0x60, 0x0f, 0x4c, 0x23, 0x68, 0x58, 0x68, 0x00, 0x21, 0x00, 0xf0, 0x82, 0xfb,
0x23, 0x68, 0x98, 0x68, 0x00, 0x21, 0x00, 0xf0, 0x7d, 0xfb, 0x00, 0xf0, 0x97, 0xf9, 0xff, 0xf7,
0x2d, 0xfe, 0x00, 0xf0, 0xa9, 0xfb, 0x00, 0xf0, 0x25, 0xf9, 0x00, 0xf0, 0x04, 0xf9, 0x00, 0xf0,
0x77, 0xfa, 0xff, 0xf7, 0x26, 0xfc, 0x00, 0xf0, 0x59, 0xfb, 0x00, 0xf0, 0xb5, 0xfb, 0x10, 0xbd,
0x00, 0x40, 0x00, 0x41, 0x6c, 0x02, 0x00, 0x20, 0x10, 0xb5, 0x06, 0x4c, 0x23, 0x78, 0x00, 0x2b,
0x07, 0xd1, 0x05, 0x4b, 0x00, 0x2b, 0x02, 0xd0, 0x04, 0x48, 0x00, 0xe0, 0x00, 0xbf, 0x01, 0x23,
0x23, 0x70, 0x10, 0xbd, 0x40, 0x02, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x04, 0x4b, 0x10, 0xb5, 0x00, 0x2b, 0x03, 0xd0, 0x03, 0x49, 0x04, 0x48, 0x00, 0xe0, 0x00, 0xbf,
0x10, 0xbd, 0xc0, 0x46, 0x00, 0x00, 0x00, 0x00, 0x44, 0x02, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00,
0x10, 0xb5, 0x0a, 0x4b, 0x98, 0x47, 0x0a, 0x4b, 0x98, 0x47, 0xff, 0xf7, 0x37, 0xfe, 0x09, 0x4b,
0xff, 0x22, 0x93, 0x43, 0x08, 0x4a, 0x93, 0x60, 0x08, 0x4b, 0x98, 0x47, 0x08, 0x4b, 0x98, 0x47,
0x08, 0x4b, 0x98, 0x47, 0x00, 0xf0, 0x58, 0xfb, 0xfe, 0xe7, 0xc0, 0x46, 0xf7, 0x22, 0x00, 0x00,
0xf7, 0x22, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xed, 0x00, 0xe0, 0x59, 0x1c, 0x00, 0x00,
0xf7, 0x22, 0x00, 0x00, 0xf7, 0x22, 0x00, 0x00, 0x70, 0xb5, 0x00, 0x26, 0x0c, 0x4d, 0x0d, 0x4c,
0x64, 0x1b, 0xa4, 0x10, 0xa6, 0x42, 0x09, 0xd1, 0x00, 0x26, 0xfe, 0xf7, 0x0f, 0xfe, 0x0a, 0x4d,
0x0a, 0x4c, 0x64, 0x1b, 0xa4, 0x10, 0xa6, 0x42, 0x05, 0xd1, 0x70, 0xbd, 0xb3, 0x00, 0xeb, 0x58,
0x98, 0x47, 0x01, 0x36, 0xee, 0xe7, 0xb3, 0x00, 0xeb, 0x58, 0x98, 0x47, 0x01, 0x36, 0xf2, 0xe7,
0x98, 0x08, 0x00, 0x00, 0x98, 0x08, 0x00, 0x00, 0x98, 0x08, 0x00, 0x00, 0x9c, 0x08, 0x00, 0x00,
0xf8, 0xb5, 0x0d, 0x00, 0x09, 0x6e, 0x06, 0x00, 0x14, 0x00, 0xd7, 0x18, 0x89, 0x04, 0x18, 0xd5,
0x9f, 0x08, 0xbf, 0x00, 0xd7, 0x19, 0xbc, 0x42, 0x01, 0xd1, 0x00, 0x20, 0x08, 0xe0, 0x2a, 0x00,
0x02, 0xcc, 0x30, 0x00, 0x00, 0xf0, 0x7a, 0xfa, 0x43, 0x1c, 0xf4, 0xd1, 0x01, 0x20, 0x40, 0x42,
0xf8, 0xbd, 0x21, 0x78, 0x2a, 0x00, 0x30, 0x00, 0x00, 0xf0, 0x7c, 0xf9, 0x01, 0x34, 0x43, 0x1c,
0xf4, 0xd0, 0xbc, 0x42, 0xf5, 0xd1, 0xe8, 0xe7, 0x00, 0x23, 0x10, 0xb5, 0x04, 0x00, 0x03, 0x60,
0x43, 0x60, 0x83, 0x60, 0x81, 0x81, 0x03, 0x66, 0xc2, 0x81, 0x03, 0x61, 0x43, 0x61, 0x83, 0x61,
0x19, 0x00, 0x08, 0x22, 0x58, 0x30, 0x00, 0xf0, 0xe7, 0xfa, 0x05, 0x4b, 0x24, 0x62, 0x63, 0x62,
0x04, 0x4b, 0xa3, 0x62, 0x04, 0x4b, 0xe3, 0x62, 0x04, 0x4b, 0x23, 0x63, 0x10, 0xbd, 0xc0, 0x46,
0x01, 0x20, 0x00, 0x00, 0x65, 0x1e, 0x00, 0x00, 0x51, 0x1f, 0x00, 0x00, 0x05, 0x23, 0x00, 0x00,
0x30, 0xb5, 0x00, 0x28, 0x18, 0xd0, 0x00, 0x29, 0x18, 0xd0, 0x0d, 0x4a, 0x01, 0x24, 0x07, 0x25,
0x04, 0xe0, 0x03, 0x88, 0x13, 0x85, 0x02, 0x30, 0x00, 0x29, 0x0b, 0xd0, 0x01, 0x39, 0x13, 0x7e,
0x23, 0x42, 0xfc, 0xd0, 0x53, 0x68, 0x2b, 0x40, 0x01, 0x2b, 0xf2, 0xd0, 0x03, 0x78, 0x13, 0x85,
0x01, 0x30, 0xf1, 0xe7, 0x01, 0x20, 0x30, 0xbd, 0x00, 0x20, 0xfc, 0xe7, 0x01, 0x20, 0xfa, 0xe7,
0x00, 0x10, 0x00, 0x42, 0xf7, 0xb5, 0x04, 0x00, 0x07, 0x00, 0x00, 0x26, 0x01, 0x91, 0x48, 0x34,
0x00, 0x2c, 0x01, 0xd1, 0x30, 0x00, 0xfe, 0xbd, 0x63, 0x68, 0xa5, 0x68, 0x00, 0x93, 0x00, 0x9b,
0x01, 0x3b, 0x00, 0x93, 0x01, 0xd5, 0x24, 0x68, 0xf2, 0xe7, 0xab, 0x89, 0x01, 0x2b, 0x08, 0xd9,
0x0e, 0x22, 0xab, 0x5e, 0x01, 0x33, 0x04, 0xd0, 0x29, 0x00, 0x38, 0x00, 0x01, 0x9b, 0x98, 0x47,
0x06, 0x43, 0x64, 0x35, 0xeb, 0xe7, 0x70, 0x47, 0x70, 0xb5, 0x0c, 0x1e, 0x0e, 0xd0, 0x00, 0x2a,
0x0c, 0xd0, 0x15, 0x00, 0x02, 0x28, 0x01, 0xd9, 0x28, 0x00, 0x70, 0xbd, 0x08, 0x4b, 0x00, 0x2b,
0x06, 0xd0, 0x11, 0x00, 0x20, 0x00, 0x00, 0xe0, 0x00, 0xbf, 0xf5, 0xe7, 0x00, 0x25, 0xf3, 0xe7,
0x8e, 0x18, 0x20, 0x78, 0x01, 0x34, 0x00, 0xf0, 0x05, 0xfa, 0xb4, 0x42, 0xf9, 0xd1, 0xeb, 0xe7,
0x00, 0x00, 0x00, 0x00, 0x0a, 0x4b, 0x0b, 0x4a, 0x1a, 0x60, 0x0b, 0x4a, 0x9a, 0x81, 0xc0, 0x22,
0x92, 0x02, 0x5a, 0x60, 0x1a, 0x00, 0xd3, 0x69, 0x00, 0x2b, 0xfc, 0xd1, 0x04, 0x4a, 0x13, 0x68,
0x02, 0x21, 0x0b, 0x43, 0x13, 0x60, 0xd3, 0x69, 0x00, 0x2b, 0xfc, 0xd1, 0x70, 0x47, 0xc0, 0x46,
0x00, 0x10, 0x00, 0x42, 0x04, 0x01, 0x31, 0x40, 0x2b, 0xf6, 0xff, 0xff, 0x10, 0xb5, 0x0b, 0x4b,
0x1b, 0x68, 0xdb, 0x43, 0x9b, 0x07, 0x00, 0xd0, 0x10, 0xbd, 0x09, 0x4b, 0x9c, 0x68, 0x43, 0x01,
0x1b, 0x1a, 0x9b, 0x00, 0x18, 0x18, 0xc0, 0x00, 0xfa, 0x21, 0x89, 0x00, 0xff, 0xf7, 0x60, 0xf8,
0x03, 0x4a, 0x93, 0x68, 0x1b, 0x1b, 0x83, 0x42, 0xfb, 0xd3, 0xed, 0xe7, 0x10, 0xe0, 0x00, 0xe0,
0x5c, 0x02, 0x00, 0x20, 0xf8, 0xb5, 0x1f, 0x00, 0x8b, 0x89, 0x05, 0x00, 0x0c, 0x00, 0x16, 0x00,
0xdb, 0x05, 0x05, 0xd5, 0x0e, 0x23, 0xc9, 0x5e, 0x00, 0x22, 0x02, 0x23, 0x00, 0xf0, 0xe8, 0xf8,
0xa3, 0x89, 0x05, 0x4a, 0x28, 0x00, 0x13, 0x40, 0xa3, 0x81, 0x32, 0x00, 0x0e, 0x23, 0xe1, 0x5e,
0x3b, 0x00, 0x00, 0xf0, 0xc9, 0xf8, 0xf8, 0xbd, 0xff, 0xef, 0xff, 0xff, 0x08, 0x4a, 0x13, 0x68,
0x00, 0x2b, 0x09, 0xd0, 0x07, 0x49, 0x18, 0x18, 0x88, 0x42, 0x02, 0xd8, 0x10, 0x60, 0x18, 0x00,
0x70, 0x47, 0x01, 0x23, 0x5b, 0x42, 0xfa, 0xe7, 0x03, 0x4b, 0x13, 0x60, 0xf2, 0xe7, 0xc0, 0x46,
0x78, 0x02, 0x00, 0x20, 0x80, 0x04, 0x00, 0x20, 0x80, 0x02, 0x00, 0x20, 0x09, 0x4b, 0x80, 0x22,
0x52, 0x02, 0x1a, 0x60, 0xa0, 0x22, 0x52, 0x02, 0x1a, 0x61, 0x01, 0x22, 0x4a, 0x21, 0x5a, 0x54,
0x01, 0x31, 0x5a, 0x54, 0x4d, 0x32, 0x45, 0x39, 0x99, 0x54, 0x19, 0x3a, 0x2d, 0x31, 0x99, 0x54,
0x70, 0x47, 0xc0, 0x46, 0x00, 0x44, 0x00, 0x41, 0x09, 0x4b, 0x5b, 0x8b, 0x5b, 0x07, 0x0d, 0xd0,
0x07, 0x4b, 0x80, 0x22, 0x1a, 0x76, 0x79, 0x3a, 0x5a, 0x83, 0x1b, 0x7e, 0x5b, 0x07, 0x05, 0xd5,
0x03, 0x4b, 0x04, 0x21, 0x1a, 0x8d, 0x1a, 0x7e, 0x0a, 0x42, 0xfb, 0xd1, 0x70, 0x47, 0xc0, 0x46,
0x00, 0x10, 0x00, 0x42, 0x70, 0xb5, 0x64, 0x25, 0x4a, 0x1e, 0x55, 0x43, 0x0e, 0x00, 0x29, 0x00,
0x70, 0x31, 0xff, 0xf7, 0xcd, 0xfa, 0x04, 0x1e, 0x08, 0xd0, 0x00, 0x21, 0x2a, 0x00, 0x01, 0x60,
0x46, 0x60, 0x0c, 0x30, 0xa0, 0x60, 0x64, 0x32, 0x00, 0xf0, 0xc6, 0xf9, 0x20, 0x00, 0x70, 0xbd,
0x70, 0xb5, 0x0c, 0x00, 0x0e, 0x25, 0x49, 0x5f, 0x00, 0xf0, 0x7a, 0xf8, 0xa3, 0x89, 0x42, 0x1c,
0x03, 0xd1, 0x05, 0x4a, 0x13, 0x40, 0xa3, 0x81, 0x70, 0xbd, 0x80, 0x22, 0x52, 0x01, 0x13, 0x43,
0xa3, 0x81, 0x60, 0x65, 0xf8, 0xe7, 0xc0, 0x46, 0xff, 0xef, 0xff, 0xff, 0x7f, 0xb5, 0x1d, 0x00,
0x08, 0x4b, 0x04, 0x00, 0xe0, 0x33, 0x1e, 0x68, 0x2b, 0x00, 0x00, 0x29, 0x01, 0xd1, 0x0a, 0x00,
0x01, 0xa9, 0xb0, 0x47, 0x43, 0x1c, 0x03, 0xd1, 0x00, 0x23, 0x2b, 0x60, 0x8a, 0x33, 0x23, 0x60,
0x04, 0xb0, 0x70, 0xbd, 0x14, 0x00, 0x00, 0x20, 0x70, 0xb5, 0x00, 0x29, 0x0f, 0xd0, 0x00, 0x2a,
0x0d, 0xd0, 0x15, 0x00, 0x00, 0x28, 0x08, 0xd1, 0x0c, 0x00, 0x8e, 0x18, 0x01, 0x20, 0x00, 0xf0,
0x0c, 0xf9, 0x20, 0x70, 0x01, 0x34, 0xb4, 0x42, 0xf8, 0xd1, 0x28, 0x00, 0x70, 0xbd, 0x00, 0x25,
0xfb, 0xe7, 0xc0, 0x46, 0x93, 0x68, 0x10, 0xb5, 0x01, 0x3b, 0x93, 0x60, 0x00, 0x2b, 0x04, 0xda,
0x94, 0x69, 0xa3, 0x42, 0x07, 0xdb, 0x0a, 0x29, 0x05, 0xd0, 0x13, 0x68, 0x58, 0x1c, 0x10, 0x60,
0x19, 0x70, 0x08, 0x00, 0x10, 0xbd, 0xff, 0xf7, 0x05, 0xfa, 0x01, 0x00, 0xf9, 0xe7, 0xfe, 0xe7,
0x70, 0xb5, 0x0c, 0x00, 0x0e, 0x25, 0x49, 0x5f, 0x00, 0xf0, 0x36, 0xf8, 0x00, 0x28, 0x03, 0xdb,
0x63, 0x6d, 0x1b, 0x18, 0x63, 0x65, 0x70, 0xbd, 0xa3, 0x89, 0x02, 0x4a, 0x13, 0x40, 0xa3, 0x81,
0xf9, 0xe7, 0xc0, 0x46, 0xff, 0xef, 0xff, 0xff, 0x70, 0xb5, 0x05, 0x00, 0x08, 0x00, 0x11, 0x00,
0x00, 0x22, 0x06, 0x4c, 0x22, 0x60, 0x1a, 0x00, 0x00, 0xf0, 0x92, 0xf9, 0x43, 0x1c, 0x03, 0xd1,
0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0, 0x2b, 0x60, 0x70, 0xbd, 0xc0, 0x46, 0x10, 0x00, 0x00, 0x20,
0x70, 0xb5, 0x05, 0x00, 0x08, 0x00, 0x11, 0x00, 0x00, 0x22, 0x06, 0x4c, 0x22, 0x60, 0x1a, 0x00,
0x00, 0xf0, 0x76, 0xf9, 0x43, 0x1c, 0x03, 0xd1, 0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0, 0x2b, 0x60,
0x70, 0xbd, 0xc0, 0x46, 0x10, 0x00, 0x00, 0x20, 0x70, 0xb5, 0x05, 0x00, 0x08, 0x00, 0x11, 0x00,
0x00, 0x22, 0x06, 0x4c, 0x22, 0x60, 0x1a, 0x00, 0x00, 0xf0, 0x66, 0xf9, 0x43, 0x1c, 0x03, 0xd1,
0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0, 0x2b, 0x60, 0x70, 0xbd, 0xc0, 0x46, 0x10, 0x00, 0x00, 0x20,
0x05, 0x4a, 0x00, 0x23, 0x13, 0x60, 0x93, 0x60, 0x04, 0x49, 0x51, 0x60, 0x06, 0x21, 0x11, 0x60,
0x03, 0x4a, 0x93, 0x60, 0x13, 0x60, 0x70, 0x47, 0x10, 0xe0, 0x00, 0xe0, 0x7f, 0xbb, 0x00, 0x00,
0x5c, 0x02, 0x00, 0x20, 0x10, 0xb5, 0x06, 0x4b, 0x1b, 0x68, 0x06, 0x4b, 0x9a, 0x68, 0x01, 0x32,
0x9a, 0x60, 0x1b, 0x68, 0x00, 0x2b, 0x02, 0xd0, 0x02, 0x4a, 0x50, 0x68, 0x98, 0x47, 0x10, 0xbd,
0x10, 0xe0, 0x00, 0xe0, 0x5c, 0x02, 0x00, 0x20, 0x82, 0xb0, 0x00, 0x29, 0x00, 0xd1, 0x01, 0xa9,
0x10, 0x1e, 0x06, 0xd0, 0x00, 0x2b, 0x06, 0xd0, 0x13, 0x78, 0x0b, 0x60, 0x10, 0x78, 0x43, 0x1e,
0x98, 0x41, 0x02, 0xb0, 0x70, 0x47, 0x02, 0x20, 0x40, 0x42, 0xfa, 0xe7, 0x00, 0x23, 0x70, 0xb5,
0x06, 0x4c, 0x05, 0x00, 0x08, 0x00, 0x23, 0x60, 0xff, 0xf7, 0xc0, 0xfe, 0x43, 0x1c, 0x03, 0xd1,
0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0, 0x2b, 0x60, 0x70, 0xbd, 0xc0, 0x46, 0x10, 0x00, 0x00, 0x20,
0x00, 0x23, 0x70, 0xb5, 0x06, 0x4c, 0x05, 0x00, 0x08, 0x00, 0x23, 0x60, 0x00, 0xf0, 0xfc, 0xf8,
0x43, 0x1c, 0x03, 0xd1, 0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0, 0x2b, 0x60, 0x70, 0xbd, 0xc0, 0x46,
0x10, 0x00, 0x00, 0x20, 0x00, 0x23, 0x70, 0xb5, 0x06, 0x4c, 0x05, 0x00, 0x08, 0x00, 0x11, 0x00,
0x23, 0x60, 0x00, 0xf0, 0xed, 0xf8, 0x43, 0x1c, 0x03, 0xd1, 0x23, 0x68, 0x00, 0x2b, 0x00, 0xd0,
0x2b, 0x60, 0x70, 0xbd, 0x10, 0x00, 0x00, 0x20, 0x00, 0x23, 0x70, 0xb5, 0x06, 0x4c, 0x05, 0x00,
0x08, 0x00, 0x23, 0x60, 0x00, 0xf0, 0xe0, 0xf8, 0x43, 0x1c, 0x03, 0xd1, 0x23, 0x68, 0x00, 0x2b,
0x00, 0xd0, 0x2b, 0x60, 0x70, 0xbd, 0xc0, 0x46, 0x10, 0x00, 0x00, 0x20, 0x0f, 0xb4, 0x07, 0xb5,
0x05, 0x4b, 0x04, 0xaa, 0x1b, 0x68, 0x02, 0xca, 0x98, 0x68, 0x01, 0x92, 0x00, 0xf0, 0x30, 0xf8,
0x03, 0xb0, 0x08, 0xbc, 0x04, 0xb0, 0x18, 0x47, 0x6c, 0x02, 0x00, 0x20, 0x10, 0xb5, 0x0c, 0x23,
0xd4, 0x5e, 0x80, 0x23, 0x9b, 0x01, 0x1c, 0x42, 0x04, 0xd1, 0x1c, 0x43, 0x94, 0x81, 0x14, 0x6e,
0x23, 0x43, 0x13, 0x66, 0xff, 0xf7, 0xe0, 0xfb, 0x10, 0xbd, 0x00, 0xb5, 0x83, 0xb0, 0x00, 0x23,
0x01, 0x93, 0x01, 0x21, 0x01, 0xa8, 0xff, 0xf7, 0x49, 0xfc, 0x00, 0x28, 0xf9, 0xd0, 0x01, 0x98,
0x03, 0xb0, 0x00, 0xbd, 0x00, 0xb5, 0x83, 0xb0, 0x6b, 0x46, 0xd8, 0x71, 0x01, 0x21, 0x6b, 0x46,
0xd8, 0x1d, 0xff, 0xf7, 0x95, 0xfd, 0x00, 0x28, 0xf8, 0xd0, 0x03, 0xb0, 0x00, 0xbd, 0xfe, 0xe7,
0x10, 0xb5, 0x13, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x02, 0x48, 0x00, 0x68, 0xfe, 0xf7, 0x46, 0xfb,
0x10, 0xbd, 0xc0, 0x46, 0x6c, 0x02, 0x00, 0x20, 0x0b, 0x1e, 0x04, 0xd0, 0xff, 0x2a, 0x04, 0xd9,
0x8a, 0x23, 0x03, 0x60, 0x8b, 0x3b, 0x18, 0x00, 0x70, 0x47, 0x0a, 0x70, 0x01, 0x23, 0xfa, 0xe7,
0xc9, 0xb2, 0x82, 0x18, 0x90, 0x42, 0x01, 0xd1, 0x00, 0x20, 0x70, 0x47, 0x03, 0x78, 0x8b, 0x42,
0xfb, 0xd0, 0x01, 0x30, 0xf6, 0xe7, 0xfe, 0xe7, 0x03, 0x4b, 0x00, 0x22, 0x9a, 0x60, 0x1a, 0x68,
0x01, 0x21, 0x0a, 0x43, 0x1a, 0x60, 0x70, 0x47, 0x10, 0xe0, 0x00, 0xe0, 0x10, 0xb5, 0x03, 0x4b,
0x00, 0x22, 0x1a, 0x70, 0xff, 0xf7, 0xf0, 0xff, 0x10, 0xbd, 0xc0, 0x46, 0x7c, 0x02, 0x00, 0x20,
0x10, 0xb5, 0x03, 0x4b, 0x01, 0x00, 0x18, 0x68, 0xff, 0xf7, 0x22, 0xf9, 0x10, 0xbd, 0xc0, 0x46,
0x6c, 0x02, 0x00, 0x20, 0x4a, 0x42, 0x4a, 0x41, 0x80, 0x23, 0x10, 0xb5, 0x52, 0x00, 0xdb, 0x00,
0xfe, 0xf7, 0x8a, 0xfd, 0x10, 0xbd, 0x23, 0x2d, 0x30, 0x2b, 0x20, 0x00, 0x68, 0x6c, 0x4c, 0x00,
0x65, 0x66, 0x67, 0x45, 0x46, 0x47, 0x00, 0x00, 0x10, 0xb5, 0x02, 0x49, 0xff, 0xf7, 0x5a, 0xfd,
0x10, 0xbd, 0xc0, 0x46, 0xd5, 0x1a, 0x00, 0x00, 0x94, 0x23, 0x02, 0x4a, 0x5b, 0x00, 0xd0, 0x5c,
0x70, 0x47, 0xc0, 0x46, 0x14, 0x00, 0x00, 0x20, 0x03, 0x00, 0x12, 0x18, 0x93, 0x42, 0x00, 0xd1,
0x70, 0x47, 0x19, 0x70, 0x01, 0x33, 0xf9, 0xe7, 0x10, 0xb5, 0x00, 0x20, 0xff, 0xf7, 0x46, 0xfc,
0x00, 0xf0, 0x1e, 0xf8, 0xfc, 0xe7, 0x70, 0x47, 0x01, 0x4b, 0x82, 0x22, 0x5a, 0x60, 0x70, 0x47,
0x00, 0x40, 0x00, 0x41, 0x10, 0xb5, 0x0e, 0x23, 0xc9, 0x5e, 0xff, 0xf7, 0x11, 0xff, 0x10, 0xbd,
0x80, 0x23, 0x9b, 0x01, 0x00, 0x20, 0x4b, 0x60, 0x70, 0x47, 0xc0, 0x46, 0x43, 0x00, 0x50, 0x4f,
0x53, 0x49, 0x58, 0x00, 0x00, 0x2e, 0x00, 0x00, 0xbf, 0xf3, 0x5f, 0x8f, 0x62, 0xb6, 0x70, 0x47,
0x10, 0xb5, 0xff, 0xf7, 0x81, 0xf9, 0x10, 0xbd, 0x10, 0xb5, 0x00, 0xf0, 0x15, 0xf8, 0x10, 0xbd,
0x10, 0xb5, 0xff, 0xf7, 0xe5, 0xff, 0x10, 0xbd, 0x10, 0xb5, 0x00, 0xf0, 0x15, 0xf8, 0x10, 0xbd,
0x10, 0xb5, 0x00, 0xf0, 0x13, 0xf8, 0x10, 0xbd, 0x10, 0xb5, 0xff, 0xf7, 0x25, 0xfe, 0x10, 0xbd,
0x10, 0xb5, 0xff, 0xf7, 0x29, 0xfd, 0x10, 0xbd, 0x01, 0x20, 0x40, 0x42, 0x70, 0x47, 0xc0, 0x46,
0x70, 0x47, 0xc0, 0x46, 0x80, 0x01, 0x00, 0x20, 0x01, 0x20, 0x70, 0x47, 0x00, 0x20, 0x70, 0x47,
0x70, 0x47, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
};
#endif
|
// find handles GET /person/name/:name requests and returns the Person.
func (p *Person) find(c *gin.Context) {
name := c.Param("name")
person, err := p.Controller.Find(name)
if err != nil {
c.JSON(
http.StatusInternalServerError,
gin.H{
"message": fmt.Sprintf("Failed to find %s", name),
"error": err,
})
return
}
if person == nil {
c.JSON(
			http.StatusNotFound, // a 204 response must not carry a body; 404 matches the "not found" message below
gin.H{
"message": fmt.Sprintf("%s wasn't found", name),
})
return
}
c.JSON(
http.StatusOK,
person,
)
} |
/// Insert an element in the matrix. If the element is already present,
/// its value is overwritten.
///
/// Warning: this is not an efficient operation, as it requires
/// a non-constant lookup followed by two `Vec` insertions.
///
    /// The insertion will be efficient, however, if the elements are inserted
    /// according to the matrix's storage order, e.g. following the row order
    /// for a CSR matrix.
pub fn insert(&mut self, row: usize, col: usize, val: N) {
match self.storage() {
CSR => self.insert_outer_inner(row, col, val),
CSC => self.insert_outer_inner(col, row, val),
}
    }
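
    // Usage sketch for the `insert` method above (added for illustration, not
    // from the original source). It assumes the surrounding type behaves like
    // the sprs crate's `CsMat`; the constructor name `CsMat::zero` is an
    // assumption, so swap in whatever empty-matrix constructor the real type
    // provides.
    fn insertion_order_demo() {
        use sprs::CsMat; // assumption: the matrix type above is sprs' CsMat

        // Empty 3x3 matrix in CSR storage (constructor name assumed, see above).
        let mut mat: CsMat<f64> = CsMat::zero((3, 3));

        // Row-major insertion order for a CSR matrix: each call appends at the
        // end of the inner index/value vectors, so the two `Vec` insertions
        // become cheap pushes and the lookup stays short.
        for &(row, col, val) in &[(0, 0, 1.0), (0, 2, 2.0), (1, 1, 3.0), (2, 2, 4.0)] {
            mat.insert(row, col, val);
        }

        // Jumping back to an earlier row at this point would still be correct,
        // but it would take the slower mid-`Vec` insertion path noted above.
        assert_eq!(mat.nnz(), 4);
    }
|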
# Read a 5x5 grid containing a single 1 and report how many single-step moves
# are needed to bring that 1 to the center cell (row 2, column 2).
rs = 0
cs = 0
for i in range(5):
    row = list(map(int, input().split()))
    if 1 in row:
        rs = abs(2 - i)             # vertical distance to the center row
        cs = abs(2 - row.index(1))  # horizontal distance to the center column
print(rs + cs)
|
// Source (from repository metadata): EmmanuelOga/virgil, file test/c.java
package virgil;
public class Test extends ATest {
public Test() { super(); }
public int magicNumber() {
return super.magicNumber();
}
}
|
package com.vmware.vim25;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for HostListSummary complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="HostListSummary">
* <complexContent>
* <extension base="{urn:vim25}DynamicData">
* <sequence>
* <element name="host" type="{urn:vim25}ManagedObjectReference" minOccurs="0"/>
* <element name="hardware" type="{urn:vim25}HostHardwareSummary" minOccurs="0"/>
* <element name="runtime" type="{urn:vim25}HostRuntimeInfo" minOccurs="0"/>
* <element name="config" type="{urn:vim25}HostConfigSummary"/>
* <element name="quickStats" type="{urn:vim25}HostListSummaryQuickStats"/>
* <element name="overallStatus" type="{urn:vim25}ManagedEntityStatus"/>
* <element name="rebootRequired" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <element name="customValue" type="{urn:vim25}CustomFieldValue" maxOccurs="unbounded" minOccurs="0"/>
* <element name="managementServerIp" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="maxEVCModeKey" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="currentEVCModeKey" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="gateway" type="{urn:vim25}HostListSummaryGatewaySummary" minOccurs="0"/>
* <element name="tpmAttestation" type="{urn:vim25}HostTpmAttestationInfo" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "HostListSummary", propOrder = {
"host",
"hardware",
"runtime",
"config",
"quickStats",
"overallStatus",
"rebootRequired",
"customValue",
"managementServerIp",
"maxEVCModeKey",
"currentEVCModeKey",
"gateway",
"tpmAttestation"
})
public class HostListSummary
extends DynamicData
{
protected ManagedObjectReference host;
protected HostHardwareSummary hardware;
protected HostRuntimeInfo runtime;
@XmlElement(required = true)
protected HostConfigSummary config;
@XmlElement(required = true)
protected HostListSummaryQuickStats quickStats;
@XmlElement(required = true)
protected ManagedEntityStatus overallStatus;
protected boolean rebootRequired;
protected List<CustomFieldValue> customValue;
protected String managementServerIp;
protected String maxEVCModeKey;
protected String currentEVCModeKey;
protected HostListSummaryGatewaySummary gateway;
protected HostTpmAttestationInfo tpmAttestation;
/**
* Gets the value of the host property.
*
* @return
* possible object is
* {@link ManagedObjectReference }
*
*/
public ManagedObjectReference getHost() {
return host;
}
/**
* Sets the value of the host property.
*
* @param value
* allowed object is
* {@link ManagedObjectReference }
*
*/
public void setHost(ManagedObjectReference value) {
this.host = value;
}
/**
* Gets the value of the hardware property.
*
* @return
* possible object is
* {@link HostHardwareSummary }
*
*/
public HostHardwareSummary getHardware() {
return hardware;
}
/**
* Sets the value of the hardware property.
*
* @param value
* allowed object is
* {@link HostHardwareSummary }
*
*/
public void setHardware(HostHardwareSummary value) {
this.hardware = value;
}
/**
* Gets the value of the runtime property.
*
* @return
* possible object is
* {@link HostRuntimeInfo }
*
*/
public HostRuntimeInfo getRuntime() {
return runtime;
}
/**
* Sets the value of the runtime property.
*
* @param value
* allowed object is
* {@link HostRuntimeInfo }
*
*/
public void setRuntime(HostRuntimeInfo value) {
this.runtime = value;
}
/**
* Gets the value of the config property.
*
* @return
* possible object is
* {@link HostConfigSummary }
*
*/
public HostConfigSummary getConfig() {
return config;
}
/**
* Sets the value of the config property.
*
* @param value
* allowed object is
* {@link HostConfigSummary }
*
*/
public void setConfig(HostConfigSummary value) {
this.config = value;
}
/**
* Gets the value of the quickStats property.
*
* @return
* possible object is
* {@link HostListSummaryQuickStats }
*
*/
public HostListSummaryQuickStats getQuickStats() {
return quickStats;
}
/**
* Sets the value of the quickStats property.
*
* @param value
* allowed object is
* {@link HostListSummaryQuickStats }
*
*/
public void setQuickStats(HostListSummaryQuickStats value) {
this.quickStats = value;
}
/**
* Gets the value of the overallStatus property.
*
* @return
* possible object is
* {@link ManagedEntityStatus }
*
*/
public ManagedEntityStatus getOverallStatus() {
return overallStatus;
}
/**
* Sets the value of the overallStatus property.
*
* @param value
* allowed object is
* {@link ManagedEntityStatus }
*
*/
public void setOverallStatus(ManagedEntityStatus value) {
this.overallStatus = value;
}
/**
* Gets the value of the rebootRequired property.
*
*/
public boolean isRebootRequired() {
return rebootRequired;
}
/**
* Sets the value of the rebootRequired property.
*
*/
public void setRebootRequired(boolean value) {
this.rebootRequired = value;
}
/**
* Gets the value of the customValue property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the customValue property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getCustomValue().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link CustomFieldValue }
*
*
*/
public List<CustomFieldValue> getCustomValue() {
if (customValue == null) {
customValue = new ArrayList<CustomFieldValue>();
}
return this.customValue;
}
/**
* Gets the value of the managementServerIp property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getManagementServerIp() {
return managementServerIp;
}
/**
* Sets the value of the managementServerIp property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setManagementServerIp(String value) {
this.managementServerIp = value;
}
/**
* Gets the value of the maxEVCModeKey property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getMaxEVCModeKey() {
return maxEVCModeKey;
}
/**
* Sets the value of the maxEVCModeKey property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setMaxEVCModeKey(String value) {
this.maxEVCModeKey = value;
}
/**
* Gets the value of the currentEVCModeKey property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCurrentEVCModeKey() {
return currentEVCModeKey;
}
/**
* Sets the value of the currentEVCModeKey property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCurrentEVCModeKey(String value) {
this.currentEVCModeKey = value;
}
/**
* Gets the value of the gateway property.
*
* @return
* possible object is
* {@link HostListSummaryGatewaySummary }
*
*/
public HostListSummaryGatewaySummary getGateway() {
return gateway;
}
/**
* Sets the value of the gateway property.
*
* @param value
* allowed object is
* {@link HostListSummaryGatewaySummary }
*
*/
public void setGateway(HostListSummaryGatewaySummary value) {
this.gateway = value;
}
/**
* Gets the value of the tpmAttestation property.
*
* @return
* possible object is
* {@link HostTpmAttestationInfo }
*
*/
public HostTpmAttestationInfo getTpmAttestation() {
return tpmAttestation;
}
/**
* Sets the value of the tpmAttestation property.
*
* @param value
* allowed object is
* {@link HostTpmAttestationInfo }
*
*/
public void setTpmAttestation(HostTpmAttestationInfo value) {
this.tpmAttestation = value;
}
}
|
/**
* Subscriptions admin page
* @author tina, bwolf, aseem, michael
*
*/
public class CNSSubscriptionPageServlet extends AdminServletBase {
private static final long serialVersionUID = 1L;
private static Logger logger = Logger.getLogger(CNSSubscriptionPageServlet.class);
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (redirectUnauthenticatedUser(request, response)) {
return;
}
CMBControllerServlet.valueAccumulator.initializeAllCounters();
response.setContentType("text/html");
PrintWriter out = response.getWriter();
Map<?, ?> parameters = request.getParameterMap();
String userId = request.getParameter("userId");
String topicArn = request.getParameter("topicArn");
String endPoint = request.getParameter("endPoint");
String protocol = request.getParameter("protocol");
String arn = request.getParameter("arn");
String nextToken = request.getParameter("nextToken");
connect(request);
if (parameters.containsKey("Subscribe")) {
try {
SubscribeRequest subscribeRequest = new SubscribeRequest(topicArn, protocol.toLowerCase(), endPoint);
sns.subscribe(subscribeRequest);
} catch (Exception ex) {
logger.error("event=subscribe", ex);
throw new ServletException(ex);
}
} else if (parameters.containsKey("Unsubscribe")) {
try {
UnsubscribeRequest unsubscribeRequest = new UnsubscribeRequest(arn);
sns.unsubscribe(unsubscribeRequest);
} catch (Exception ex) {
logger.error("event=unsubscribe arn=" + arn , ex);
throw new ServletException(ex);
}
}
List<Subscription> subscriptions = new ArrayList<Subscription>();
ListSubscriptionsByTopicResult listSubscriptionsByTopicResult = null;
try {
listSubscriptionsByTopicResult = sns.listSubscriptionsByTopic(new ListSubscriptionsByTopicRequest(topicArn, nextToken));
subscriptions = listSubscriptionsByTopicResult.getSubscriptions();
} catch (Exception ex) {
logger.error("event=listAllSubscriptionsByTopic topic_arn=" + topicArn, ex);
throw new ServletException(ex);
}
ICNSTopicPersistence topicHandler = PersistenceFactory.getTopicPersistence();
CNSTopic topic = null;
try {
topic = topicHandler.getTopic(topicArn);
} catch (Exception ex) {
logger.error("event=getTopic topic_arn=" + topicArn, ex);
throw new ServletException(ex);
}
out.println("<html>");
out.println("<script type='text/javascript' language='javascript'>");
out.println("function changeEndpointHint(protocol){ ");
out.println(" if (protocol == 'HTTP' || protocol == 'HTTPS') { ");
out.println(" document.getElementById('endPoint').placeholder = 'e.g. http://company.com'; }");
out.println(" else if (protocol == 'EMAIL' || protocol == 'EMAIL_JSON') { ");
out.println(" document.getElementById('endPoint').placeholder = 'e.g. [email protected]'; }");
out.println(" else if (protocol == 'CQS' || protocol == 'SQS') { ");
out.println(" document.getElementById('endPoint').placeholder = 'e.g. arn:aws:cqs:ccp:555555555555:my-queue'; } ");
out.println(" else if (protocol == 'redis') { document.getElementById('endPoint').placeholder = 'e.g. redis://server:port/channelname'; }");
out.println("}");
out.println("</script>");
header(request, out, "Subscriptions for Topic "+ ((topic != null) ? topic.getName():""));
out.println("<body>");
out.println("<h2>Subscriptions for Topic "+ ((topic != null) ? topic.getName():"") + "</h2>");
if (user != null) {
out.println("<table><tr><td><b>User Name:</b></td><td>"+ user.getUserName()+"</td></tr>");
out.println("<tr><td><b>User ID:</b></td><td>"+ user.getUserId()+"</td></tr>");
out.println("<tr><td><b>Access Key:</b></td><td>"+user.getAccessKey()+"</td></tr>");
out.println("<tr><td><b>Access Secret:</b></td><td>"+user.getAccessSecret()+"</td></tr>");
out.println("<tr><td><b>Topic Name:</b></td><td>"+ topic.getName()+"</td></tr>");
out.println("<tr><td><b>Topic Display Name:</b></td><td>" + topic.getDisplayName()+ "</td></tr>");
out.println("<tr><td><b>Topic Arn:</b></td><td>" + topic.getArn()+ "</td></tr>");
out.println("<tr><td><b>Num Subscriptions:</b></td><td>" + subscriptions.size()+ "</td></tr></table>");
}
out.println("<p><table><tr><td><b>Protocol</b></td><td><b>End Point</b></td><td> </td></tr>");
out.println("<form action=\"/webui/cnsuser/subscription/?userId="+userId+"&topicArn="+topicArn+"\" method=POST>");
out.println("<tr><td><select name='protocol' onchange='changeEndpointHint(this.value)'><option value='HTTP'>HTTP</option><option value='HTTPS'>HTTPS</option><option value='EMAIL'>EMAIL</option><option value='EMAIL_JSON'>EMAIL_JSON</option><option value='CQS'>CQS</option><option value='SQS'>SQS</option><option value='redis'>REDIS</option></select></td>");
out.println("<td><input type='text' name='endPoint' id = 'endPoint' size='65' placeholder='e.g. http://company.com'><input type='hidden' name='userId' value='"+ userId + "'></td><td><input type='submit' value='Subscribe' name='Subscribe' /></td></tr>");
out.println("</form></table>");
out.println("<p><hr width='100%' align='left' />");
out.println("<p><table class = 'alternatecolortable' border='1'>");
out.println("<tr><th>Row</th>");
out.println("<th>Arn</th>");
out.println("<th>Protocol</th>");
out.println("<th>End Point</th>");
out.println("<th>Subscription Attributes</th>");
out.println("<th>Raw Message Delivery</th>");
out.println("<th> </th></tr>");
for (int i = 0; subscriptions != null && i < subscriptions.size(); i++) {
Subscription s = subscriptions.get(i);
out.println("<tr>");
out.println("<form action=\"/webui/cnsuser/subscription/?userId="+user.getUserId()+"&arn="+s.getSubscriptionArn()+"&topicArn="+topicArn+"\" method=POST>");
out.println("<td>"+i+"</td>");
out.println("<td>"+s.getSubscriptionArn() +"<input type='hidden' name='arn' value="+s.getSubscriptionArn()+"></td>");
out.println("<td>"+s.getProtocol()+"</td>");
if(s.getProtocol().toLowerCase().equals("cqs")&&isAdmin(request)){
out.println("<td><a href='/webui/cqsuser?userId="+Util.getUserIdForQueueArn(s.getEndpoint())+"'>"+s.getEndpoint()+"</a></td>");
} else {
out.println("<td>"+s.getEndpoint()+"</td>");
}
if (s.getProtocol().toString().equals("http") && !s.getSubscriptionArn().equals("PendingConfirmation")) {
out.println("<td><a href='#' onclick=\"window.open('/webui/cnsuser/subscription/editdeliverypolicy?subscriptionArn="+ s.getSubscriptionArn() + "&userId=" + userId + "', 'EditDeliveryPolicy', 'height=630,width=580,toolbar=no')\">View/Edit Delivery Policy</a></td>");
} else {
out.println("<td> </td>");
}
if ((s.getProtocol().toString().equals("https") || s.getProtocol().toString().equals("http") || s.getProtocol().toString().equals("cqs") || s.getProtocol().toString().equals("sqs"))
&& !s.getSubscriptionArn().equals("PendingConfirmation")) {
String url = "/webui/cnsuser/subscription/rawmessagedeliverypolicy/?subscriptionArn="+ s.getSubscriptionArn() + "&userId=" + userId;
out.println("<td><a href='#' onclick=\"window.open('" + url + "', 'RawMessageDelivery', 'height=200,width=580,toolbar=no')\">Raw Message Delivery</a></td>");
} else {
out.println("<td> </td>");
}
if (s.getSubscriptionArn().equals("PendingConfirmation")) {
out.println("<td> </td>");
} else {
out.println("<td><input type='submit' value='Unsubscribe' name='Unsubscribe'/></td>");
}
out.println("</form></tr>");
}
out.println("</table></p>");
if (listSubscriptionsByTopicResult != null && listSubscriptionsByTopicResult.getNextToken() != null) {
out.println("<p><a href='/webui/cnsuser/subscription/?userId="+userId+"&topicArn="+topicArn+"&nextToken="+response.encodeURL(listSubscriptionsByTopicResult.getNextToken())+"'>next ></a></p>");
}
out.println("<h5 style='text-align:center;'><a href='/webui'>ADMIN HOME</a>");
out.println("<a href='/webui/cnsuser?userId="+userId+"&topicArn="+topicArn+"'>BACK TO TOPIC</a></h5>");
out.println("</body></html>");
CMBControllerServlet.valueAccumulator.deleteAllCounters();
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doGet(request, response);
}
} |
use std::str;
pub fn str_from_u8_null_utf8(utf8_src: &[u8]) -> Result<&str, str::Utf8Error> {
let null_range_end = utf8_src.iter()
.position(|&c| c == b'\0')
.unwrap_or(utf8_src.len());
str::from_utf8(&utf8_src[0..null_range_end])
}
|
Clinical Features of Kidney Transplant Recipients Admitted to the Intensive Care Unit
Introduction: There is a paucity of data regarding the complications in kidney transplant patients who may require intensive care unit (ICU) management, even though kidney transplantation is the most common solid organ transplant worldwide. Objective: To identify the main reasons for ICU admission and to determine the factors associated with hospital mortality in kidney transplant recipients. Design: This single-center retrospective cohort study was conducted between September 2013 and June 2014, including all consecutive kidney transplant patients requiring ICU admission. We collected data on patient demographics, transplant characteristics, clinical data, and prognostic scores. The independent determinants of hospital mortality were identified by multiple logistic regression analysis. We also assessed the performance of Simplified Acute Physiology Score 3 (SAPS 3) and Acute Physiology and Chronic Health Evaluation II (APACHE II) scores. Results: We analyzed data from 413 patients, the majority of whom were admitted late after renal transplantation (1169 days; 63-3003 days). The main reason for admission was sepsis (33.2%), followed by cardiovascular disease (16%). Age (odds ratio [OR] 1.05, confidence interval [CI] 1.01-1.09), SAPS 3 score (OR 1.04, CI 1.01-1.08), the need for mechanical ventilation (OR 26.47, CI 10.30-68.08), and vasopressor use (OR 3.34, CI 1.37-8.13) were independently associated with hospital mortality. The performance of SAPS 3 and APACHE II scores was poor in this population and overestimated the mortality rates. Conclusion: Sepsis was the main reason for ICU admission in kidney transplant recipients, followed by cardiovascular disease. Age and disease severity were associated with hospital mortality.
/*
* unregister a network filesystem from the cache
* - all cookies must have been released first
*/
void __fscache_unregister_netfs(struct fscache_netfs *netfs)
{
_enter("{%s.%u}", netfs->name, netfs->version);
down_write(&fscache_addremove_sem);
list_del(&netfs->link);
fscache_relinquish_cookie(netfs->primary_index, 0);
up_write(&fscache_addremove_sem);
pr_notice("Netfs '%s' unregistered from caching\n",
netfs->name);
_leave("");
} |
On differentially demodulated CPFSK
This paper develops a differential encoder for differentially demodulated continuous phase frequency shift keying (CPFSK). CPFSK schemes with modulation index h=K/P, where K and P are relatively prime positive integers, can be represented by a decomposed model consisting of a continuous phase encoder (CPE) and a memoryless modulator (MM). The differential encoder is shown to fit well with the CPE and form a decomposed model of differentially encoded CPFSK (DCPFSK). A basic receiver structure for differentially demodulating DCPFSK is presented along with simulation results. An exact formula for the minimum squared Euclidean distance (MSED) of differentially demodulated DCPFSK is also given. |
/*
* $Id$
*
* Copyright (c) 2004-2005, 2011, Juniper Networks, Inc.
* All rights reserved.
* This SOFTWARE is licensed under the LICENSE provided in the
* ../Copyright file. By downloading, installing, copying, or otherwise
* using the SOFTWARE, you agree to be bound by the terms of that
* LICENSE.
*
* libxml -- XML portability layer
*/
#include <libxml/parser.h>
#include <libxml/tree.h>
#include <libxslt/xslt.h>
#include <libxslt/xsltInternals.h>
#include <libxslt/transform.h>
#include <libxslt/xsltutils.h>
/*
* Use these options for reading XML data
*/
#define LX_READ_OPTIONS (XML_PARSE_NONET | XML_PARSE_NODICT \
| XML_PARSE_NSCLEAN | XML_PARSE_NOCDATA)
/* Use our own typedefs */
#define XML_TYPEDEF(_struct, _typedef ) \
struct _struct; \
typedef struct _struct _typedef;
XML_TYPEDEF(_xmlDoc, lx_document_t);
XML_TYPEDEF(_xmlNode, lx_node_t);
XML_TYPEDEF(_xsltStylesheet, lx_stylesheet_t);
XML_TYPEDEF(_xmlNodeSet, lx_nodeset_t);
XML_TYPEDEF(_xmlXPathObject, lx_xpath_t);
typedef struct {
xmlSaveCtxt *context;
xmlBuffer *buffer;
} lx_output_t;
/* Helper types */
typedef int lx_cookie_t;
#define LX_COOKIE_CLEAR(_x) (*_x) = -1;
/*
* Simple error call
*/
#define LX_ERR(_msg...) xsltGenericError(xsltGenericErrorContext, _msg)
/*
* Perform any initialization required by the xml parser
*/
void lx_parser_init (void);
/*
* Perform any cleanup required by the xml parser
*/
void lx_parser_done (void);
/*
* Read a file into an in-memory XML document tree.
*/
lx_document_t *lx_document_read (const char *filename);
/*
* Read a document from the fd and return it
*/
lx_document_t *lx_document_read_fd (int fd, const char *filename);
/*
* Build a new document with the given root element
*/
lx_document_t *lx_document_create (const char *root);
/*
* Free a document
*/
void lx_document_free (lx_document_t *docp);
/*
* Find a node in a document
*/
lx_node_t *lx_document_find (lx_document_t *config, const char **path);
/*
* Find a node beneath another node
*/
lx_node_t *lx_node_find (lx_node_t *top, const char **path);
/*
* Return the root node of a document
*/
lx_node_t *lx_document_root (lx_document_t *docp);
/*
* Simple accessor for children
*/
lx_node_t *lx_node_children (lx_node_t *np);
/*
* Return the next node (sibling)
*/
lx_node_t *lx_node_next (lx_node_t *np);
/*
* Return the value of a (simple) element
*/
const char *lx_node_child_value (lx_node_t *parent, const char *name);
/*
* Read a stylesheet from a file descriptor
*/
lx_stylesheet_t *lx_style_read_fd (int fd, const char *filename);
/*
* Run a stylesheet on a document; return the results
*/
lx_document_t *lx_run_stylesheet (lx_stylesheet_t *slp, lx_document_t *docp,
const char **params);
/*
* Dump a copy of the results to the debug log.
*/
void lx_dump_results (lx_document_t *docp, lx_stylesheet_t *sp);
/*
* Dump a node to the debug log
*/
void
lx_dump_node (lx_document_t *docp, lx_node_t *nodep);
/*
 * Iterate through a nodeset; clear the cookie with LX_COOKIE_CLEAR() before the initial call.
*/
lx_node_t *
lx_nodeset_next (lx_nodeset_t *nodeset, lx_cookie_t *cookie);
/*
* Return the size of a nodeset
*/
unsigned long
lx_nodeset_size (lx_nodeset_t *nodeset);
/*
* Return a nodeset matching the xpath expression
*/
lx_nodeset_t *
lx_xpath_select (lx_document_t *docp, lx_node_t *nodep, const char *expr);
/*
* Open an output file
*/
lx_output_t *
lx_output_open (const char *filename);
/*
* Open an output file descriptor
*/
lx_output_t *
lx_output_open_fd (int fd);
/*
* Open an output buffer (string)
*/
lx_output_t *
lx_output_open_buffer (void);
/*
* Close an output file/buffer handle
*/
void
lx_output_close (lx_output_t *handle);
/*
* Return the output buffer as a string
*/
const char *
lx_output_buffer (lx_output_t *handle);
/*
* Free the output data structure and buffer (if any)
*/
void
lx_output_cleanup (lx_output_t *handle);
/*
* Write a document to a file
*/
void
lx_output_document (lx_output_t *handle, lx_document_t *docp);
/*
* Write a node to a file
*/
void
lx_output_node (lx_output_t *handle, lx_node_t *nodep);
/*
* Write a node to the trace file
*/
void
lx_trace_node (lx_node_t *nodep, const char *fmt, ...);
/*
* Write a document to a file to the trace file
*/
void
lx_trace_document (lx_document_t *docp, const char *fmt, ...);
/*
* Write the node's children to a file
*/
void
lx_output_children (lx_output_t *handle, lx_node_t *nodep);
|
package com.github.gogy.monitor.jvm;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import java.io.IOException;
/**
* @author yuanyi
* @date 2018/1/29
*/
@Slf4j
public class JVMMetricHelper {
private MBeanServerConnection connection;
private ObjectName objectName;
public JVMMetricHelper(MBeanServerConnection connection, ObjectName name) {
this.connection = connection;
this.objectName = name;
}
public String getString(String name) {
return (String)this.getAttribute(name, "", String.class);
}
public String[] getStringArray(String name) throws Exception {
return !StringUtils.hasText(name) ? null : this.getAttribute(name, new String[0], String[].class);
}
public Integer getInteger(String name) throws Exception {
return this.getAttribute(name, Integer.valueOf(0), Integer.class);
}
public Long getLong(String name) throws Exception {
return this.getAttribute(name, Long.valueOf(0L), Long.class);
}
public Double getDouble(String name) throws Exception {
return this.getAttribute(name, Double.valueOf(0.0D), Double.class);
}
private <T> T getAttribute(String name, T defaultValue, Class<T> cls) {
try {
if(StringUtils.isEmpty(name)) {
return defaultValue;
} else {
Object obj = this.connection.getAttribute(this.objectName, name);
if(cls.isInstance(obj)) {
return (T)obj;
} else {
log.warn("attribute type error:" + this.objectName + "/" + name + ", expect:" + cls + ", actual:" + obj.getClass().getName());
return defaultValue;
}
}
} catch (IOException e) {
throw new RuntimeException(e);
} catch (Exception e) {
log.warn("attribute exception:" + this.objectName + "/" + name + "e:" + e.getClass() + ", m:" + e.getMessage());
return defaultValue;
}
}
}
|
mod two;
use rand::seq::SliceRandom;
use std::fs;
use std::io::Result;
use std::path::PathBuf;
use std::vec::Vec;
/// Generate <num_pairs> pairs of character images.
pub fn random_pairs(dataset: &str, num_pairs: usize) -> Result<Vec<(PathBuf, PathBuf)>> {
let scripts = fs::read_dir(dataset)?
.map(|res| res.map(|e| e.path()))
.collect::<Result<Vec<_>>>()?;
let mut pairs: Vec<(PathBuf, PathBuf)> = Vec::new();
macro_rules! sample {
( $func:expr $( , $more:expr )* ) => (
if let Ok(ps) = $func($( $more ),*) {
pairs.push(ps);
continue;
}
panic!();
);
}
for _ in 0..num_pairs {
// Probabilisticly, half will be pairs of the same character.
if rand::random::<bool>() {
if let Some(s) = scripts.choose(&mut rand::thread_rng()) {
sample!(two::sample_positive, s);
}
panic!();
}
if let Some(s1) = scripts.choose(&mut rand::thread_rng()) {
if let Some(s2) = scripts.choose(&mut rand::thread_rng()) {
if s1 == s2 {
sample!(two::sample_negative_same_script, s1);
}
sample!(two::sample_negative_diff_scripts, s1, s2);
}
}
}
Ok(pairs)
}
|
/**
 * @brief equality predicate for sources
 * Two sources are considered equal when they reference the same phrase
 */
class SourceEqual{
public:
    bool operator()(const Source * s1, const Source * s2) const
    {
        return s1->p == s2->p;
    }
};
package database
// Code generated by cdproto-gen. DO NOT EDIT.
// EventAddDatabase [no description].
type EventAddDatabase struct {
Database *Database `json:"database"`
}
|
An understanding of the electrophilic/nucleophilic behavior of electron-deficient 2,3-disubstituted 1,3-butadienes in polar Diels-Alder reactions. A density functional theory study.
The electrophilic/nucleophilic behavior of dimethyl 2,3-dimethylenesuccinate 1, an electron-deficient 2,3-disubstituted 1,3-butadiene, in polar Diels-Alder reactions has been studied using DFT methods at the B3LYP/6-31G(d) level of theory. The electronic nature of bonding of the transition structures involved in the cycloaddition reactions of the diene 1 toward the nucleophilically activated dienophile 6 and the strong electrophilically activated dienophile 7 has been carefully examined within the natural bond orbital (NBO) and the topological analysis of the electron localization function (ELF) frameworks. Additionally, a study of the global electrophilicity pattern of the reagents at the ground state was performed. This evidence allows us to rationalize the participation of electron-deficient 2-susbtituted and 2,3-disubstituted 1,3-butadienes as nucleophiles in polar Diels-Alder reactions. |
The Steady‐State Dipole‐Flow Test for Characterization of Hydraulic Conductivity Statistics in a Highly Permeable Aquifer: Horkheimer Insel Site, Germany
Over the last decade the dipole-flow test (DFT) evolved from the general idea of using recirculatory flow to evaluate aquifer properties, to the development of prototype instrumentation and feasibility studies, to a reliable tool for characterization of aquifer heterogeneity. The DFT involves the interpretation of head in recirculatory flow between injection and extraction sections (chambers) in a single well isolated from each other by a multipacker system. In this study, the steady-state dipole-flow test has been used to characterize the statistics of horizontal hydraulic conductivity (Kr) of the highly permeable, heterogeneous, and thin aquifer at the Horkheimer Insel site, Germany. In previous studies, Kr estimates were based on the steady-state head difference between chambers. A new by-chamber interpretation is proposed that is based on drawdown within each individual chamber. This interpretation yields more detailed information on the structure of heterogeneity of the aquifer without introducing complexity into the analysis. The DFT results indicate that Kr ranges from 49 to 6000 m/day (mean ln Kr ∼ −4, and variance of ln Kr ∼ 1–2). Descriptive statistics from the DFT compare well with those from previous field and laboratory tests (pumping, borehole flowmeter, and permeameter tests and grain-size analysis) at this site. It is shown that the role of confining boundaries in the DFT interpretation is negligible even in this case of a thin (< 4 m thick) aquifer. This study demonstrates the flexibility of the DFT and expands the potential application of this method to a wide range of hydrogeologic settings.
def media_discoverer_new(self, psz_name):
return libvlc_media_discoverer_new(self, str_to_bytes(psz_name)) |
Intelligent Reflecting Surface Enhanced Wireless Network: Two-timescale Beamforming Optimization
Intelligent reflecting surface (IRS) has drawn a lot of attention recently as a promising new solution to achieve high spectral and energy efficiency for future wireless networks. By utilizing massive low-cost passive reflecting elements, the wireless propagation environment becomes controllable and thus can be made favorable for improving the communication performance. Prior works on IRS mainly rely on the instantaneous channel state information (I-CSI), which, however, is practically difficult to obtain for IRS-associated links due to its passive operation and large number of elements. To overcome this difficulty, we propose in this paper a new two-timescale (TTS) transmission protocol to maximize the achievable average sum-rate for an IRS-aided multiuser system under the general correlated Rician channel model. Specifically, the passive IRS phase-shifts are first optimized based on the statistical CSI (S-CSI) of all links, which varies much slowly as compared to their I-CSI, while the transmit beamforming/precoding vectors at the access point (AP) are then designed to cater to the I-CSI of the users' effective channels with the optimized IRS phase-shifts, thus significantly reducing the channel training overhead and passive beamforming complexity over the existing schemes based on the I-CSI of all channels. For the single-user case, a novel penalty dual decomposition (PDD)-based algorithm is proposed, where the IRS phase-shifts are updated in parallel to reduce the computational time. For the multiuser case, we propose a general TTS optimization algorithm by constructing a quadratic surrogate of the objective function, which cannot be explicitly expressed in closed-form. Simulation results are presented to validate the effectiveness of our proposed algorithms and evaluate the impact of S-CSI and channel correlation on the system performance.
I. INTRODUCTION
Massive multiple-input multiple-output (MIMO) technology can achieve high spectral efficiency for wireless communication by exploiting highly directional beamforming and spatial multiplexing gains. However, equipping a large number of antennas may lead to higher circuit energy consumption and hardware cost, especially as the wireless system evolves into the new era of millimeter-wave (mmWave) communications. Recently, intelligent reflecting surface (IRS) (also known as reconfigurable intelligent surface (RIS) and so on) has been proposed as a new solution to achieve high spectral efficiency with low energy and hardware cost. Specifically, IRS is a passive array composed of a large number of passive reflecting elements, which can induce a phase shift and/or amplitude change of the incident signal independently, thus collaboratively creating a favorable wireless signal propagation environment to enhance the communication performance. In addition, since such passive elements do not require any transmit radio frequency (RF) chains, their energy and hardware cost is much lower as compared to that of the traditional active antennas at the base stations (BSs), access points (APs), and relays. As a result, they can be densely deployed in wireless networks at a scalable cost, and yet without causing any interference to each other provided that they are deployed sufficiently far apart. Moreover, it is practically easy to integrate IRSs into the existing cellular or WiFi systems, as there is no need to modify their existing infrastructure and operating standards. All the above advantages make IRS a promising technology for future wireless systems, particularly for indoor/hot-spot coverage and cell-edge performance enhancement.
IRS has been investigated recently in various aspects and under different setups, such as passive beamforming designs, IRS-aided orthogonal frequency division multiplexing (OFDM) systems, IRS-aided mmWave communications, physical layer security, wireless power transfer, and so on. Particularly, it was shown that IRS is able to create a "signal hot spot" in its vicinity with an asymptotic power gain in the order of N^2, where N denotes the number of IRS reflecting elements. Moreover, it was further shown that even with practical discrete phase shifters at the IRS, the same squared power gain of N^2 is achievable with only a constant power loss in dB depending on the number of phase-shift levels at each reflecting element, which becomes negligible as N becomes very large. Therefore, significant performance gains can be achieved with IRS as compared to conventional wireless systems without using IRS.
To fully realize the potential of IRS-aided wireless systems, accurate channel state information (CSI) of the AP-IRS and IRS-user links is essential for optimizing the reflection coefficients. However, as the number of reflecting elements is usually very large, obtaining the accurate CSI of the AP-IRS and IRS-user links is practically difficult. As a result, how to effectively estimate the IRS-associated channels with low training/signaling overhead while still reaping most of the performance gain offered by IRS becomes a crucial issue. In the literature, there are some recent works that studied the channel estimation problem for IRS-aided wireless systems. Specifically, a binary reflection controlled least-square (LS) channel estimation method was proposed, where N training symbols are needed to estimate the channel coefficients associated with the IRS, and an IRS element-grouping method was proposed to reduce the training overhead, at the cost of degraded passive beamforming performance of the IRS. A reflection pattern based channel estimation method was proposed for an IRS-enhanced OFDM system, which was shown to have superior mean squared error (MSE) channel estimation performance compared with the aforementioned scheme, with the same amount of training symbols; this work was further extended to the discrete phase-shift case. In addition, the low-rank structure of the massive MIMO channel was exploited and the cascaded channel estimation problem for IRS was addressed by leveraging combined bilinear factorization and matrix completion.
It is worth pointing out that in the aforementioned studies, the beamforming vectors are mainly designed based on the instantaneous CSI (I-CSI). In practice, this approach will incur high signal processing complexity and large training/signaling overhead. Moreover, most of the existing works on IRSaided wireless systems assume that the phase shifts of the reflecting elements can be continuously adjusted. However, discrete phase-shift controls are usually desired in practice in order to lower the implementation cost of IRS .
To tackle the above challenges, we propose in this paper a two-timescale (TTS) joint active and passive beamforming scheme for an IRS-aided multiuser multiple-input singleoutput (MISO) system with practical discrete phase shifts at the IRS. In the considered system, we adopt the general correlated Rician fading channel to model the various links between the AP, IRS and users. The active precoding vectors at the AP and passive phase shifts at the IRS are jointly optimized to maximize the long-term average weighted sumrate of the users. Moreover, in order to alleviate the high signal processing complexity and training overhead for acquiring the I-CSI, we propose a practical transmission protocol based on the measured channel statistics 1 and TTS beamforming optimization. Specifically, we assume that the IRS is equipped with N dedicated sensors/receiving circuits for statistical CSI (S-CSI) estimation, which is easier to implement as compared to accurately tracking the I-CSI at the IRS that varies much faster than its S-CSI. Once the S-CSI is estimated and fed back to the AP, the AP performs the optimization of the IRS long-term phase shifts based on it and sends their values to the IRS, which sets the phase shifts accordingly for the subsequent time slots regardless of the instantaneous channel variations, as long as the S-CSI remains unchanged (e.g., in the case of a quasi-static user in the vicinity of the IRS). In the meanwhile, at each time slot, the short-term transmit precoding vectors at the AP are dynamically designed to cater to the effective I-CSI with fixed IRS phase shifts.
In particular, we first consider the single-user case for the purpose of exposition and drawing useful insights. By deriving an upper bound of the achievable average rate, we show that the original stochastic optimization problem can be transformed into a deterministic non-convex optimization problem. To tackle this new problem, instead of resorting to the commonly used semidefinite relaxation (SDR) method or the successive refinement algorithm based on the block coordinate descent (BCD) method , we propose a new algorithm by leveraging the penalty dual decomposition (PDD) technique , which enables updating the optimization variables in parallel and thus can potentially reduce the computational time substantially as compared to the algorithms in , if a multi-core processor with parallel computing capability is available. 2 Numerical results show that the proposed PDD-based algorithm can achieve near-optimal performance. Furthermore, it is found that as the channel deterministic components become dominant and/or the channel correlation is high in the considered channel model, the rate loss of the proposed TTS optimization with S-CSI as compared to that assuming ideal I-CSI is greatly reduced.
Next, we consider the general multiuser case. Different from the single-user case, deriving closed-form expressions of the achievable average rates of all users in terms of the IRS phase shifts only is difficult because we are unable to obtain the optimal transmit precoding vectors as explicit functions of the IRS phase shifts. To make the problem tractable, we propose an iterative TTS stochastic successive convex approximation (SSCA) algorithm, where in each iteration, a quadratic surrogate of the objective function is constructed based on some appropriately generated channel realizations/samples and the current phase shifts. Then, by employing the Lagrange dual method to solve the resultant quadratic optimization problem, the phase shifts are iteratively updated with low complexity. On the other hand, with fixed IRS phase shifts, the short-term transmit precoding optimization problems over different channel realizations are efficiently solved by applying the weighted minimum mean-squared error (WMMSE) algorithm . Numerical results validate the effectiveness of the proposed algorithm and show that using IRS with practical discrete phase shifts under S-CSI can still improve the rate performance significantly over the conventional system without IRS. Moreover, we draw useful insights into the effects of the channel deterministic components and correlation on the proposed TTS algorithm performance.
To the best of our knowledge, this is the first work on the TTS beamforming optimization for IRS-aided communication systems and the new contributions of this paper in view of the existing literature are summarized as follows: 1) A new TTS joint active and passive beamforming scheme for an IRS-aided multiuser MISO system is proposed to reduce the channel training overhead and passive beamforming design complexity, where the long-term discrete IRS phase shifts are optimized based on S-CSI and the short-term transmit precoding vectors at the AP are designed according to the effective I-CSI. 2) To solve the considered TTS optimization problem efficiently, a new PDD-based algorithm and a new SSCA algorithm are respectively proposed for the single-user and multiuser cases. Both algorithms constitute efficient variable updating steps, which either admit closed-form solutions or can be carried out via simple iterative procedures.
3) Extensive numerical results are presented to validate the effectiveness of the proposed TTS transmission protocol and algorithms. The impacts of the IRS channel deterministic Rician components and correlation coefficients on the system performance are investigated and useful insights are drawn.
The rest of the paper is organized as follows. In Section II, we present the system model, the proposed transmission protocol and the corresponding TTS problem formulation. In Sections III and IV, we propose efficient algorithms to solve the TTS problems in the single-user and multiuser cases, respectively. In Section V, numerical results are provided to evaluate the performance of the proposed algorithms. Finally, we conclude the paper in Section VI.
Notations: Scalars, vectors and matrices are respectively denoted by lower/upper case, boldface lower case and boldface upper case letters. For an arbitrary matrix A, A T , A * and A H denote its transpose, conjugate and conjugate transpose, respectively, and A −1 denotes the inverse of a square matrix A. sum(A) denotes the summation of all elements in A and A(m, n) denotes the element on the m-th row and n-column of matrix A. · and · ∞ denote the Euclidean norm and infinity norm of a complex vector, respectively, and | · | denotes the absolute value of a complex scalar or the cardinality of a finite set. ⊙ and ⊗ denote the Hadamard product and the Kronecker product. For any numbers x 1 , · · · , x N , diag(x 1 , · · · , x N ) denotes a diagonal matrix with x 1 , · · · , x N being its diagonal elements and diag(A) denotes a vector which contains the diagonal elements of matrix A. The letter j will be used to represent √ −1 when there is no ambiguity. For a complex number x, ℜ{x} denotes its real part. C n×m denotes the space of n × m complex matrices. I and 1 denote an identity matrix and an all-one matrix/vector with appropriate dimensions, respectively. E{·} represents the statistical expectation operator. ⌊x⌋ denotes the maximum integer no larger than x. The set difference is defined as A\B {x|x ∈ A, x / ∈ B}.
A. System Model
As shown in Fig. 1, we consider a multiuser MISO downlink communication system where an IRS equipped with N reflecting elements is deployed to enhance the communications from an AP with M antennas to K single-antenna users. The users are assumed to be in the vicinity of the IRS and of low mobility. The IRS is attached to a smart controller that is able to communicate with the AP via a separate backhaul link for coordinating transmission and exchanging information, such as CSI and IRS phase shifts . Since the signal transmitted through the AP-IRS-user link suffers from the double path loss, the signals reflected by IRS two or more times are ignored , .
Let G ∈ C^{N×M}, h_{r,k} ∈ C^{N×1} and h_{d,k} ∈ C^{M×1} denote the baseband equivalent channels of the AP-IRS, IRS-user k (k ∈ K ≜ {1, ..., K}) and AP-user k links, respectively. Then, the received signal of user k can be expressed as y_k = (h_{r,k}^H Θ G + h_{d,k}^H) x + n_k, where x denotes the transmitted signal vector from the AP, n_k denotes the additive receiver noise at user k, and Θ denotes an N × N diagonal reflection coefficient matrix (also known as the passive beamforming matrix), which can be written as Θ = diag(φ_1, φ_2, ..., φ_N) with φ_n = a_n e^{jθ_n}, a_n ∈ [0, 1] and θ_n ∈ [0, 2π). Although the reflection amplitudes {a_n} and phase shifts {θ_n} can be adjusted independently in theory, it is practically costly to control them independently and simultaneously. Therefore, we assume a_n = 1, ∀n ∈ N, in this paper to maximize the signal reflection of the IRS. Let Q denote the number of control bits for phase-shifting per IRS element; the discrete phase-shift values are assumed to be obtained by uniformly quantizing the interval [0, 2π), i.e., θ_n is restricted to {0, 2π/L, ..., 2π(L−1)/L} with L = 2^Q, and accordingly each reflection coefficient φ_n takes values in the discrete set F. Moreover, since the distances between the IRS and its served users are relatively small, IRS elements reflect signals with a finite angular spread and a user-location dependent mean angle in practice. Therefore, we assume that the channel statistics of the IRS-user links are user-location dependent. In contrast, since the distances between the AP and users are much larger than those between the IRS and users, we assume that the second-order statistics of the AP-user links are identical for all users. Specifically, the channel of the IRS-user k link can be modeled as h_{r,k} = sqrt(β_Iu/(1+β_Iu)) \bar z_{r,k} + sqrt(1/(1+β_Iu)) Φ_{r,k}^{1/2} z_{r,k}, where z_{r,k} ∈ C^{N×1} has i.i.d. CSCG entries with zero mean and unit variance accounting for small-scale fading (assumed to be Rayleigh fading); Φ_{r,k} ∈ C^{N×N} is the spatial correlation matrix between the IRS and user k; \bar z_{r,k} and β_Iu denote the deterministic component and the Rician factor, respectively. Similarly, for the AP-IRS and AP-user links, we have G = sqrt(β_AI/(1+β_AI)) \bar F + sqrt(1/(1+β_AI)) Φ_r^{1/2} F Φ_d^{1/2} and h_{d,k} = sqrt(β_Au/(1+β_Au)) \bar z_{d,k} + sqrt(1/(1+β_Au)) Φ_d^{1/2} z_{d,k}, where \bar F and \bar z_{d,k} denote the deterministic components, F ∈ C^{N×M} and z_{d,k} ∈ C^{M×1} denote the corresponding small-scale fading components defined similarly to z_{r,k}, and Φ_d and Φ_r denote the AP transmit correlation matrix and the IRS receive correlation matrix, respectively. As we consider low-mobility users, the spatial correlation matrices \barΦ ≜ {Φ_r, Φ_{r,k}, Φ_d} (assumed to be real matrices) and deterministic components H_LoS ≜ {\bar z_{r,k}, \bar z_{d,k}, \bar F}, i.e., the S-CSI, may change slowly in practice. In contrast, the I-CSI can vary much more rapidly due to the phase variations induced by the relatively slight movement of the users and/or the scattering objects in the environment. Note that there are two types of "correlation" in the considered channel model: one is due to the existence of deterministic components, while the other is due to the scattering environment and the antenna/reflecting element configurations. For simplicity, we integrate the terms involving the Rician factors into {\bar z_{r,k}, z_{r,k}, \bar F, F, \bar z_{d,k}, z_{d,k}}; then the channel models can be equivalently and more concisely rewritten as h_{r,k} = \bar z_{r,k} + Φ_{r,k}^{1/2} z_{r,k}, G = \bar F + Φ_r^{1/2} F Φ_d^{1/2}, and h_{d,k} = \bar z_{d,k} + Φ_d^{1/2} z_{d,k}.
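To make the channel model concrete, the following NumPy sketch (an illustration only, not code from the paper; the system sizes, the exponential correlation model, and all parameter values are assumptions) draws one realization of the correlated Rician channels and forms the effective AP-user channel G^H Θ^H h_{r,k} + h_{d,k} for a given discrete phase-shift vector.

import numpy as np

rng = np.random.default_rng(0)
M, N, Q = 4, 32, 2                      # AP antennas, IRS elements, phase-shift bits (assumed values)
L = 2 ** Q                              # number of discrete phase-shift levels

def cn(*shape):
    # i.i.d. CSCG entries with zero mean and unit variance
    return (rng.standard_normal(shape) + 1j * rng.standard_normal(shape)) / np.sqrt(2)

def corr_sqrt(dim, rho):
    # matrix square root of an exponential correlation matrix (an assumed correlation model)
    idx = np.arange(dim)
    corr = rho ** np.abs(idx[:, None] - idx[None, :])
    w, V = np.linalg.eigh(corr)
    return V @ np.diag(np.sqrt(np.maximum(w, 0.0))) @ V.T

# Deterministic (LoS) components and correlation square roots (placeholders for the S-CSI).
z_r_bar, z_d_bar, F_bar = cn(N), cn(M), cn(N, M)
Phi_rk_h, Phi_r_h, Phi_d_h = corr_sqrt(N, 0.6), corr_sqrt(N, 0.6), corr_sqrt(M, 0.3)

# One realization of the small-scale fading and the resulting channels (concise forms above).
h_r = z_r_bar + Phi_rk_h @ cn(N)                 # IRS-user link
h_d = z_d_bar + Phi_d_h @ cn(M)                  # AP-user link
G = F_bar + Phi_r_h @ cn(N, M) @ Phi_d_h         # AP-IRS link

# A discrete phase-shift vector and the effective AP-user channel h = G^H Theta^H h_r + h_d.
theta = 2 * np.pi * rng.integers(L, size=N) / L
Theta = np.diag(np.exp(1j * theta))
h_eff = G.conj().T @ Theta.conj().T @ h_r + h_d
print(h_eff.shape)                               # (M,)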
C. Transmission Protocol
Since the acquisition of the effective fading channels {h_k ≜ G^H Θ^H h_{r,k} + h_{d,k}} from the AP to the users with given fixed IRS phase shifts is much easier in practice as compared with that of the IRS-associated channels G and {h_{r,k}}, we propose a hierarchical transmission protocol in this paper. Specifically, we focus on a time interval within which the S-CSI of all links is assumed to remain constant, as shown in Fig. 2. The considered time interval consists of T_s ≫ 1 time slots and can be divided into three transmission phases. The small-scale fading coefficients {z_{r,k}}, {z_{d,k}} and F are assumed to be constant within each time slot (or equivalently, the I-CSI H̃ ≜ {h_{d,k}, h_{r,k}, G}). Each time slot is further divided into two sub-slots, where the first sub-slot is for effective fading channel estimation and the second is for data transmission.
In the first phase, the IRS is in the sensing mode and the channel statistical information between the IRS and the AP/users can be estimated by resorting to the dedicated sensors/receiving circuits at the IRS and leveraging the pilots and/or data transmitted in both uplink and downlink using standard mean and covariance matrices estimation techniques , . 4 Note that in this phase, the AP serves the users by only utilizing the direct channels {h d,k } estimated in the first sub-slot of each time slot as if the IRS does not exist. The direct channels {h d,k } in this phase are equivalent to the effective fading channels {h k } since the IRS is in the sensing mode. In the second phase, based on the measured S-CSI of the AP-IRS-user links (fed back by the IRS) and that of the AP-user links (measured in Phase I), the AP computes the IRS passive beamforming matrix Θ, and sends it to the IRS through the dedicated backhaul link. Finally, in the third phase, the IRS is switched to the reflection mode with the phase shifts given in Θ to enhance the transmissions from the AP to the users. Specifically, for each time slot during this phase, the AP estimates the effective I-CSI {h k } by applying the channel estimation methods in traditional MIMO systems and designs its transmit precoding vectors {w k } accordingly. 5 Note that since the number of reflecting elements at the IRS, N , is usually much larger than that of transmit antennas at the AP, the effective CSI {h k } usually has a much smaller dimension than the full channel ensembleH. Therefore, compared to the existing transmit and reflect beamforming optimization in e.g., , , , , based on the I-CSI of all channels, the beamforming design complexity and channel estimation overhead can be significantly reduced by the proposed new protocol based on S-CSI. Besides, for fast-varying channels, using I-CSI may not be helpful as previously acquired I-CSI will become outdated quickly, which renders the proposed S-CSI-based protocol more suitable.
D. Problem Formulation
In this paper, we aim to maximize the average weighted sum-rate of all the users by jointly optimizing the short-term active transmit precoding at the AP and long-term passive reflect beamforming at the IRS, subject to the maximum transmit power constraint at the AP. The corresponding optimization problem can be formulated as (7): max_{Θ ∈ F^N} E_{H̃}{ max_{{w_k}: Σ_{k∈K} ||w_k||² ≤ P} Σ_{k∈K} α_k r_k(Θ, {w_k}, H̃) }, where α_k represents the weight/priority of user k and P denotes the total transmit power budget. The inner rate-maximization problem in (7) is over the short-term transmit precoding in each time slot/channel realization for given phase shifts Θ at the IRS, while the outer rate-maximization problem is over the long-term IRS phase shifts, where the expectation is taken over all channels' random realizations within the considered time interval. Furthermore, let Ω ≜ {w_k(H̃) ∈ X, ∀H̃} denote the set of transmit precoding vectors (each as a function of the random instantaneous channel H̃) that satisfy the constraint Σ_{k∈K} ||w_k||² ≤ P, and define \bar r(Θ, Ω) = [\bar r_1(Θ, Ω), ..., \bar r_K(Θ, Ω)]^T as the achievable average rate vector, where \bar r_k(Θ, Ω) = E{r_k(Θ, {w_k(H̃)}, H̃)}. Then, problem (7) can be rewritten in a more compact form as (8): max_{Θ ∈ F^N} α^T \bar r(Θ, Ω), where α ≜ [α_1, ..., α_K]^T and F^N is defined as the Cartesian product of N identical sets each given by F. Problem (8) is challenging to solve because 1) the short-term transmit precoding vectors {w_k} and the long-term IRS phase shifts Θ are intricately coupled in the objective function; 2) a closed-form expression of the achievable average rate of each user, \bar r_k(Θ, Ω), for given either Θ or Ω, is difficult to obtain in general; and 3) it is a mixed-integer non-linear program (MINLP) even for K = 1. Generally, there is no efficient method for solving the non-convex problem (8) optimally. In the next two sections, we propose two efficient algorithms to solve problem (8) sub-optimally in the single-user and multiuser cases, respectively.
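Because the outer objective in (8) is an expectation over the fading, it can only be evaluated numerically for a candidate phase-shift vector. The sketch below is illustrative only: it uses an i.i.d. Rayleigh channel sampler and a simple matched-filter precoder as stand-ins for the actual channel statistics and the optimal inner solution, and the noise power is an assumed parameter.

import numpy as np

rng = np.random.default_rng(1)
M, N, K, P, sigma2 = 4, 32, 3, 1.0, 0.1     # assumed system parameters (sigma2: noise power)
alpha = np.ones(K)                          # user weights

def cn(*shape):
    return (rng.standard_normal(shape) + 1j * rng.standard_normal(shape)) / np.sqrt(2)

def avg_weighted_sum_rate(v, num_samples=200):
    # Monte-Carlo estimate of the outer objective in (8) for a fixed phase-shift vector v.
    total = 0.0
    for _ in range(num_samples):
        H_r, H_d, G = cn(K, N), cn(K, M), cn(N, M)   # i.i.d. Rayleigh samples (simplified S-CSI)
        # Rows of h_eff are the effective channels h_k^H = h_{r,k}^H Theta G + h_{d,k}^H.
        h_eff = H_r.conj() @ np.diag(v) @ G + H_d.conj()
        # Matched-filter precoding with equal power split, a simple stand-in for the
        # optimal inner solution (the paper uses MRT for K = 1 and WMMSE for K > 1).
        W = h_eff.conj().T / np.linalg.norm(h_eff, axis=1) * np.sqrt(P / K)
        S = np.abs(h_eff @ W) ** 2                   # S[k, j] = |h_k^H w_j|^2
        sig = np.diag(S)
        intf = S.sum(axis=1) - sig
        total += alpha @ np.log2(1 + sig / (intf + sigma2))
    return total / num_samples

v = np.exp(1j * 2 * np.pi * rng.integers(4, size=N) / 4)   # a random 2-bit phase-shift vector
print(avg_weighted_sum_rate(v))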
III. SINGLE-USER CASE
In this section, we consider the single-user case, i.e., K = 1, where there is no multiuser interference. Accordingly, problem (8) reduces to (by dropping the user index) (9): max_{Θ ∈ F^N} \bar r(Θ, Ω). Note that a similar single-user problem was considered in prior work by exploiting the S-CSI, while in this section we address problem (9) under our considered fading channel model, which is more general. For any given phase-shift matrix Θ, it is well known that maximum-ratio transmission (MRT) at the AP is optimal, i.e., w = √P (G^H Θ^H h_r + h_d)/||G^H Θ^H h_r + h_d|| (10). Based on (10), we obtain the following proposition.
Proposition 1. In the single-user case, the achievable rate \bar r(Θ, Ω) in (9) is upper-bounded by a logarithmic function of the quadratic form v^H Φ v + v^H b + b^H v, where the matrix Φ, the vector b, and the additive constant are determined by the S-CSI, λ_i denotes the i-th eigenvalue of Φ_d, and Φ_{r,u} denotes the correlation matrix between the IRS and the user.
Proof. Please refer to Appendix A.
Based on Proposition 1, we can remove the log_2(·) operator in the logarithmic rate upper-bound function due to its monotonicity and ignore the constant terms; then problem (9) can be approximated by the following deterministic problem: (12): max_{v} v^H Φ v + v^H b + b^H v, s.t. v_n ∈ F, ∀n ∈ N, where v ≜ [φ_1, ..., φ_N]^T denotes the IRS phase-shift vector. For problem (12), it can be shown that when β_Au = β_AI = β_Iu → ∞, the objective function reduces to v^H diag{\bar z_r^H} \bar F \bar F^H diag{\bar z_r} v + v^H b + b^H v, and in this case the IRS phase-shift vector v is optimized based on the deterministic component H_LoS only, i.e., {\bar z_r, \bar z_d, \bar F}. In contrast, when β_AI = β_Iu → 0, i.e., in the case of an NLoS environment, the optimal solution to (12) is shown in Appendix B to be v = \barφ 1, for any phase shift with |\barφ| = 1, i.e., the phase shifts of all elements at the IRS should be identical. In general, v should be properly designed to strike a balance between the deterministic and NLoS channels.
Although problem (12) is much simplified compared to problem (9), it is still a non-convex quadratic programming problem with discrete constraints that is NP-hard in general. In and , various methods were proposed to address a similar problem by using e.g., the SDR method and the BCD method. However, the SDR method incurs a high complexity and the BCD method requires to update the phase shifts oneby-one iteratively. To reduce their computational time, we propose a new and alternative algorithm in this paper, namely, the PDD-based algorithm, which enables the optimization of IRS phase shifts in parallel. Besides, the proposed PDDbased algorithm is able to handle the discrete IRS phase-shift constraints and each of its iterations can be executed in closedform, as shown next.
A. Proposed PDD-based Algorithm
To facilitate the optimization of v in parallel, we introduce an auxiliary variable u = [u_1, ..., u_N]^T ∈ C^{N×1} (u_n = c_n e^{jϑ_n}), which satisfies u = v. As a result, problem (12) is equivalent to (13): max_{v, u} v^H Φ v + v^H b + b^H v, s.t. v = u, u_n ∈ F, ∀n ∈ N. To address problem (13), we propose the PDD-based algorithm consisting of two loops. In the outer loop, we update the dual variable associated with the constraint v = u and the penalty parameter (as will be introduced later in this subsection), while in the inner loop, we apply the block successive upper-bound minimization (BSUM) method to iteratively optimize the primal variables in different blocks. Specifically, we can write the augmented Lagrangian (AL) problem of (13) as (14): max_{v, u} v^H Φ v + v^H b + b^H v − (1/(2ρ))||v − u + ρλ||², s.t. ||v||² ≤ N, u_n ∈ F, ∀n ∈ N, where ρ is the penalty parameter and λ = [λ_1, ..., λ_N]^T denotes the dual variable vector associated with the constraint v = u. Note that ||v||² ≤ N is a new constraint that is added without loss of optimality due to |v_n| ≤ 1, and the necessity of this constraint will be clarified later. Then, we partition all the optimization variables in (14) into two blocks, i.e., v and u, and optimize them iteratively in the inner loop as follows.
Step 1 (optimizing v for given u): this subproblem is given by (15): max_{v} v^H Φ v + v^H b + b^H v − (1/(2ρ))||v − u + ρλ||², s.t. ||v||² ≤ N. Since its constraint is convex and the objective function can be expressed as a difference of two convex functions when u is fixed, we can apply the BSUM method to solve it approximately. Specifically, by resorting to the first-order Taylor expansion of v^H Φ v at a given point \bar v, problem (15) can be approximated by (16): max_{v} 2Re{\bar v^H Φ v} + v^H b + b^H v − (1/(2ρ))||v − u + ρλ||², s.t. ||v||² ≤ N. It is not difficult to observe that problem (16) is a convex quadratically constrained quadratic program (QCQP) with only one constraint, whose optimal solution can be obtained by exploiting the first-order optimality condition as (17): v = c if ||c||² ≤ N, and v = √N c/||c|| otherwise, where c = u − ρλ + 2ρΦ\bar v + 2ρb. Note that if the constraint ||v||² ≤ N is absent, one has to set ρ ≤ 1/(2λ_max(Φ)) in order to make problem (15) bounded. Therefore, if λ_max(Φ) is large, then the initial penalty (i.e., 1/(2ρ)) is large, which will severely restrict the search space of the proposed algorithm. Thus, the constraint ||v||² ≤ N is necessary.
Step 2 (optimizing u for given v): this subproblem can be written as (ignoring constant terms) (18): min_{u} ||u − (v + ρλ)||², s.t. u_n ∈ F, ∀n ∈ N. Due to the fact that {u_n} are decoupled in both the objective function and the constraints of problem (18), we can easily obtain the optimal phase shifts of this subproblem in parallel as follows: first obtain the optimal continuous phase-shift solution as ϑ_n = ∠(v_n + ρλ_n), and then map ϑ_n to the nearest discrete value in {0, 2π/L, ..., 2π(L−1)/L}. Next, we consider the outer loop, where the dual variable λ can be updated by the dual ascent step (19): λ ← λ + (1/ρ)(v − u). The main steps of the proposed PDD-based algorithm are summarized in Algorithm 1, where c < 1 is a constant scaling factor that is used to increase the value of the penalty term involved in problem (14) in each outer iteration. We note that the penalty parameter ρ can affect the convergence of the PDD-based algorithm. Specifically, if ρ decreases too fast, the AL problem (14) will become ill-conditioned, which may lead to undesired results or convergence to some unfavorable points; on the other hand, if ρ decreases too slowly, it may affect the convergence speed of the PDD-based algorithm. Therefore, the parameter c should be appropriately chosen to control the decreasing speed of ρ (our simulations indicate that the performance is not sensitive to the specific choice of c, so the proposed PDD-based algorithm is quite robust in this respect). Besides, Algorithm 1 is guaranteed to converge to a set of stationary solutions of problem (12) in the continuous phase-shift case, while for the discrete phase-shift case, the solution obtained by solving problem (14) always satisfies the equality constraint v = u as ρ → 0 (1/ρ → ∞). Therefore, Algorithm 1 is able to converge to a high-quality suboptimal solution, as will be verified later in Section V.
Algorithm 1 Proposed PDD-based Algorithm for Solving Problem (9)
1: Initialize v^0, u^0 and c; set the outer iteration number i_out ← 0.
2: repeat
3:   repeat
4:     Update v and u according to Steps 1-2.
5:   until the fractional decrease of the objective value of (14) is below a certain threshold ε_in > 0 or the maximum number of inner iterations is reached.
6:   Update the dual variable by (19) and decrease the penalty parameter as ρ ← cρ.
7:   Set i_out ← i_out + 1.
8: until convergence or the maximum number of outer iterations is reached.
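The inner updates of Algorithm 1 admit simple closed forms, so a compact implementation is possible. The sketch below is illustrative only: Φ and b are random stand-ins for the actual S-CSI-dependent problem data, and the iteration counts, penalty initialization, and scaling factor are assumptions.

import numpy as np

rng = np.random.default_rng(2)
N, Q = 16, 2
L = 2 ** Q
A = rng.standard_normal((N, N)) + 1j * rng.standard_normal((N, N))
Phi = A @ A.conj().T / N                      # Hermitian PSD stand-in for Phi in (12)
b = (rng.standard_normal(N) + 1j * rng.standard_normal(N)) / np.sqrt(2)

def to_discrete(phase):
    # nearest point of the discrete phase set {0, 2*pi/L, ..., 2*pi*(L-1)/L}
    return 2 * np.pi * np.round(phase * L / (2 * np.pi)) / L

v = np.exp(1j * 2 * np.pi * rng.integers(L, size=N) / L)
u = v.copy()
lam = np.zeros(N, dtype=complex)              # dual variable for the constraint v = u
rho, c = 1.0, 0.8                             # initial penalty and scaling factor (assumed)

for outer in range(30):
    for inner in range(20):
        # Step 1: v-update, closed-form solution (17) of the convex QCQP (16).
        cvec = u - rho * lam + 2 * rho * (Phi @ v) + 2 * rho * b
        v = cvec if np.linalg.norm(cvec) ** 2 <= N else np.sqrt(N) * cvec / np.linalg.norm(cvec)
        # Step 2: parallel u-update of (18), projected onto the discrete phase-shift set.
        u = np.exp(1j * to_discrete(np.angle(v + rho * lam)))
    # Outer loop: dual ascent step (19) and penalty reduction.
    lam = lam + (v - u) / rho
    rho *= c

v_opt = u                                     # already unit-modulus with discrete phases
print(np.real(v_opt.conj() @ Phi @ v_opt) + 2 * np.real(v_opt.conj() @ b))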
B. Complexity Analysis
The complexity of Algorithm 1 is mainly due to solving problem (16), which can be shown to be O(N^2). Thus, the overall complexity of Algorithm 1 is O(I_o I_i N^2), where I_o and I_i denote the maximum outer and inner iteration numbers. In contrast, the worst-case complexity of the SDR method is O(N^{6.5}) and that of the successive refinement algorithm is O(I N^2), where I denotes the number of iterations required for convergence. To summarize, the complexity of the proposed Algorithm 1 is much lower than that of the SDR method. Besides, although Algorithm 1 and the successive refinement algorithm exhibit the same complexity order, Algorithm 1 can reduce the computational time, especially for a practically large IRS, if a multi-core processor is available (see the parallel update in Step 2 of Section III-A).
IV. MULTIUSER CASE
In this section, we address the multiuser case, where multiple users are assumed to share the same time-frequency resource and multiuser interference exists in general. Specifically, we leverage a stochastic optimization framework to propose a novel SSCA algorithm, where the phase shifts at the IRS (i.e., the long-term variables) are updated by solving the outer rate-maximization problem in (7) with randomly generated channel samples, and the transmit precoding vectors at the AP (i.e., the short-term variables) are optimized in each time slot by applying the WMMSE method.
A. Short-Term Optimization Problem
At each time slot m, the AP first acquires the effective fading channels H(m) ≜ {h_1(m), ..., h_K(m)} with fixed phase shifts v. Then, the AP designs the short-term transmit precoding vectors {w_k} by applying the WMMSE method to solve the following weighted sum-rate maximization problem for given H(m): (20): max_{{w_k}} Σ_{k∈K} α_k r_k(v, {w_k}, H(m)), s.t. Σ_{k∈K} ||w_k||² ≤ P. Note that {w_k} are optimized based on the effective fading channels {h_k} only. The basic idea of the WMMSE method is to first transform problem (20) into an equivalent weighted MSE minimization problem, and then update the optimization variables alternately until convergence is achieved. The details of this method, which is known to yield a stationary solution of problem (20), are omitted for brevity.
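For problem (20), the WMMSE method alternates between closed-form updates of the receive coefficients, the MSE weights, and the transmit precoders, with a bisection on the power-constraint multiplier. A minimal MISO version is sketched below (illustrative only; the channel rows h_k^H are drawn at random, and the noise power, user weights, and iteration counts are assumptions).

import numpy as np

rng = np.random.default_rng(3)
M, K, P, sigma2 = 4, 3, 1.0, 0.1            # assumed parameters (sigma2: noise power)
alpha = np.ones(K)                          # user weights
H = (rng.standard_normal((K, M)) + 1j * rng.standard_normal((K, M))) / np.sqrt(2)  # rows: h_k^H

def wmmse(H, num_iter=50):
    W = H.conj().T / np.linalg.norm(H, axis=1) * np.sqrt(P / K)    # MRT initialization
    for _ in range(num_iter):
        HW = H @ W                                                 # HW[k, j] = h_k^H w_j
        power = np.sum(np.abs(HW) ** 2, axis=1) + sigma2
        u = np.diag(HW) / power                                    # MMSE receive coefficients
        e = 1.0 - np.abs(np.diag(HW)) ** 2 / power                 # resulting MSEs
        wts = alpha / e                                            # combined weights alpha_k / e_k
        A = (H.conj().T * (wts * np.abs(u) ** 2)) @ H              # sum_k wts_k |u_k|^2 h_k h_k^H
        rhs = H.conj().T * (wts * u)                               # columns: wts_k u_k h_k
        lo, hi = 0.0, 1e6                                          # bisection on power multiplier
        for _ in range(60):
            mu = 0.5 * (lo + hi)
            W = np.linalg.solve(A + mu * np.eye(M), rhs)
            lo, hi = (mu, hi) if np.sum(np.abs(W) ** 2) > P else (lo, mu)
    return W

W = wmmse(H)
HW = H @ W
sig = np.abs(np.diag(HW)) ** 2
rates = np.log2(1 + sig / (np.sum(np.abs(HW) ** 2, axis=1) - sig + sigma2))
print(alpha @ rates, np.sum(np.abs(W) ** 2))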
B. Long-Term Optimization Problem
When the S-CSI is obtained, the AP optimizes the IRS phase shifts v by solving problem (8). Note that unlike the single-user case, for which the closed-form MRT-based optimal transmit precoding is available, the optimized precoding vectors via WMMSE in the multiuser case cannot be expressed explicitly; thus it is difficult to obtain a closed-form expression of α^T \bar r (as well as its lower or upper bounds) in terms of v. To address this issue, we propose an efficient algorithm, where v is updated iteratively by maximizing a concave surrogate function of α^T \bar r, denoted by \hat f^t(v), with t denoting the iteration index. Furthermore, we relax the amplitudes of v to be in the interval [0, 1], which will be shown to help accelerate the convergence of the proposed algorithm by simulation in Section V. Note that we can simply set v_n ← e^{j∠v_n}, ∀n ∈ N, to recover the unit-modulus solution of v after convergence is reached. Let v^{t−1} denote the IRS phase-shift vector obtained from the (t − 1)-th iteration. Then the t-th iteration of the proposed algorithm, for any t ≥ 1, is described as follows.
First, T_H new channel samples {H̃^t(l)}_{l=1,...,T_H} ≜ {h_{r,k}(l), h_{d,k}(l), G(l)}^t_{l=1,...,T_H} are randomly generated according to the S-CSI \barΦ and H_LoS. Based on them, we update the surrogate function to obtain \hat f^t(v), which can be viewed as a concave approximation of the objective function α^T \bar r of problem (8). Specifically, based on {H̃^t(l)}_{l=1,...,T_H} and the phase-shift vector v^{t−1}, \hat f^t(v) is constructed as (21): \hat f^t(v) = α^T \hat r^t + 2Re{(f^t)^H (v − v^{t−1})} − τ ||v − v^{t−1}||², where the last term is added to ensure that −\hat f^t(v) is uniformly and strongly convex with respect to (w.r.t.) v so as to guarantee the convergence of the proposed algorithm with any constant τ > 0. Here, \hat r^t = [\hat r_1^t, ..., \hat r_K^t]^T is an approximation of the achievable average rate vector, which is updated as (22): \hat r_k^t = (1 − ρ^t) \hat r_k^{t−1} + ρ^t (1/T_H) Σ_{l=1}^{T_H} r_k(v^{t−1}, {w_k^t(l)}; H̃^t(l)), with \hat r_k^0 = 0, ∀k ∈ K, where ρ^t satisfies Assumption 1, which will be specified later, and w_k^t(l) denotes the transmit precoding vector corresponding to the l-th generated channel sample with fixed v^{t−1}, i.e., w_k^t(l) ≜ w_k(v^{t−1}, H̃^t(l)). Moreover, f^t = [f_1^t, ..., f_N^t]^T is an approximation of the partial derivative ∇_{v*}(α^T \bar r), which is updated in a similar recursive manner (23), i.e., by averaging (with weight ρ^t) the gradients of the instantaneous weighted sum-rate over the T_H generated samples, where each per-sample gradient is obtained from F^t(l), the Jacobian matrix of the achievable rate vector r(v, {w_k}; H̃) w.r.t. v*, evaluated at (v^{t−1}, {w_k^t(l)}, H̃^t(l)); its detailed expression is given in Appendix C. The iterative approximations \hat f^t(v) and f^t converge to the true values of the objective function α^T \bar r of problem (8) and its gradient w.r.t. v*, respectively, as t → ∞. Therefore, based on the randomly generated channel samples {H̃^t(l)} at the beginning of each iteration and the corresponding solutions {w_k^t(l)} of the short-term problems, the achievable average rate \bar r(v, {w_k}; H̃), although not expressed explicitly, can be approximated by updating \hat r and ∇_{v*}(α^T \bar r) in an iterative manner as in (22) and (23). With (21), we only need to solve the following quadratic optimization problem, which is convex and whose optimal solution can be obtained in closed form as shown next.
Remark 1. Note that to make the overall problem tractable, we have ignored the discrete constraints and relaxed {v_n} as continuous variables in problem (24). After obtaining the optimized IRS phase shifts v, we project each of its entries independently onto F to obtain a unit-modulus solution, i.e., v_n = arg min_{\hat v_n ∈ F} |∠\hat v_n − ∠v_n|, ∀n ∈ N.
It was shown in prior work that using Q = 2 or 3 bits is practically sufficient to achieve near-optimal performance. In this paper, although we consider a different problem, it will be shown in Section V that when Q ≥ 2 bits, the performance loss due to discrete phase shifts is also negligible.
Note that in problem (24), all the optimization variables {v_n} can be fully decoupled and thus we can optimize each of them independently in parallel. As such, problem (24) w.r.t. v_n can be equivalently rewritten (by ignoring constant terms) as problem (26), which is a convex optimization problem. By resorting to the Lagrange duality method, we can obtain the corresponding Lagrangian dual function, where λ is the dual variable associated with the constraint in (26). Then, the optimal solution of problem (26) can be obtained in closed form as follows: if |v_n^{t−1} + f_n^t/τ| ≤ 1, we have v̂_n^t = v_n^{t−1} + f_n^t/τ; otherwise, we have v̂_n^t = (τ v_n^{t−1} + f_n^t)/(τ + λ^opt), where λ^opt denotes the optimal dual variable and is given by λ^opt = |τ v_n^{t−1} + f_n^t| − τ. Therefore, the long-term IRS phase shifts v can be updated according to (27), where γ_t is an iteration-dependent constant that satisfies certain diminishing step-size conditions (referred to as Assumption 1). The above procedure is repeated until convergence and the overall algorithm is summarized in Algorithm 2.
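The per-element closed-form solution of (26) and the subsequent update of v can be sketched as follows. The smoothing step v^t = (1 − γ_t) v^{t−1} + γ_t v̂^t is our assumption about the exact form of (27); it is the usual SSCA-style update consistent with the iteration-dependent constant γ_t, and the function name is ours.

import numpy as np

def long_term_phase_update(v_prev, f_t, tau, gamma_t):
    # v_prev: (N,) phase shifts v^{t-1}; f_t: (N,) gradient surrogate from (23);
    # tau > 0: proximal constant; gamma_t: smoothing step size satisfying Assumption 1.
    v_unc = v_prev + f_t / tau                      # unconstrained maximizer of (26)
    inside = np.abs(v_unc) <= 1.0
    # When the amplitude constraint is active, scale back onto the unit circle, which is
    # exactly what the optimal dual variable lambda_opt = |tau*v_prev + f_t| - tau achieves.
    v_hat = np.where(inside, v_unc, v_unc / np.maximum(np.abs(v_unc), 1e-12))
    # Assumed form of (27): smoothed update with iteration-dependent step gamma_t.
    return (1.0 - gamma_t) * v_prev + gamma_t * v_hat

With τ = 0.01, ρ_t = t^{−0.8} and γ_t = t^{−1} as in the simulation setup of Section V, each long-term iteration thus reduces to simple vector operations once the channel samples and short-term precoders are available.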
The convergence of Algorithm 2 is analyzed as follows. Consider problem (7) with continuous phase shifts and amplitudes adjustable within [0, 1], and refer to it as problem C. If problem C has at least one stationary solution, then every limit point v^lim of the sequence {v^t}_{t=1}^∞ generated by Algorithm 2 is a stationary point of the long-term (outer rate-maximization) problem of C when the transmit precoding vectors are obtained by the WMMSE method. Moreover, Algorithm 2 almost surely converges to the set of stationary solutions of problem C; the detailed proof can be found in the cited references. The advantages of the proposed Algorithm 2 are summarized as follows: 1) by iteratively constructing a surrogate function based on randomly generated channel samples and the corresponding solutions of the short-term optimization problems, it resolves the difficulty caused by the unavailability of a closed-form expression of α_Tr; 2) the long-term IRS phase shifts v and short-term transmit precoding vectors {w_k} are jointly optimized to maximize α_Tr; and 3) the optimization is conducted through a sequence of simple and efficient updates of the variables.
Step 1 (Long-term optimization with given S-CSI):
• Generate T_H new channel samples according to the known S-CSI Φ and H_LoS.
• Update the surrogate function by (21), where {w_k^t(l)} are obtained by applying the WMMSE method to solve problem (20) with the generated channel samples and fixed v^{t−1}.
• Solve problem (26) to obtain the optimal v̂^t and update v^t according to (27).
• Let t = t + 1 and return to Step 1. Repeat the above until convergence, and denote the converged phase-shift vector as v.
Step 2 (Short-term optimization at each time slot m):
• Apply the WMMSE method with the given v and H(m) to obtain the short-term variables {w_k}.
C. Complexity Analysis
From the above, it is observed that the complexity of Algorithm 2 is mainly due to computing {w_k(l)}_{l=1,···,T_H} for the generated channel samples in the long-term optimization problem. For each l ∈ {1, · · · , T_H}, the WMMSE method is applied to obtain the corresponding transmit precoding vectors, whose complexity is dominated by the matrix inversion required for updating {w_k}, which is O(J K M^3), where J denotes the number of WMMSE iterations. Accordingly, the complexity for updating the long-term IRS phase-shift vector v is O(I(T_H J K M^3 + K N M)), where I denotes the number of iterations required for the phase-shift optimization in Section IV-B. Therefore, the overall complexity of Algorithm 2 is given by O(I(T_H J K M^3 + K N M) + T_s J K M^3), where T_s denotes the number of time slots over which the short-term precoding is computed.
V. SIMULATION RESULTS
In this section, we provide numerical results to evaluate the performance of the proposed algorithms and draw useful insights. The distance-dependent path loss is modeled as C_0 (d_link/D_0)^{−α}, where C_0 is the path loss at the reference distance D_0 = 1 meter (m), d_link represents the individual link distance and α denotes the path loss exponent. The path loss exponents of the AP-user, AP-IRS and IRS-user links are denoted by α_Au, α_AI and α_Iu, respectively. We assume that the IRS is deployed to serve the users that suffer from severe signal attenuation in the AP-user direct link and thus we set α_Au = 3.4, α_AI = 2.2 and α_Iu = 3, i.e., the path loss exponent of the AP-user link is larger than those of the AP-IRS and IRS-user links. In our simulations, a three-dimensional coordinate system is considered where the AP (equipped with a uniform linear array (ULA)) and the IRS (equipped with a uniform rectangular array (UPA)) are located on the x-axis and the y-z plane (or parallel to the x-z plane), respectively. In the single-user case, we set N = N_y N_z, where N_y and N_z denote the numbers of reflecting elements along the y-axis and z-axis, respectively, while in the multiuser case, we set N = N_x N_z with N_x denoting the number of reflecting elements along the x-axis. For the purpose of exposition, we fix N_y = 4 in the single-user case and N_x = 4 in the multiuser case. The reference antenna at the AP and the reference element at the IRS are located at (d_v, 0, 0) and (0, d_0 = 50 m, 3 m), respectively. Moreover, we consider the exponential correlation model for Φ_d, i.e., [Φ_d]_{i,j} = r_d^{|i−j|} (28), with r_d denoting the corresponding correlation coefficient, and model Φ_r = Φ_r^h ⊗ Φ_r^v, where Φ_r^h and Φ_r^v denote the spatial correlation matrices of the horizontal and vertical domains, respectively, and are similarly defined as in (28) with r_r denoting the correlation coefficient. {Φ_{r,k} = Φ_{r,k}^h ⊗ Φ_{r,k}^v} (Φ_{r,u} for the single-user case) are similarly modeled as Φ_r, with {r_{r,k}} (r_{r,u} for the single-user case) denoting the corresponding correlation coefficients. The deterministic component of each channel is modeled as a random matrix/vector with i.i.d. CSCG entries of zero mean and unit variance, and kept fixed during the entire time interval. Other system parameters are set as follows unless otherwise specified: σ_k^2 = −80 dBm, P = 5 dBm, C_0 = −30 dB, N = 40; for the single-user case, we set M = 4, ε_in = 10^{−4}, ε_out = 10^{−6}, β_AI = β_Iu = 3 dB and β_Au = −3 dB, while for the multiuser case, we let M = 6, K = 4, T_H = 10, T_s = 2000, α_k = 1, ∀k ∈ K, c = 0.95, τ = 0.01, ρ_t = t^{−0.8}, γ_t = t^{−1}, β_AI = β_Iu = 5 dB and β_Au = −5 dB. All the results are averaged over 2000 independent channel realizations.
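As a rough illustration of how such correlated Rician channel samples could be generated for these simulations, a small sketch is given below. The exponential correlation matrix and the Rician mixing follow the model described above, but the exact generator used by the authors is not shown in the excerpt, so the function names, shapes and example parameters are assumptions.

import numpy as np

def exp_correlation(n, r):
    # Exponential correlation model, [Phi]_{i,j} = r^{|i-j|}, cf. (28).
    idx = np.arange(n)
    return r ** np.abs(idx[:, None] - idx[None, :])

def rician_channel_sample(H_los, Phi_rx, Phi_tx, beta_db, pathloss_db, rng):
    # H_los: deterministic (LoS) component; Phi_rx/Phi_tx: receive/transmit correlation;
    # beta_db: Rician factor in dB; pathloss_db: distance-dependent path loss in dB.
    beta = 10 ** (beta_db / 10.0)
    n_rx, n_tx = H_los.shape
    W = (rng.standard_normal((n_rx, n_tx)) + 1j * rng.standard_normal((n_rx, n_tx))) / np.sqrt(2)
    H_nlos = np.linalg.cholesky(Phi_rx) @ W @ np.linalg.cholesky(Phi_tx).conj().T
    H = np.sqrt(beta / (1 + beta)) * H_los + np.sqrt(1 / (1 + beta)) * H_nlos
    return np.sqrt(10 ** (-pathloss_db / 10.0)) * H

# Example: one AP-IRS link sample with receive correlation r_r = 0.5 (N = 40, M = 6).
rng = np.random.default_rng(0)
G = rician_channel_sample(H_los=np.ones((40, 6)), Phi_rx=exp_correlation(40, 0.5),
                          Phi_tx=np.eye(6), beta_db=5.0, pathloss_db=80.0, rng=rng)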
A. Single-User Case
We first consider the single-user case where the user is assumed to move along the line (2 m, d, 0), as shown in Fig. 3. For comparison, we adopt the following five benchmark schemes: 1) the SDR method with Gaussian randomization, 2) a naive scheme where the phase shifts at the IRS are obtained by Algorithm 1 with I-CSI at the first time slot and then kept fixed for all the subsequent time slots, 3) a single-timescale scheme where both v and w are optimized based on the S-CSI and kept fixed for all the time slots, 4) the random phase-shift scheme where the phase shifts at the IRS are randomly generated at each time slot, and 5) the conventional scheme using MRT beamforming at the AP, but without the IRS.
First, we compare in Fig. 4 the convergence behaviors of the BCD algorithm and the proposed PDD-based algorithm (Algorithm 1) with d = 50 m. In our simulations, using the SDR method with Gaussian randomization to solve problem (12) achieves near-optimal performance, as shown for a similar problem in prior work. Therefore, its performance is considered as an upper bound for the BCD and the PDD-based algorithms. From Fig. 4 (a) and (b), it is observed that the BCD algorithm is monotonically convergent, while this is not the case for the PDD-based algorithm in general. Furthermore, there exist fluctuations of the objective value in the initial few iterations of the PDD-based algorithm. This is mainly because when the initial penalty is relatively small, the solutions obtained by the PDD-based algorithm do not satisfy v_n = u_n, ∀n ∈ N, thus resulting in the oscillatory behavior. As the penalty increases with the iteration number, the constraint violation, i.e., ‖v − u‖_∞, is forced to approach the predefined accuracy ε_out, as shown in Fig. 4 (c). As a result, the PDD-based algorithm is guaranteed to converge, which can be observed from Fig. 4 (b). Moreover, one can observe from Fig. 4 (a) and (b) that the PDD-based algorithm achieves near-optimal performance in the continuous phase-shift case, i.e., Q = ∞, and that for the discrete phase-shift case (e.g., Q = 1), its performance is similar to that of the BCD algorithm.
1) Impact of the AP-user distance d: In Fig. 5, we plot the achievable average rate of the user versus the AP-user distance d with r r = r r,u = 0.5 and r d = 0.2. It is observed that when the user lies in the neighborhood of the IRS, the achievable average rate by using 1-bit phase shifters (Q = 1) with S-CSI is significantly higher than that without IRS and that with random phase shift at the IRS. This means that IRS is practically useful by creating a "signal hot spot" even with coarse and low-cost phase shifters and S-CSI. Moreover, it is observed that using IRS with 1-bit phase shifters results in a considerable performance loss as compared to the ideal case with continuous phase shifters. However, this performance gap can be effectively reduced by using higher-resolution phase shifters, e.g., Q = 2 and Q = 3. 2) Impact of the Rician factor: In Fig. 6, we plot the achievable average rate versus the Rician factor by fixing d = 50 m. To focus on the effect of Rician factor on the system rate performance, we assume β AI = β Iu = β, β Au = 0 and r r = r r,u = r d = 0, i.e., the AP-user link is assumed to follow Rayleigh fading (no deterministic components exist due to blockage) while the AP-IRS and IRS-user links are assumed to follow uncorrelated Rician fading. It is observed from Fig. 6 that the performance of all algorithms with both S-CSI and I-CSI improves with β. This is expected since as β increases, the AP-IRS channel becomes more correlated which is highly beneficial for achieving the maximum beamforming gain in the single-user case. In particular, for the S-CSI case, another important reason is that when β increases, the AP-IRS-user link becomes more deterministic, thus rendering the proposed scheme based on S-CSI to be more effective. Furthermore, we can observe that the performance gap between the two cases (I-CSI and S-CSI) eventually approaches a constant when β is sufficiently large. This is because the AP-user link is assumed to be Rayleigh fading, thus for the S-CSI case, no statistical information can be extracted and exploited to further improve the achievable average rate. It is also observed that the PDDbased algorithm outperforms the naive and the single-timescale schemes since they do not fully exploit the S-CSI and I-CSI, respectively. 3) Impact of the correlation coefficients r r and r r,u : In Fig. 7, we investigate the achievable average rate versus the correlation coefficients r r and r r,u . For ease of comparison, we set d = 50 m, r d = 0 and β Au = β AI = β Iu = 0. From Fig. 7, we observe that for the I-CSI case, the achievable average rate improves with the increasing of r r (with the reason similar to that in Fig. 6), but this does not hold when increasing r r,u . To be specific, the performance with {r r = 0.5, r r,u = 1}/{r r = 1, r r,u = 1} is inferior to that with {r r = 0.5, r r,u = 0.9}/{r r = 1, r r,u = 0.5}. This is because when r r,u is close to 1, i.e., the IRS-user channel is fully correlated (i.e., the entries in h r are almost identical), the degree of freedom (DoF) when adjusting the transmit precoding vector for signal alignment at the user becomes very limited. Furthermore, it is observed that the benefit brought by increasing r r and r r,u is more pronounced for the algorithms based on S-CSI than that based on I-CSI. We can also observe that for the S-CSI-based schemes, the effects of increasing r r or r r,u are similar, and the best performance is achieved when both r r and r r,u are close to 1. 
Besides, using IRS with discrete phase shifters incurs only negligible rate loss in this case. This is mainly because when r r is small, exploiting S-CSI only is generally ineffective and thus using continuous phase shifters can only achieve marginal performance gain. When r r is close to 1, the rate achieved by aligning the phase shifts of all the reflecting elements is already sufficiently high, thus weakening the gain of using continuous phase shifters.
B. Multiuser Case
Next, we consider a multiuser system with four users, denoted by U_k's, k ∈ {1, · · · , 4}, whose locations are shown in Fig. 8, i.e., the users lie on a semicircle centered at (0, 50 m, 0) with radius d_1 = 3 m. This setup can practically correspond to the case where the IRS is deployed at the cell edge to cover an area with a high density of users (e.g., a hot-spot scenario). Moreover, to investigate the impacts of the correlation coefficients {r_{r,k}} on each user's achievable average rate, we assume r_{r,k} = (k − 1)/3, i.e., each IRS-user link has a different correlation level. Similar to the single-user case, four benchmark schemes are considered: 1) an I-CSI-based algorithm, which is obtained by combining the WMMSE method and the PDD method and assumes perfect I-CSI over all time slots, 2) a naive scheme applying the I-CSI-based algorithm for the first time slot only, 3) the random phase-shift scheme (same as that in the single-user case), and 4) the scheme applying the WMMSE method but without the IRS. We observe by simulation that in the multiuser case, the performance of the single-timescale scheme is even worse than that of the random phase-shift scheme. This is because when the Rician factor of the AP-user links is small, utilizing S-CSI alone results in severe multiuser interference; therefore its performance is not shown here.
Prior to the performance comparison, we first illustrate in Fig. 9 the convergence behavior of Algorithm 2 by plotting the average sum-rate of the users versus the number of iterations with r_r = 0.5 and r_d = 0. For comparison, we also consider a batch alternating optimization (AO) algorithm. From Fig. 9, we can observe that Algorithm 2 (with adjustable amplitude or unit amplitude) and the batch AO algorithm achieve a similar performance when convergence is reached. However, since Algorithm 2 with adjustable amplitude converges faster and consumes less storage space as compared with the batch AO algorithm, we only provide the performance of Algorithm 2 in the following. Moreover, Algorithm 2 with adjustable amplitude also converges faster than that with unit amplitude. This is mainly due to the fact that when the amplitudes can be adjusted in the interval [0, 1], a larger feasible region can be explored in the first few iterations of the algorithm, which helps accelerate its convergence.
1) Impact of the Rician factor: In Fig. 10, we investigate the average sum-rate achieved by Algorithm 2 versus the Rician factor of the AP-IRS-user links, where we assume β_AI = β_Iu = β, β_Au = 0, r_r = r_d = 0 and r_{r,k} = 0, ∀k ∈ K, for simplicity. For the I-CSI-based scheme, we also provide the performance of a previously proposed algorithm for comparison; as shown, the PDD-based algorithm achieves a similar performance as that algorithm. Then, similar to the single-user case shown in Fig. 6, it can be observed that the performance gap between the schemes based on I-CSI versus S-CSI decreases as β increases. The performance gap cannot approach zero under the considered simulation setup: besides the reason mentioned for Fig. 6, this is also because multiuser interference is the performance bottleneck in the multiuser case, and if no I-CSI can be exploited in the IRS reflection design to effectively cancel it, the average sum-rate deteriorates. Furthermore, we observe that, different from the proposed algorithm based on S-CSI, the average sum-rate of the random phase-shift scheme and that of the scheme without IRS are insensitive to the Rician factor.
2) Impact of the correlation coefficient r r : In Fig. 11, we plot the average sum-rate achieved by Algorithm 2 versus the IRS correlation coefficient r r with fixed r d = 0 and different total transmit power budgets (P = 5 dBm or 30 dBm). To focus on studying the effect of r r , the AP-user direct links are assumed to be fully blocked, i.e., h d,k = 0, ∀k ∈ K. Note that when P = 5 dBm, very few users will be scheduled for transmission at each time slot in general according to the solution obtained by solving problem (8), since the transmit power is limited. In this case, it is observed that the performance of both schemes based on I-CSI or S-CSI improves with the increasing of r r , and the performance gap between them is significantly reduced when r r is close to 1. For the I-CSI case, the performance improvement comes from the fact that when r r → 1, the AP-IRS channel is nearly rank-one, and thus only one user will be scheduled in each channel realization, which reduces to the single-user case in Section V-A. Note that this is quite different from the power minimization problem in with individual user rate/SINR constraints, for which reducing the rank of G (or increasing the correlation in G) will decrease the spatial multiplexing gain and result in more severe multiuser interference, thus leading to degraded performance. For the S-CSI case, the average sum-rate increases more rapidly with the increasing of r r , since this reduces the randomness in CSI and enhances the passive beamforming gain. However, when P = 30 dBm, i.e., at the high SNR region, increasing r r too much is adverse since in this case the spatial multiplexing gain becomes the performance bottleneck of the system. Besides, similar results can be observed by assuming r r,k = r r,u , ∀k ∈ K and investigating the impact of the correlation coefficient r r,u on the average sum-rate, therefore their details are not shown for brevity.
3) Performance comparison with different IRS-user correlation levels: In Fig. 12, we investigate the achievable average rate of each user (with different values of r r,k ) achieved by Algorithm 2, where the simulation parameters are the same as those in Fig. 11. As can be seen from Fig. 12 (a), when P = 5 dBm, the achievable average rate of the user with larger r r,k is always higher than those with smaller ones. This is because when one IRS-user link is more correlated than the others, its S-CSI can be better exploited to improve its achievable rate. Thus, allocating more power to this user is more beneficial for the sum-rate maximization due to the limited total transmit power at the AP. As a result, we can observe that when r r = 1, the achievable average rate of user 1 (r r,1 = 0) is almost zero, whereas that of user 4 (r r,4 = 1) is the highest. Moreover, we note that although larger r r results in higher sum-rate, this may not be beneficial for achieving the spatial multiplexing gain, since in this case the passive beamforming design favors only a small number of users and user fairness is difficult to guarantee. In Fig. 12 (b), we can observe that the distribution of the achievable rate of each user changes with the IRS receive correlation coefficient r r when the total transmit power is high. Specifically, if r r = 0, then the AP-IRS channel becomes Rayleigh fading, user 1 with no channel correlation achieves the best average rate because the channel diversity gain for user 1 is higher than those of the others when optimizing the transmit precoding vectors. In contrast, when r r = 1 (in this case only one user can be supported), the performance of user 4 is the best since its channel is more deterministic and the optimization of the IRS phase shifts tends to favor this user with more dominant S-CSI, thus allocating more power to user 4 is beneficial.
VI. CONCLUSIONS
In this paper, we studied a new TTS-based joint active and passive beamforming optimization problem for an IRS-aided multiuser system. The weighted sum-rate was maximized under practical discrete phase-shift constraints at the IRS with only S-CSI. We proposed a novel TTS transmission protocol, where the long-term IRS phase shifts are optimized according to the S-CSI and the short-term transmit precoding vectors at the AP are designed adaptive to the instantaneous effective CSI with fixed phase shifts. A PDD-based algorithm and an SSCA algorithm were proposed for the single-user and multiuser cases, respectively. Simulation results showed that significant sum-rate performance gain can be achieved by using IRS based on S-CSI and with discrete phase shifters as compared to the case without IRS, especially when the deterministic Rician components dominate the channel and/or the channel correlation coefficients are large. It was also unveiled that channel correlations of the AP-IRS and IRS-user links exhibit distinct impacts on the proposed SSCA algorithm performance under different SNR regimes. |
import os
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL', 'sqlite:///data.db')
|
import logging

import numpy as np


def min_max_normalize(image, clip=False):
    """Scale each (batch, channel) slice of a 4D image to [0, 1], optionally clipping outliers first."""
if not np.issubdtype(image.dtype, np.floating):
logging.info('Converting image dtype to float')
image = image.astype('float32')
if not len(np.shape(image)) == 4:
raise ValueError('Image must be 4D, input image shape was'
' {}.'.format(np.shape(image)))
for batch in range(image.shape[0]):
for channel in range(image.shape[-1]):
img = image[batch, ..., channel]
if clip:
img = np.clip(img, a_min=np.percentile(img, 0.01), a_max=np.percentile(img, 99.9))
min_val = np.min(img)
max_val = np.max(img)
normal_image = (img - min_val) / (max_val - min_val)
image[batch, ..., channel] = normal_image
return image |
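A minimal usage sketch of the function above; the shapes and the random data are illustrative assumptions.

# Normalize a batch of 2 single-channel 64x64 images to [0, 1], clipping extreme values first.
batch = np.random.rand(2, 64, 64, 1).astype('float32') * 255.0
normalized = min_max_normalize(batch, clip=True)
assert normalized.min() >= 0.0 and normalized.max() <= 1.0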
//uses original deploymentId (not vid)
func RemoveProcess(deploymentId string) (err error) {
count, err := getDeploymentCount(deploymentId)
if err != nil {
return err
}
if count.Count == 0 {
return nil
}
client := &http.Client{}
url := Config.ProcessEngineUrl + "/engine-rest/deployment/" + deploymentId + "?cascade=true"
request, err := http.NewRequest("DELETE", url, nil)
if err != nil {
return err
}
_, err = client.Do(request)
return
} |
package http
import (
"bytes"
"encoding/gob"
"io/ioutil"
gohttp "net/http"
"time"
"github.com/plopezm/kaiser/core"
"github.com/plopezm/kaiser/core/types"
"github.com/robertkrimen/otto"
)
func init() {
core.RegisterPlugin(new(Plugin))
}
var netClient = &gohttp.Client{
Timeout: time.Second * 10,
}
// Response The structure of an Http response
type Response struct {
StatusCode int
Body string
Headers map[string][]string
}
// Plugin Implements http function
type Plugin struct {
context types.JobContext
}
// GetInstance Creates a new plugin instance with a context
func (plugin *Plugin) GetInstance(context types.JobContext) types.Plugin {
newPluginInstance := new(Plugin)
newPluginInstance.context = context
return newPluginInstance
}
// GetFunctions returns the functions to be registered in the VM
func (plugin *Plugin) GetFunctions() map[string]interface{} {
functions := make(map[string]interface{})
functions["http"] = map[string]interface{}{
"get":  plugin.Get,
"post": plugin.Post,
}
return functions
}
// Get Performs an http get request
func (plugin *Plugin) Get(call otto.FunctionCall) otto.Value {
urlArgument := call.Argument(0)
headers := call.Argument(1).Object()
url, err := urlArgument.ToString()
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
req, err := createRequest("GET", url, nil, headers)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
resp, err := sendRequest(req)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
valueToReturn, err := call.Otto.ToValue(resp)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
return valueToReturn
}
// Post Performs an http post request
func (plugin *Plugin) Post(call otto.FunctionCall) otto.Value {
urlArgument := call.Argument(0)
body := call.Argument(1).Object()
headers := call.Argument(2).Object()
url, err := urlArgument.ToString()
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
req, err := createRequest("POST", url, body, headers)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
resp, err := sendRequest(req)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
valueToReturn, err := call.Otto.ToValue(resp)
if err != nil {
res, _ := call.Otto.ToValue(err.Error())
return res
}
return valueToReturn
}
func createRequest(method string, url string, body *otto.Object, headers *otto.Object) (*gohttp.Request, error) {
var bodyBuffer *bytes.Buffer
if body != nil {
var err error
bodyBytes, err := convertInterfaceToBytes(body)
if err != nil {
return nil, err
}
bodyBuffer = bytes.NewBuffer(bodyBytes)
} else {
bodyBuffer = bytes.NewBuffer([]byte{})
}
req, err := gohttp.NewRequest(method, url, bodyBuffer)
if err != nil {
return nil, err
}
if headers != nil {
for _, headerKey := range headers.Keys() {
value, _ := headers.Get(headerKey)
valueString, _ := value.ToString()
req.Header.Set(headerKey, valueString)
}
}
return req, nil
}
func sendRequest(req *gohttp.Request) (*Response, error) {
resp, err := netClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var jsResponse = &Response{
StatusCode: resp.StatusCode,
Body: string(respBody),
Headers: resp.Header,
}
return jsResponse, nil
}
func convertInterfaceToBytes(object interface{}) ([]byte, error) {
var buf bytes.Buffer
if object == nil {
return nil, nil
}
enc := gob.NewEncoder(&buf)
err := enc.Encode(object)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
|
import numpy as np


def groupX(categ):
    """Convert integer category labels (values 0..p-1) into an N x p one-hot indicator matrix."""
N = len(categ)
p = len(np.unique(categ))
if np.max(categ) > p - 1:
raise Exception("the maximum category number should not exceed one minus the number of categories")
X = np.zeros((N,p))
for I in np.arange(N):
X[I, int(categ[I])] = 1
return X |
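A short usage example with an illustrative label vector.

labels = np.array([0, 2, 1, 0])
X = groupX(labels)
# X is a 4 x 3 indicator matrix; row i has a single 1 in column labels[i].
print(X)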
A central Florida school district may ban extracurricular student clubs in an attempt to prevent the formation of a Gay-Straight Alliance at a local middle school.
The Lake County School District discussed amending the rules regarding student clubs in order to blockade a Gay-Straight Alliance at Carver Middle School in Leesburg, according to the Miami Herald. A group of students attempted to form the Gay-Straight Alliance during the 2011-2012 school year, but were turned down. One eighth-grade student, Bayli Silberstein, persisted and reapplied for permission in November.
Silberstein's push for a GSA is in response to anti-gay bullying at the school, according to WFTV. She claims she and her friends were tormented by others for their sexual orientation. "It hurt, and that is something that I did not want to see continuing," she told central Florida's ABC local affiliate WFTV.
After Silberstein's application was previously denied, the American Civil Liberties Union got involved, according to WFTV. The ACLU sent a letter to the school district saying that if school officials continued to obstruct the formation of the group, taxpayers may have to foot the bill for a very expensive lawsuit.
The Florida School district is legally obligated to permit a Gay-Straight Alliance, per the federal Equal Access Act. The legislation states that it is unlawful for a school receiving federal funding "to deny equal access or a fair opportunity to, or discriminate against, any students who wish to conduct a meeting... on the basis of the religious, political, philosophical, or other content of the speech."
So now, Lake County may sacrifice all to thwart one.
According to the Orlando Sentinel, school board members mulled over the issue on Monday. Chairwoman Kyleen Fischer reasoned that the school is not responsible for "social engineering." Board member Tod Howard expressed worry over losing the more valuable clubs, remarking during the meeting, "I am very concerned that one club would push out the remainder of the clubs that are doing good things."
The Gay, Lesbian, Straight Education Network (GLSEN) was floored over the school board's plans.
“It’s shocking that a school board would go so far to ban a student club designed to provide a safe and affirming space for all students,” Dr. Eliza Byard, GLSEN’s Executive Director, said in a GLSEN news release on the matter. “Research has consistently shown that GSAs improve school climate and students develop a stronger connection to their school community. By denying access to a GSA, the school board is sending the wrong message that LGBT students are undeserving of the same opportunity to a quality education.”
GLSEN also said that students at schools with Gay-Straight Alliances experience less homophobia and victimization, and are less likely to feel unsafe because of their sexual orientation than at schools without.
Thursday marked National Gay-Straight Alliance Day. |
#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;
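// Competitive-programming style task: for each cell (i, j) of an n x m grid of '.'/'*',
// the cost of completing a full "cross" through it is (n + m - 1) - row[i] - col[j]
// (+1 if the cell itself is already '*', to undo the double count). The code accumulates
// star counts per row and column and prints the minimum such cost over all cells.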
int main() {
ll T,n,m,ans,min;
char c;
cin>>T;
vector<int>row,col;
while(T--){
cin>>n>>m;
min=n+m;
int a[n][m];
row.clear();
col.clear();
ans=0;
for(ll i=0;i<n;i++){
for(ll j=0;j<m;j++){
a[i][j]=n+m;
}
}
for(ll i=0;i<n;i++)row.push_back(0);
for(ll i=0;i<m;i++)col.push_back(0);
for(ll i=0;i<n;i++){
for(ll j=0;j<m;j++){
cin>>c;
if(c=='*') {a[i][j]++;row[i]++;col[j]++;}
}
}
for(ll i=0;i<n;i++){
for(ll j=0;j<m;j++){
a[i][j]=a[i][j]-row[i]-col[j];
if(a[i][j]<min)min=a[i][j];
}
}
cout<<min-1<<endl;
}
return 0;
} |
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- Implementation details can be found in the technical report 'http://arxiv.org/pdf/1010.1128v3.pdf'.
-- | This module provides the \EpoStar\ processor.
module Tct.Trs.Processor.EpoStar
( epoStarDeclaration
, epoStar
, epoStar'
, ExtComp (..)
, extCompArg
) where
import Control.Applicative
import Control.Monad
import qualified Data.Array as Ar
import qualified Data.Foldable as F (foldr)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Set as S
import qualified Tct.Core.Common.Pretty as PP
import qualified Tct.Core.Common.Xml as Xml
import qualified Tct.Core.Data as T
import SLogic.Smt.Solver (minismt)
import qualified Data.Rewriting.Rule as R (lhs, rhs)
import qualified Data.Rewriting.Term as R
import Tct.Common.ProofCombinators
import Tct.Common.SMT ((.&&), (.<=>), (.<=), (.==), (.=>), (.>), (.||))
import qualified Tct.Common.SMT as SMT
import Tct.Trs.Data
import Tct.Trs.Data.Precedence (Order (..), Precedence (..))
import qualified Tct.Trs.Data.Precedence as Prec
import qualified Tct.Trs.Data.Problem as Prob
import qualified Tct.Trs.Data.Rewriting as R (directSubterms)
import qualified Tct.Trs.Data.Signature as Sig
import qualified Tct.Trs.Data.Rules as RS
import qualified Tct.Trs.Encoding.SafeMapping as SM
newtype EpoStar = EpoStar { extComp_ :: ExtComp } deriving Show
data ExtComp = ExtComp | NoExtComp deriving (Bounded, Enum, Eq, Show)
useExtComp :: ExtComp -> Bool
useExtComp = (ExtComp==)
data EpoStarProof f v = EpoStarProof
{ stricts_ :: Rules f v -- ^ The oriented input TRS.
, safeMapping_ :: SM.SafeMapping f -- ^ The safe mapping.
, precedence_ :: Precedence f -- ^ The precedence.
, argumentPermutation_ :: MuMapping f -- ^ Employed argument permutation.
, signature_ :: Signature f -- ^ Signature underlying 'epoInputTrs'
} deriving Show
instance T.Processor EpoStar where
type ProofObject EpoStar = ApplicationProof (OrientationProof (EpoStarProof F V))
type In EpoStar = Trs
type Out EpoStar = Trs
type Forking EpoStar = T.Judgement
execute p prob =
maybe epo (\s -> T.abortWith (Inapplicable s :: ApplicationProof (OrientationProof (EpoStarProof F V)))) maybeApplicable
where
maybeApplicable = Prob.isRCProblem' prob <|> Prob.isInnermostProblem' prob <|> RS.isConstructorTrs' sig trs
trs = Prob.allComponents prob
sig = Prob.signature prob
-- solver = SMT.smtSolveTctM prob
solver = minismt
epo = do
res <- entscheide solver trs sig (extComp_ p)
case res of
SMT.Sat m -> T.succeedWith0 (Applicable . Order $ nproof m) (const $ T.timeUBCert (T.Exp Nothing))
_ -> T.abortWith (Applicable (Incompatible :: OrientationProof (EpoStarProof F V)))
nproof (prec,safe,mu) = EpoStarProof
{ stricts_ = trs
, safeMapping_ = safe
, precedence_ = prec
, argumentPermutation_ = mu
, signature_ = sig }
find :: Ord k => k -> M.Map k v -> v
find e = fromMaybe (error "EpoStar.find") . M.lookup e
--- * precedence encoding --------------------------------------------------------------------------------------------
data PrecedenceEncoder f w = PrecedenceEncoder
(Precedence f) -- ^ initial precedence
(M.Map f (SMT.IExpr w)) -- ^ a (bounded) integer variable for each defined symbol
precedenceEncoder :: (SMT.Fresh w, Ord f) => Signature f -> SMT.SmtSolverSt w (PrecedenceEncoder f w)
precedenceEncoder sig = PrecedenceEncoder (Prec.empty sig) . M.fromList <$> mapM bounded (S.toList ds)
where
bounded f = do v <- SMT.ivarM'; SMT.assert (v .> SMT.zero .&& v .<= SMT.num k); return (f,v)
ds = Sig.defineds sig
k = S.size ds
precedence :: Ord f => PrecedenceEncoder f w -> Order f -> SMT.Formula w
precedence (PrecedenceEncoder (Precedence (sig,_)) fm) (f :>: g)
| f == g = SMT.bot
| Sig.isConstructor f sig = SMT.bot
| Sig.isConstructor g sig = SMT.top
| otherwise = find f fm .> find g fm
precedence (PrecedenceEncoder (Precedence (sig,_)) fm) (f :~: g)
| f == g = SMT.top
| cf && cg = SMT.top
| cf && not cg = SMT.bot
| not cf && cg = SMT.bot
| otherwise = find f fm .== find g fm
where
cf = Sig.isConstructor f sig
cg = Sig.isConstructor g sig
instance (Ord f, SMT.Storing w) => SMT.Decode (SMT.Environment w) (PrecedenceEncoder f w) (Precedence f) where
decode (PrecedenceEncoder (Precedence (sig,_)) fm) = do
fis :: [(f,Int)] <- M.assocs <$> SMT.decode fm
return $ Precedence (sig, gts fis ++ eqs fis)
where
gts fis = [ f:>:g | (f,i) <- fis, (g,j) <- fis, i > j ]
eqs fis = [ f:~:g | (f,i) <- fis, (g,j) <- fis, i == j ]
--- * safe mapping ---------------------------------------------------------------------------------------------------
data SafeMappingEncoder f w = SafeMappingEncoder
(SM.SafeMapping f) -- ^ initial safe mapping
(M.Map (f,Int) (SMT.Formula w)) -- ^ variable safe_f_i for defined symbol f and argument position i
safeMappingEncoder :: (SMT.Fresh w, Ord f) => Signature f -> SMT.SmtSolverSt w (SafeMappingEncoder f w)
safeMappingEncoder sig = SafeMappingEncoder (SM.empty sig) <$> sfm
where
sfm = M.fromList <$> mapM bvar [ (f,i) | f <- S.toList (Sig.defineds sig), i <- Sig.positions sig f ]
bvar k = SMT.bvarM' >>= \v -> return (k,v)
safeMapping :: Ord f => SafeMappingEncoder f w -> f -> Int -> SMT.Formula w
safeMapping (SafeMappingEncoder (SM.SafeMapping (sig,_)) sfm) f i
| Sig.isConstructor f sig = SMT.top
| otherwise = find (f,i) sfm
instance (Ord f, SMT.Storing w) => SMT.Decode (SMT.Environment w) (SafeMappingEncoder f w) (SM.SafeMapping f) where
decode (SafeMappingEncoder sf sfm) = do
sfs :: S.Set (f,Int) <- SMT.decode (SMT.Property (fromMaybe False) sfm)
return $ F.foldr (uncurry SM.setSafe) sf sfs
--- * mu mapping -----------------------------------------------------------------------------------------------------
newtype MuMapping f = MuMapping (M.Map f [Int]) deriving Show
-- mu_f,i,k = mu(f,i)=k
newtype MuMappingEncoder f w = MuMappingEncoder (M.Map f (Ar.Array (Int,Int) (SMT.Formula w)))
muMappingEncoder :: (SMT.Fresh w, Ord f) => Signature f -> SMT.SmtSolverSt w (MuMappingEncoder f w)
muMappingEncoder sig = MuMappingEncoder . M.fromList <$> mapM bijection (Sig.elems sig)
where
bijection (f,af) = do
let ((u,l),(o,r)) = ((1,1),(af,af))
ar <- Ar.listArray ((u,l),(af,af)) <$> replicateM (af*af) SMT.bvarM'
sequence_ $ do
x <- Ar.range (u,o)
return $ SMT.assert $ exactlyOne1 (ar,l,r) x $ do y <- Ar.range (l,r); return (ar Ar.! (x,y))
sequence_ $ do
x <- Ar.range (u,o)
return $ SMT.assert $ exactlyOne2 (ar,l,r) x $ do y <- Ar.range (l,r); return (ar Ar.! (y,x))
return (f,ar)
exactlyOne1 (ar,l,r) x vs = SMT.bigOr vs .&&
SMT.bigAnd [ SMT.bigAnd [ SMT.bnot (ar Ar.! (x,i)) .|| SMT.bnot (ar Ar.! (x,j)) | j <- [i+1..r] ] | i <- [l..r-1] ]
exactlyOne2 (ar,l,r) x vs = SMT.bigOr vs .&&
SMT.bigAnd [ SMT.bigAnd [ SMT.bnot (ar Ar.! (i,x)) .|| SMT.bnot (ar Ar.! (j,x)) | j <- [i+1..r] ] | i <- [l..r-1] ]
muMapping :: Ord f => MuMappingEncoder f w -> f -> (Int, Int) -> SMT.Formula w
muMapping (MuMappingEncoder fm) f ix = find f fm Ar.! ix
instance (Ar.Ix i, SMT.Decode m c a) => SMT.Decode m (Ar.Array i c) (Ar.Array i a) where
decode ar = Ar.array bnds <$> mapM ( \(i,a) -> SMT.decode a >>= \c -> return (i,c) ) (Ar.assocs ar)
where bnds = Ar.bounds ar
instance (Ord f, SMT.Storing w) => SMT.Decode (SMT.Environment w) (MuMappingEncoder f w) (MuMapping f) where
decode (MuMappingEncoder fm) = do
fa :: M.Map f (Ar.Array (Int,Int) Bool) <- SMT.decode fm
return $ MuMapping $ (\ar -> foldr set [] $ Ar.assocs ar) `fmap` fa
where
set (_, False) is = is
set ((_,k),True) is = k:is
--- * orient ---------------------------------------------------------------------------------------------------------
data EpoOrder f v
= Epo (R.Term f v) (R.Term f v)
| Eposub (R.Term f v) (R.Term f v)
| Eq (R.Term f v) (R.Term f v)
deriving (Eq, Ord)
unorientable :: (Ord f, Ord v) => Signature f -> R.Term f v -> R.Term f v -> Bool
unorientable sig u v =
varsS u `S.isProperSubsetOf` varsS v
|| (isConstructorTerm u && not (isConstructorTerm v))
where
varsS = S.fromList . R.vars
isConstructorTerm = all (`Sig.isConstructor` sig) . R.funs
entscheide :: (Functor m, Monad m, Ord f, Ord v, Show f, Show v) =>
SMT.SmtSolver m Int
-> Rules f v
-> Signature f
-> ExtComp
-> m (SMT.Result (Precedence f, SM.SafeMapping f, MuMapping f))
entscheide solver trs sig ecomp = do
res :: SMT.Result (Precedence f, SM.SafeMapping f, MuMapping f) <- SMT.smtSolveSt solver $ do
SMT.setLogic SMT.QF_LIA
sfenc <- safeMappingEncoder sig
prenc <- precedenceEncoder sig
muenc <- muMappingEncoder sig
let
safe = safeMapping sfenc
prec = precedence prenc
mu = muMapping muenc
epo s t = orient (useExtComp ecomp) sig prec safe mu (Epo s t)
-- epoM s t = orientM (useExtComp ecomp) sig prec safe mu (Epo s t)
SMT.assert $ SMT.bigAnd [ R.lhs r `epo` R.rhs r | r <- rs ]
-- SMT.assert =<< fst <$> SMT.memo (SMT.bigAndM [ R.lhs r `epoM` R.rhs r | r <- rs ])
-- consistency (5)
SMT.assert $ SMT.bigAnd
[
prec (f:~:g) .=> SMT.bigAnd
[
(mu f (i,k) .&& mu g (j,k)) .=> (safe f i .<=> safe g j)
| i <- [1..af], j <- [1..af], k <- [1..af] ]
| let ds = S.toList (Sig.defineds sig), f <- ds, g <- ds, let af = Sig.arity sig f, af == Sig.arity sig g ]
return $ SMT.decode (prenc, sfenc, muenc)
return $ res
where
rs = RS.toList trs
orient :: (Show v, Show f, Ord v, Ord f) =>
Bool
-> Signature f
-> (Order f -> SMT.Formula w)
-> (f -> Int -> SMT.Formula w)
-> (f -> (Int, Int) -> SMT.Formula w)
-> EpoOrder f v -> SMT.Formula w
orient allowEcomp sig prec safe mu a =
case a of
Epo s t -> s `epoimp` t
Eposub s t -> s `eposubimp` t
Eq s t -> s `equivimp` t
where
ite g t e = (g .=> t) .&& ((SMT.bnot g) .=> e)
unsatisfiable = unorientable sig
s `epo` t = orient allowEcomp sig prec safe mu (Epo s t)
s `eposub` t = orient allowEcomp sig prec safe mu (Eposub s t)
s `equiv` t = orient allowEcomp sig prec safe mu (Eq s t)
-- epo: s >epo* t
u `epoimp` v
| u `isProperSupertermOf` v = SMT.top
| unorientable sig u v = SMT.bot
| otherwise = SMT.bigOr [u `epo1` v, u `epo23` v]
where
isProperSupertermOf s t = t `elem` (concatMap R.subterms $ R.directSubterms s)
epo1 (R.Fun _ ss) t = SMT.bigOr [ SMT.bigOr [si `epo` t, si `equiv` t] | si <- ss ]
epo1 _ _ = SMT.bot
epo23 s@(R.Fun f ss) (R.Fun g ts) =
SMT.bigAnd
[ SMT.bigOr [ prec $ f:>:g, epo3 ]
, SMT.bigAnd [ ite (safe g i) (s `epo` ti) (s `eposub` ti) | (i,ti) <- tsi ] ]
where
ssi = [ (i, si) | i <- [1..] | si <- ss ]
tsi = [ (i, ti) | i <- [1..] | ti <- ts ]
epo3
| Sig.isDefined g sig && not (null ss) && length ss == length ts = SMT.bigAnd [ prec (f:~:g), epolex 1 ]
| otherwise = SMT.bot
epolex k
| length ssi < k = SMT.bot
| otherwise = SMT.bigAnd [
let
rec = epolex (k+1)
comp = SMT.bigOr [ si `eposub` tj, SMT.bigAnd [si `equiv` tj, rec] ]
in
SMT.bigAnd [mu f (i,k), mu g (j,k)] .=> ite (safe g j) rec comp
| (i, si) <- ssi, (j, tj) <- tsi]
epo23 _ _ = SMT.bot
-- eposub: s \sqltepo t
u `eposubimp` v
| unsatisfiable u v = SMT.bot
| otherwise = SMT.bigOr [ u `eposub1` v, u `eposub2` v ]
where
(R.Fun f ss) `eposub1` t = SMT.bigOr [ SMT.bigAnd [maybeNormal i, SMT.bigOr [si `eposub` t, si `equiv` t]] | i <- [1..] | si <- ss]
where
maybeNormal i
| Sig.isDefined f sig = SMT.bnot $ safe f i
| otherwise = SMT.top
_ `eposub1` _ = SMT.bot
-- special case: with extended composition
s@(R.Fun f _) `eposub2` (R.Fun g ts)
| allowEcomp = SMT.bigAnd [ prec (f:>:g) , SMT.bigAnd [ SMT.bigAnd [ safe g i, s `eposub` ti] | i <- [1..] | ti <- ts] ]
| otherwise = SMT.bot
_ `eposub2` _ = SMT.bot
-- equivalence: s ~ t modulo mu
u `equivimp` v
| u == v = SMT.top
| unsatisfiable u v = SMT.bot
| otherwise = case (u,v) of
(R.Var _ , R.Var _) -> SMT.bot
(R.Fun f ss, R.Fun g ts)
| unsat -> SMT.bot
| otherwise -> SMT.bigAnd
[ prec (f:~:g)
, SMT.bigAnd
[ SMT.bigAnd [mu f (i,k), mu g (j,k)] .=> (si `equiv` tj)
| (i,si) <- zip [1..] ss, (j,tj) <- zip [1..] ts, k <- [1..length ss] ]
]
where unsat = length ss /= length ts || (Sig.isConstructor f sig /= Sig.isConstructor g sig)
_ -> SMT.bot
-- FIXME: MS monadic/memoised version has a memory problem
-- it doesn't affect the outcome on the (used) testbed
-- orientM :: (Show v, Show f, Ord v, Ord f, Monad m) =>
-- Bool
-- -> Signature f
-- -> (Order f -> SMT.Formula w)
-- -> (f -> Int -> SMT.Formula w)
-- -> (f -> (Int, Int) -> SMT.Formula w)
-- -> EpoOrder f v -> SMT.Memo (EpoOrder f v) (SMT.Formula w) m (SMT.Formula w)
-- orientM allowEcomp sig prec safe mu = SMT.memoized $ \a ->
-- case a of
-- Epo s t -> s `epoimp` t
-- Eposub s t -> s `eposubimp` t
-- Eq s t -> s `equivimp` t
-- where
-- precM = return . prec
-- safeM g = return . safe g
-- iteM g t e = SMT.impliesM g t `SMT.bandM` SMT.impliesM (SMT.bnotM g) e
-- unsatisfiable u v = unorientable sig u v
-- s `epo` t = orientM allowEcomp sig prec safe mu (Epo s t)
-- s `eposub` t = orientM allowEcomp sig prec safe mu (Eposub s t)
-- s `equiv` t = orientM allowEcomp sig prec safe mu (Eq s t)
-- -- epo: s >epo* t
-- u `epoimp` v
-- | u `isProperSupertermOf` v = SMT.topM
-- | unorientable sig u v = SMT.botM
-- | otherwise = SMT.bigOrM [u `epo1` v, u `epo23` v]
-- where
-- isProperSupertermOf s t = any (t==) (concatMap R.subterms $ R.directSubterms s)
-- epo1 (R.Fun _ ss) t = SMT.bigOrM [ SMT.bigOrM [si `epo` t, si `equiv` t] | si <- ss ]
-- epo1 _ _ = SMT.botM
-- epo23 s@(R.Fun f ss) (R.Fun g ts) =
-- SMT.bigAndM
-- [ SMT.bigOrM [ precM $ f:>:g, epo3 ]
-- , SMT.bigAndM [ iteM (safeM g i) (s `epo` ti) (s `eposub` ti) | (i,ti) <- tsi ] ]
-- where
-- ssi = [ (i, si) | i <- [1..] | si <- ss ]
-- tsi = [ (i, ti) | i <- [1..] | ti <- ts ]
-- epo3
-- | Sig.isDefined g sig && not (null ss) && length ss == length ts = SMT.bigAndM [ precM (f:~:g), epolex 1 ]
-- | otherwise = SMT.botM
-- epolex k
-- | length ssi < k = SMT.botM
-- | otherwise = SMT.bigAndM [
-- let
-- rec = epolex (k+1)
-- comp = SMT.bigOrM [ si `eposub` tj, SMT.bigAndM [si `equiv` tj, rec] ]
-- in
-- return (SMT.bigAnd [mu f (i,k), mu g (j,k)]) `SMT.impliesM` iteM (safeM g j) rec comp
-- | (i, si) <- ssi, (j, tj) <- tsi]
-- epo23 _ _ = SMT.botM
-- -- eposub: s \sqltepo t
-- u `eposubimp` v
-- | unsatisfiable u v = SMT.botM
-- | otherwise = SMT.bigOrM [ u `eposub1` v, u `eposub2` v ]
-- where
-- (R.Fun f ss) `eposub1` t = SMT.bigOrM [ SMT.bigAndM [maybeNormal i, SMT.bigOrM [si `eposub` t, si `equiv` t]] | i <- [1..] | si <- ss]
-- where
-- maybeNormal i
-- | Sig.isDefined f sig = return $ SMT.bnot $ safe f i
-- | otherwise = SMT.topM
-- _ `eposub1` _ = SMT.botM
-- -- special case: with extended composition
-- s@(R.Fun f _) `eposub2` (R.Fun g ts)
-- | allowEcomp = SMT.bigAndM [ precM (f:>:g) , SMT.bigAndM [ SMT.bigAndM [ return (safe g i), s `eposub` ti] | i <- [1..] | ti <- ts] ]
-- | otherwise = SMT.botM
-- _ `eposub2` _ = SMT.botM
-- -- equivalence: s ~ t modulo mu
-- u `equivimp` v
-- | u == v = SMT.topM
-- | unsatisfiable u v = SMT.botM
-- | otherwise = case (u,v) of
-- (R.Var _ , R.Var _) -> SMT.botM
-- (R.Fun f ss, R.Fun g ts)
-- | unsat -> SMT.botM
-- | otherwise -> SMT.bigAndM
-- [ precM (f:~:g)
-- , SMT.bigAndM
-- [ return (SMT.bigAnd [mu f (i,k), mu g (j,k)]) `SMT.impliesM` (si `equiv` tj)
-- | (i,si) <- zip [1..] ss, (j,tj) <- zip [1..] ts, k <- [1..length ss] ]
-- ]
-- where unsat = length ss /= length ts || (Sig.isConstructor f sig /= Sig.isConstructor g sig)
-- _ -> SMT.botM
--- * instances ------------------------------------------------------------------------------------------------------
extCompArg :: T.Argument 'T.Required ExtComp
extCompArg = T.flag "extend"
[ "Extended Composition: If this flag is enabled, then the slightly more ."
, "liberal composition scheme 'f(x;y) = h(g(;x);k(x;y))' is permitted."
, "Currently it is not known whether this extension is sound." ]
description :: [String]
description = [ unwords
[ "This processor implements orientation of the input problem using 'exponential path orders',"
, "a technique applicable for innermost runtime-complexity analysis."
, "Exponential path orders are a miniaturisation of 'lexicographic path orders',"
, "restricted so that compatibility assesses exponential runtime complexity."] ]
-- FIXME: MS: TODO: timeout shouldn't be necessary; for some reason
-- @timeoutInt 5 (timeoutIn 5 epostar)@ works fine but @timeoutIn 5 not@
epoStarStrategy :: ExtComp -> TrsStrategy
-- epoStarStrategy = T.timeoutRemaining . T.meoutRemaining . Proc . EpoStar
epoStarStrategy = T.Apply . EpoStar
epoStarDeclaration :: T.Declaration ('[T.Argument 'T.Optional ExtComp] T.:-> TrsStrategy)
epoStarDeclaration = T.declare "epostar" description (T.OneTuple exArg) epoStarStrategy
where exArg = extCompArg `T.optional` NoExtComp
epoStar :: TrsStrategy
epoStar = T.deflFun epoStarDeclaration
epoStar' :: ExtComp -> TrsStrategy
epoStar' = T.declFun epoStarDeclaration
--- * proofdata ------------------------------------------------------------------------------------------------------
instance PP.Pretty f => PP.Pretty (MuMapping f) where
pretty (MuMapping m) = PP.hsep $ PP.punctuate (PP.char ',') (pp `fmap` M.toList m)
where pp (f,is) = PP.text "mu" <> PP.parens (PP.pretty f) PP.<+> PP.char '=' PP.<+> PP.list' is
instance (Ord f, PP.Pretty f, PP.Pretty v) => PP.Pretty (EpoStarProof f v) where
pretty proof = PP.vcat
[ PP.text "Strict Rules in Predicative Notation:"
, ppind (SM.prettySafeTrs (safeMapping_ proof) (stricts_ proof))
, PP.text "Safe Mapping:"
, ppind (safeMapping_ proof)
, PP.text "Argument Permutation:"
, ppind (argumentPermutation_ proof)
, PP.text "Precedence:"
, ppind (precedence_ proof) ]
where ppind a = PP.indent 2 $ PP.pretty a
instance Xml.Xml (EpoStarProof f v) where
toXml _ = Xml.empty
|
// Not relevant, WsebConnector always uses /cbm (mixed frames binary encoding)
@Specification("binary.frames.only/text.encoding/response.header.content.type.has.unexpected.value/downstream.response")
void shouldCloseConnectionWhenBinaryFramesOnlyTextDownstreamResponseContentTypeHasUnexpectedValue()
throws Exception {
k3po.finish();
} |
/**
* Container Object of a Task
*/
public class TaskVO {
/**
* Description of the Task
*/
private String description;
/**
* Date of the task. A string in format DDMM (Day, Month)
*/
private String date;
/**
* Assignee of the task. If it is a general task, this String is empty
*/
private String assignee;
/**
* Type of the task. It is true if the task is general, false if it is specific
*/
private boolean type;
/**
* Constructor that creates a specific Task
*
* @param description Description of the Task
* @param date Date of the Task
* @param assignee Assignee of the Task
*/
public TaskVO(String description, String date, String assignee) {
this.description = description;
this.date = date;
this.assignee = assignee;
this.type = false;
}
/**
* Constructor that creates a general Task
*
* @param description Description of the Task
* @param date Date of the Task
*/
public TaskVO(String description, String date) {
this.description = description;
this.date = date;
this.assignee = "";
this.type = true;
}
/**
* Returns the description of the class
*
* @return Description of the class
*/
public String getDescription() {
return description;
}
/**
* Changes the Description of the Task
*
* @param description New description
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Returns the date of the class
*
* @return Date of the class
*/
public String getDate() {
return date;
}
/**
* Changes the Date of the Task
*
* @param date New date, in the format DDMM
*/
public void setDate(String date) {
this.date = date;
}
/**
* Returns the assignee of the class. If this is a general task, the returned String is empty.
*
* @return Assignee of the class
*/
public String getAssignee() {
return assignee;
}
/**
* Changes the assignee of the Task. Passing an empty String will convert the class into a
* general Task, passing a non-empty String will convert the class to a specific Task.
*
* @param assignee New Assignee
*/
public void setAssignee(String assignee) {
this.assignee = assignee;
}
/**
* Returns if the class represents a general Task
*
* @return true if the class is an general Task, false if its a specific
*/
public boolean isGeneral() {
return type;
}
/**
* Serialize class into JSON
*
* @return Object in JSON format
*/
public String serialize() {
return String.format("{\"description\":\"%s\",\"date\":\"%s\",\"assignee\":\"%s\",\"type\":\"%s\"}", this.description, this.date, this.assignee, this.type ? "general" : "specific");
}
} |
from distutils.core import setup, Extension
module = Extension('DataProcess', sources = ['datautil.cpp'],
library_dirs = ['/usr/local/cuda-8.0/lib64','./build'],
libraries = ['cudart','datautil'],
language = 'c')
setup(name = 'DataProcess', version = '1.0', description = 'DataProcess', ext_modules = [module])
|
export enum DraftTypes {
AMENDMENT_STATEMENT = 'AMENDMENT_STATEMENT',
CHANGE_STATEMENT = 'CHANGE_STATEMENT',
FINANCING_STATEMENT = 'FINANCING_STATEMENT'
}
|
/** Base {@link Provider} implementation for providing the target thread pool. */
abstract class BaseThreadPoolProvider implements Provider<ExecutorServiceT> {
abstract ExecutorService createThreadPool(
int coreSize,
int maxSize,
long keepAliveSeconds,
ThreadFactory factory,
RejectedExecutionHandler rejectedExecutionHandler);
@Override
public final ExecutorServiceT get() {
ExecutorService service =
createThreadPool(coreSize, maxSize, keepAliveSeconds, factory, rejectedExecutionHandler);
if (shutdownDelay != null) {
MoreExecutors.addDelayedShutdownHook(
service, shutdownDelay.toMillis(), TimeUnit.MILLISECONDS);
}
return executorServiceTypeClass.cast(MoreExecutors.listeningDecorator(service));
}
} |
//
// Copyright(C) 1993-1996 Id Software, Inc.
// Copyright(C) 2005-2014 <NAME>
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// DESCRIPTION:
// Common code to parse command line, identifying WAD files to load.
//
#include "doomfeatures.h"
#include "d_iwad.h"
#include "m_argv.h"
#include "w_main.h"
#include "w_merge.h"
#include "w_wad.h"
#include "z_zone.h"
// Parse the command line, merging WAD files that are specified.
// Returns true if at least one file was added.
boolean W_ParseCommandLine(void)
{
boolean modifiedgame = false;
int p;
#ifdef FEATURE_WAD_MERGE
// Merged PWADs are loaded first, because they are supposed to be
// modified IWADs.
//!
// @arg <files>
// @category mod
//
// Simulates the behavior of deutex's -merge option, merging a PWAD
// into the main IWAD. Multiple files may be specified.
//
p = M_CheckParmWithArgs("-merge", 1);
if (p > 0)
{
for (p = p + 1; p<myargc && myargv[p][0] != '-'; ++p)
{
char *filename;
modifiedgame = true;
filename = D_TryFindWADByName(myargv[p]);
printf(" merging %s\n", filename);
W_MergeFile(filename);
}
}
// NWT-style merging:
// NWT's -merge option:
//!
// @arg <files>
// @category mod
//
// Simulates the behavior of NWT's -merge option. Multiple files
// may be specified.
p = M_CheckParmWithArgs("-nwtmerge", 1);
if (p > 0)
{
for (p = p + 1; p<myargc && myargv[p][0] != '-'; ++p)
{
char *filename;
modifiedgame = true;
filename = D_TryFindWADByName(myargv[p]);
printf(" performing NWT-style merge of %s\n", filename);
W_NWTDashMerge(filename);
}
}
// Add flats
//!
// @arg <files>
// @category mod
//
// Simulates the behavior of NWT's -af option, merging flats into
// the main IWAD directory. Multiple files may be specified.
//
p = M_CheckParmWithArgs("-af", 1);
if (p > 0)
{
for (p = p + 1; p<myargc && myargv[p][0] != '-'; ++p)
{
char *filename;
modifiedgame = true;
filename = D_TryFindWADByName(myargv[p]);
printf(" merging flats from %s\n", filename);
W_NWTMergeFile(filename, W_NWT_MERGE_FLATS);
}
}
//!
// @arg <files>
// @category mod
//
// Simulates the behavior of NWT's -as option, merging sprites
// into the main IWAD directory. Multiple files may be specified.
//
p = M_CheckParmWithArgs("-as", 1);
if (p > 0)
{
for (p = p + 1; p<myargc && myargv[p][0] != '-'; ++p)
{
char *filename;
modifiedgame = true;
filename = D_TryFindWADByName(myargv[p]);
printf(" merging sprites from %s\n", filename);
W_NWTMergeFile(filename, W_NWT_MERGE_SPRITES);
}
}
//!
// @arg <files>
// @category mod
//
// Equivalent to "-af <files> -as <files>".
//
p = M_CheckParmWithArgs("-aa", 1);
if (p > 0)
{
for (p = p + 1; p<myargc && myargv[p][0] != '-'; ++p)
{
char *filename;
modifiedgame = true;
filename = D_TryFindWADByName(myargv[p]);
printf(" merging sprites and flats from %s\n", filename);
W_NWTMergeFile(filename, W_NWT_MERGE_SPRITES | W_NWT_MERGE_FLATS);
}
}
#endif
//!
// @arg <files>
// @vanilla
//
// Load the specified PWAD files.
//
p = M_CheckParmWithArgs ("-file", 1);
if (p)
{
// the parms after p are wadfile/lump names,
// until end of parms or another - preceded parm
modifiedgame = true; // homebrew levels
while (++p != myargc && myargv[p][0] != '-')
{
char *filename;
filename = D_TryFindWADByName(myargv[p]);
printf(" adding %s\n", filename);
W_AddFile(filename);
}
}
// W_PrintDirectory();
return modifiedgame;
}
|
/**
* Builds a Gaggle DataMatrix data object containing the expression values from
* experiment in the locations specified by rows and columns. Broadcasts this matrix
* to the Gaggle network.
* @author eleanora
*
*/
public void broadcastGeneClusterToGenomeBrowser(Experiment experiment, int[] rows, int[] columns) {
GaggleTranslater gt = new GaggleTranslater(MultipleArrayViewer.this);
ArrayList<Integer> goodLocations = new ArrayList<Integer>();
String[] chrLocations = data.getAnnotationList(AnnotationFieldConstants.CHR_LOCATION, rows);
for(int i=0; i<chrLocations.length; i++) {
if(GenomeBrowserWebstart.isCompleteChrLocation(chrLocations[i])) {
goodLocations.add(rows[i]);
}
}
if(goodLocations.size() < rows.length) {
int[] filteredIndices = new int[goodLocations.size()];
int j=0;
for(Integer i: goodLocations) {
filteredIndices[j] = i;
j++;
}
rows = filteredIndices;
}
if(rows.length <= 0 ) {
JOptionPane.showMessageDialog(new JFrame(),
"No chromosomal location data is loaded for this selection.\n" +
"The genome browser cannot map this data without chromosomal coordinate information.",
"No chromosomal coordinate information",
JOptionPane.ERROR_MESSAGE);
return;
}
DataMatrix dm = gt.createMatrix(rows, AnnotationFieldConstants.CHR_LOCATION, data.getChipAnnotation().getSpeciesName(), null);
int rowCount = dm.getRowCount();
if (rowCount > 100) {
String title = "Broadcast matrix warning";
String msg = "Do you really wish to broadcast " + rowCount + " records?";
int dialogResult = JOptionPane.showConfirmDialog (MultipleArrayViewer.this, msg, title,
JOptionPane.YES_NO_OPTION);
if (dialogResult != JOptionPane.YES_OPTION)
return;
}
String genomeBrowserName = gooseImpl.getGenomeBrowserGoose(dm.getSpecies());
if(genomeBrowserName == null)
return;
gooseImpl.doBroadcastMatrix(dm, genomeBrowserName);
} |
/**
* Starts the layout in a parallel thread; or stops the current layouter
* thread if one is already running.
*/
@Override
public void start() {
SpringLayouter.this.damper = 1.0;
prepare(false);
long currentTime = System.currentTimeMillis();
while (SpringLayouter.this.damper > 0 && System.currentTimeMillis() - currentTime < TIMEOUT) {
relax();
}
finish();
} |
# model/BarLeft.py
import pygame
from model.Bar import Bar
from model.BarVertical import BarVertical
class BarLeft(BarVertical):
def __init__(self, screen_size, max_speed):
BarVertical.__init__(self, screen_size, max_speed)
self.surface = pygame.image.load('imgs/bar_left.png')
self.rect = self.surface.get_rect()
self.init_position()
def init_position(self):
self.rect.top = (self.screen_size[1] - self.rect.height) / 2
def on_event(self, event):
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_w:
self.status = Bar.NEGATIVE
self.last_key = pygame.K_w
if event.key == pygame.K_s:
self.status = Bar.POSITIVE
self.last_key = pygame.K_s
elif event.type == pygame.KEYUP:
if (self.last_key == pygame.K_w and event.key == pygame.K_w) or (
self.last_key == pygame.K_s and event.key == pygame.K_s):
self.status = Bar.IDLE
def check_boundary(self):
if self.rect.top < self.rect.width:
self.rect.top = self.rect.width
self.status = Bar.IDLE
if self.rect.bottom > self.screen_size[1] - self.rect.width:
self.rect.bottom = self.screen_size[1] - self.rect.width
self.status = Bar.IDLE
def check_collision(self, ball):
if self.rect.colliderect(ball.rect):
ball.speed[0] = abs(ball.speed[0])
ball.speed[1] += self.speed * 0.1
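# Illustrative sketch (not part of the original module): a minimal pygame loop
# exercising BarLeft through its on_event / check_boundary API. The window size and
# max_speed value are assumptions for the example only, and the base-class movement
# update is omitted because its method name is not shown above.
if __name__ == '__main__':
    pygame.init()
    size = (800, 600)
    screen = pygame.display.set_mode(size)
    bar = BarLeft(size, max_speed=10)
    clock = pygame.time.Clock()
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            bar.on_event(event)      # W/S keys set the bar status
        bar.check_boundary()         # clamp the bar inside the play field
        screen.fill((0, 0, 0))
        screen.blit(bar.surface, bar.rect)
        pygame.display.flip()
        clock.tick(60)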
|
// should be called when there are no more moves or a player has won.
void update_score(tt_game * game) {
if(game->data->winner == game->data->player_char) {
game->scores->player++;
}
else {
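// Note: this branch also runs on a draw (assuming winner is not set to the
// player's char when nobody won), so a tie is scored for the computer.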
game->scores->computer++;
}
} |
On the evening of Friday, February 10, Ruddy shared a drink with the president at Mar-a-Lago. Two days later, he went on Brian Stelter’s CNN show and delivered a shot across the bow to the president’s chief of staff. "I think Reince Priebus, good guy, well-intentioned, but he clearly doesn't know how the federal agencies work,” he said. “He doesn't have a real good system. He doesn't know how the communications flow."
Ruddy’s comments sparked a media frenzy. Was he speaking for the president? Had Trump said this to him?
The walk-back came later that same day, with Ruddy tweeting “Reince just briefed me on new WH plans. Impressive! CNN today my personal view. Told him I have 'open mind' based on his results.”
Ruddy told me the president had not spoken to him about Priebus at Mar-a-Lago. “The president I’d seen on Friday night after the Abe dinner, we had a drink together,” Ruddy said. “It was never raised, Reince.”
“I was just giving my opinion, I’ve done that always,” Ruddy said. And the incident hasn’t discouraged him from doing so; in my interview with him, Ruddy speculated that Trump might not run for a second term, arguing that “certain people need it emotionally. I don’t think he needs it emotionally.”
According to Ruddy, Trump appreciates his media efforts on his behalf. After a CNN appearance in December, he said, Trump “called me a few days later during my Christmas party and said ‘Thank you, I can’t always go on these shows and defend myself.’”
And the weekend after the controversy over Priebus, Ruddy was spotted having dinner with Trump, Priebus, and White House chief strategist Steve Bannon in Palm Beach.
Newsmax took Trump seriously early on, well before he finally followed through with his oft-repeated threats to run for president. “No disrespect to Breitbart—before there was Breitbart, there was Newsmax,” said the Republican pollster Tony Fabrizio, who is friends with Ruddy and worked on Trump’s campaign. “Before the president was probably an avid reader of Breitbart he was an avid reader of Newsmax. Chris and the president developed a relationship several years ago primarily through and with Newsmax.”
“The only outlets who took us seriously were Newsmax, Breitbart, and Fox News,” said Sam Nunberg, a former Trump adviser. “And even Fox News wasn’t that serious about him running, they would just have him on out of ratings. Our two major outlets were Newsmax and Breitbart.”
For Nunberg, Breitbart was useful as the ideological messenger that would fight for Trump’s agenda. But Newsmax was where Trump was able to refine his political image as an outsider, entrepreneur, and independent Republican over the course of several years.
“Breitbart was stronger on immigration but Newsmax overall—I thought it was more helpful for shaping [Trump’s] overall political profile,” Nunberg said. |
def _edit(self) -> None:
editor = os.getenv("VISUAL") or os.getenv("EDITOR") or "vim"
subprocess.run([editor, self._config_file]) |
def add_timeslice(self, name: str, category: str, duration: float) -> None:
slices = self.timeslices().set_index("name")
if name in slices.index:
msg = "timeslice `{}` already defined with duration {}"
existing_duration = slices.loc[name].duration
if not np.isclose(duration, existing_duration):
raise ValueError(msg.format(name, existing_duration))
log.info(msg.format(name, duration))
else:
self._backend.set_timeslice(name, category, duration) |
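# Illustrative usage sketch (assumes an object `ts` exposing add_timeslice as defined
# above; the names and durations are made up): re-adding a timeslice with a matching
# duration is a logged no-op, while a mismatched duration raises ValueError.
#
#     ts.add_timeslice("winter-evening", "winter", 0.1)
#     ts.add_timeslice("winter-evening", "winter", 0.1)    # already defined, logged and skipped
#     ts.add_timeslice("winter-evening", "winter", 0.25)   # raises ValueError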
//
// RZBaseStreamingPlotView-Private.h
// RZPlotView
//
// Created by <NAME> on 3/21/14.
// Copyright (c) 2014 raizlabs. All rights reserved.
//
#ifndef RZPlotView_RZBaseStreamingPlotView_Private_h
#define RZPlotView_RZBaseStreamingPlotView_Private_h
@interface RZBaseStreamingPlotView ()
@property (assign, nonatomic) NSUInteger replacementIndex;
@property (assign, nonatomic) NSUInteger headPointIndex;
@end
#endif
|
/**
*
* @author Christian Wagner
*/
public class GenT2z_Antecedent
{
private Input input;
private String name;
private GenT2zMF_Interface set;
private final boolean DEBUG = false;
/**
* Creates a new instance of Antecedent which uses an Input object.
*
*/
public GenT2z_Antecedent(String name, GenT2zMF_Interface set, Input input)
{
this.name = name;
this.input = input;
this.set = set;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public T1MF_Interface getFS()
{
if(set.getSupport().contains(input.getInput()))
{
return set.getFS(input.getInput());
}
else
{
return null;
}
}
public GenT2zMF_Interface getSet()
{
return set;
}
/**
* Returns this antecedent as a series of antecedents (each based on a single
* zSlice) for interval type-2 FLSs.
* @return an array of IT2_Antecedent objects, one per zSlice of this antecedent's set
*/
public IT2_Antecedent[] getAntecedentasIT2Sets()
{
IT2_Antecedent[] ants = new IT2_Antecedent[this.getSet().getNumberOfSlices()];
for(int i=0;i<ants.length;i++)
{
ants[i] = new IT2_Antecedent(this.getName()+"_zSlices:"+i,this.getSet().getZSlice(i),this.getInput());
}
return ants;
}
public Input getInput()
{
return input;
}
public boolean equals(Object antecedent)
{
if ( this == antecedent ) return true;
if ( !(antecedent instanceof GenT2z_Antecedent) ) return false;
GenT2z_Antecedent myAntecedent = (GenT2z_Antecedent)antecedent;
return this.getSet()==myAntecedent.getSet();
}
public String toString()
{
return "Antecedent_for:"+this.getSet().getName();
}
} |
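// Illustrative sketch (not part of the original class): decomposing a general type-2
// antecedent into its interval type-2 zSlice antecedents. The construction of `ant`
// (its set and input objects) is assumed to exist elsewhere in the calling code.
//
//     GenT2z_Antecedent ant = new GenT2z_Antecedent("temperature", temperatureSet, temperatureInput);
//     IT2_Antecedent[] slices = ant.getAntecedentasIT2Sets();
//     System.out.println(slices.length + " zSlice antecedents created, one per zSlice of the set");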
Body Odor Disgust sensitivity predicts stronger moral harshness towards moral violations of purity.
Detecting pathogen threats and avoiding disease is fundamental to human survival. The Behavioral Immune System (BIS) framework outlines a set of psychological functions that may have evolved for this purpose. Disgust is a core emotion that plays a pivotal role in the BIS, as it activates the behavioral avoidance motives that prevent people from being in contact with pathogens. To date, there has been little agreement on how disgust sensitivity might underlie moral judgments. Here, we investigated moral violations of “purity” (assumed to elicit disgust) and violations of “harm” (assumed to elicit anger). We hypothesized that individual differences in BIS-related traits would be associated with greater disgust (vs. anger) reactivity to, and greater condemnation of purity (vs. harm) violations. The study was pre-registered (https://osf.io/57nm8/). Participants (N = 632) had to rate scenarios concerning moral wrongness or inappropriateness and regarding disgust and anger. To measure individual differences in the activation of the BIS, we used our recently developed Body Odor Disgust Scale (BODS), a BIS-related trait measure that assesses individual differences in feeling disgusted by body odors. In line with our predictions, we found that scores on the BODS relate more strongly to affective reactions to Purity, as compared to Harm, violations. In addition, BODS relates more strongly to Moral condemnation than to perceived Inappropriateness of an action, and to the condemnation of Purity violations as compared to Harm violations. These results suggest that the BIS is involved in moral judgment, although to some extent this role seems to be specific for violations of “moral purity”, a concept that might be rooted in disease avoidance. Data and scripts to analyze the data are available on the Open Science Framework (OSF) repository: https://osf.io/tk4x5/. Planned analyses are available at https://osf.io/x6g3u/ |
import { QueryCond } from '../../shared/model/query-cond.model';
export interface QuestionQueryCond extends QueryCond {
catId: string;
isSingleAnswer: boolean;
levelId: number;
}
|
def recover(self, job, job_wrapper):
job_id = job.get_job_runner_external_id()
galaxy_id_tag = job_wrapper.get_id_tag()
if job_id is None:
self.put(job_wrapper)
return
cjs = CondorJobState(job_wrapper=job_wrapper, files_dir=job_wrapper.working_directory)
cjs.job_id = str(job_id)
cjs.command_line = job.get_command_line()
cjs.job_wrapper = job_wrapper
cjs.job_destination = job_wrapper.job_destination
cjs.user_log = os.path.join(job_wrapper.working_directory, f'galaxy_{galaxy_id_tag}.condor.log')
cjs.register_cleanup_file_attribute('user_log')
if job.state in (model.Job.states.RUNNING, model.Job.states.STOPPED):
log.debug(f"({job.id}/{job.get_job_runner_external_id()}) is still in {job.state} state, adding to the DRM queue")
cjs.running = True
self.monitor_queue.put(cjs)
elif job.state == model.Job.states.QUEUED:
log.debug(f"({job.id}/{job.job_runner_external_id}) is still in DRM queued state, adding to the DRM queue")
cjs.running = False
self.monitor_queue.put(cjs) |
The research on description method of computer operational geographic information in wargame
Computer wargame systems offer a convenient and efficient way to simulate and study war, replacing the traditional manual wargame map with a digital one. Accurate operational geographic information becomes very important when the operational director makes game decisions. Drawing on the mature raster data coding techniques of GIS, the paper introduces a dedicated method to support decision-making by dividing operational geographic information into several raster layers, and it handles the resulting coding and processing of complex geographic information with the Run-Length coding method of GIS. The paper closes by discussing the application of raster data to wargame maps. |
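The run-length coding the abstract relies on stores each raster row as (value, run-length) pairs rather than one cell per entry, which compresses the long homogeneous stretches typical of terrain layers. A minimal sketch of the idea in Python (the sample row and its meaning are made up for illustration):

    def run_length_encode(row):
        # Collapse consecutive equal cells into (value, run_length) pairs.
        runs = []
        for cell in row:
            if runs and runs[-1][0] == cell:
                runs[-1][1] += 1
            else:
                runs.append([cell, 1])
        return [tuple(r) for r in runs]

    # One row of a hypothetical terrain layer: 0 = plain, 1 = forest
    print(run_length_encode([0, 0, 0, 1, 1, 0, 0, 0, 0]))  # [(0, 3), (1, 2), (0, 4)]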
// ConnectWithOptions connects to a local or remote Task Scheduler service. This
// function must run before any other functions in taskmaster can be used. If the
// serverName parameter is empty, a connection to the local Task Scheduler service
// will be attempted. If the user and password parameters are empty, the current
// token will be used for authentication.
func ConnectWithOptions(serverName, domain, username, password string) (TaskService, error) {
var err error
var taskService TaskService
if !taskService.isInitialized {
err = taskService.initialize()
if err != nil {
return TaskService{}, fmt.Errorf("error initializing ITaskService object: %v", err)
}
}
_, err = oleutil.CallMethod(taskService.taskServiceObj, "Connect", serverName, username, domain, password)
if err != nil {
return TaskService{}, fmt.Errorf("error connecting to Task Scheduler service: %v", getTaskSchedulerError(err))
}
if serverName == "" {
serverName, err = os.Hostname()
if err != nil {
return TaskService{}, err
}
}
if domain == "" {
domain = serverName
}
if username == "" {
currentUser, err := user.Current()
if err != nil {
return TaskService{}, err
}
username = strings.Split(currentUser.Username, `\`)[1]
}
taskService.connectedDomain = domain
taskService.connectedComputerName = serverName
taskService.connectedUser = username
res, err := oleutil.CallMethod(taskService.taskServiceObj, "GetFolder", `\`)
if err != nil {
return TaskService{}, fmt.Errorf("error getting the root folder: %v", getTaskSchedulerError(err))
}
taskService.rootFolderObj = res.ToIDispatch()
taskService.isConnected = true
return taskService, nil
} |
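// Illustrative sketch (not part of the original file): per the documentation above,
// passing empty strings connects to the local Task Scheduler service with the
// current token. Error handling here is deliberately minimal.
//
//	svc, err := ConnectWithOptions("", "", "", "")
//	if err != nil {
//		log.Fatalf("could not connect to the local Task Scheduler: %v", err)
//	}
//	_ = svc // the returned TaskService is used for further scheduling calls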
import * as React from 'react';
import { useTranslation } from 'react-i18next';
import { isUpstream } from '../../../utils/common';
import {
CENTOS,
CENTOS_EXAMPLE_CONTAINER,
FEDORA_EXAMPLE_CONTAINER,
RHEL,
RHEL_EXAMPLE_CONTAINER,
} from '../../../utils/strings';
type ContainerSourceHelpProps = {
imageName: string;
};
export const ContainerSourceHelp: React.FC<ContainerSourceHelpProps> = ({ imageName }) => {
const { t } = useTranslation();
const labelImage = () => {
const os = {
[RHEL]: RHEL_EXAMPLE_CONTAINER,
[CENTOS]: CENTOS_EXAMPLE_CONTAINER,
};
const label =
os[Object.keys(os).find((name) => imageName?.includes(name))] || FEDORA_EXAMPLE_CONTAINER;
return label;
};
const container = isUpstream() ? FEDORA_EXAMPLE_CONTAINER : labelImage();
return (
<div className="pf-c-form__helper-text" aria-live="polite" data-test="ContainerSourceHelp">
{t('kubevirt-plugin~Example: {{container}}', { container })}
</div>
);
};
|
import { Controller, Get, Render } from '@nestjs/common';
import NextService from './next.service';
interface AboutProperties {
message: string;
}
export interface IndexProperties {
message: string;
}
@Controller()
export default class NextController {
readonly aboutPageMessage = 'About Page.';
messageIndex = 'from server';
messageAbout = 'server';
constructor(private nextService: NextService) {}
@Render('index')
@Get()
public index(): IndexProperties {
return { message: this.messageIndex };
}
@Render('about')
@Get('about')
public about(): AboutProperties {
return { message: this.messageAbout };
}
}
|
Sick of fighting Uber, the taxi industry has decided to join them at their own game – by putting a fleet of unlicensed taxis on the road.
The new cabs look and feel just like a taxi, and charge the same for a trip. But they don't have taxi licences, and pay no taxi registration fee to the government. As recently as September taxi licence plates were selling for $165,000.
One of the private-hire taxis linked to 13CABS.
"The implication seems to be that if 13CABS can do it, anyone can provide a service without a licence," said Professor Allan Fels, former head of the Australian Competition and Consumer Commission and now member of the Uber global advisory board.
"This would also seemingly cut the value of licences to zero." |
/**
* Client object is a useful tool to handle any http interaction between client and Kloudless API
* Server.
*/
public abstract class BaseHttpClient {
static Gson gson = new GsonBuilder().setPrettyPrinting().create();
/**
* Get the prefix of URL
*
* @return String url prefix
*/
abstract public String getUrlPrefix();
/**
* Get default http headers
*
* @return Map, key, value pair of arguments
*/
abstract public Map<String, Object> getDefaultHeaders();
/**
* Get default http query string
*
* @return Map, key, value pair of query parameters
*/
abstract public Map<String, Object> getDefaultQueryParameters();
/**
* Http GET method
*
* @param url URL of API supported by Kloudless API server.
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase get(String url) throws ApiException {
return get(url, null);
}
/**
* Http GET method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase get(String url, Map<String, Object> headers) throws ApiException {
return runHttpTransaction("GET", url, "Json", headers, null);
}
/**
* Http GET method for download
*
* @param url URL of a Kloudless API endpoint
* @return ResponseRaw A ResponseRaw instance
* @throws ApiException An error if the response status code isn't 2xx
*/
public ResponseRaw getBinary(String url) throws ApiException {
return getBinary(url, null);
}
/**
* Http GET method for download
*
* @param url URL of a Kloudless API endpoint
* @param headers Additional headers for the request
* @return ResponseRaw A ResponseRaw instance
* @throws ApiException An error if the response status code isn't 2xx
*/
public ResponseRaw getBinary(String url, Map<String, Object> headers) throws ApiException {
return (ResponseRaw) runHttpTransaction(
"GET", url, "Json", headers, null, false);
}
/**
* Http POST method
*
* @param url URL of API supported by Kloudless API server.
* @param content The post data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase post(String url, Map<String, Object> content) throws ApiException {
return post(url, null, content);
}
/**
* Http POST method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param content The post data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase post(String url, Map<String, Object> headers, Map<String, Object> content)
throws ApiException {
return runHttpTransaction("POST", url, "Json", headers, content);
}
/**
* Http POST method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param uploadFile File object will be posted to the server.
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase post(String url, Map<String, Object> headers, File uploadFile)
throws ApiException {
return runHttpTransaction("POST", url, "File", headers, uploadFile);
}
/**
* Http PATCH method
*
* @param url URL of API supported by Kloudless API server.
* @param content The patch data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase patch(String url, Map<String, Object> content) throws ApiException {
return patch(url, null, content);
}
/**
* Http PATCH method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param content The patch data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase patch(String url, Map<String, Object> headers, Map<String, Object> content)
throws ApiException {
return runHttpTransaction("PATCH", url, "Json", headers, content);
}
/**
* Http PATCH method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param uploadFile File object will be patched to the server.
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase patch(String url, Map<String, Object> headers, File uploadFile)
throws ApiException {
return runHttpTransaction("PATCH", url, "File", headers, uploadFile);
}
/**
* Http PUT method
*
* @param url URL of API supported by Kloudless API server.
* @param content The put data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase put(String url, Map<String, Object> content) throws ApiException {
return put(url, null, content);
}
/**
* Http PUT method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param content The put data will be in the request body
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase put(String url, Map<String, Object> headers, Map<String, Object> content)
throws ApiException {
return runHttpTransaction("PUT", url, "Json", headers, content);
}
/**
* Http PUT method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @param uploadFile File object will be put to the server.
* @return ResponseBase Class and list of data or JsonObject is inside the data attribute.
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase put(String url, Map<String, Object> headers, File uploadFile)
throws ApiException {
return runHttpTransaction("PUT", url, "File", headers, uploadFile);
}
/**
* Http DELETE method
*
* @param url URL of API supported by Kloudless API server.
* @return ResponseBase object but might be empty content
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase delete(String url) throws ApiException {
return delete(url, null);
}
/**
* Http DELETE method
*
* @param url URL of API supported by Kloudless API server.
* @param headers Additional header parameters for this request.
* @return ResponseBase object but might be empty content
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
public ResponseBase delete(String url, Map<String, Object> headers) throws ApiException {
return runHttpTransaction("DELETE", url, "Json", headers, null);
}
/**
* This method finds the proper rawExecute method and dispatches the request
*
* @param method the HTTP method
* @param url the URL string
* @param format String should be one of Json, File or Form
* @param headers the key-value pairs of the request headers
* @param content the key-value pairs of the request body
* @return ResponseBase A ResponseBase instance
* @throws ApiException Error if the request failed
*/
private ResponseBase runHttpTransaction(
String method, String url, String format, Map<String, Object> headers,
Object content) throws ApiException {
return runHttpTransaction(method, url, format, headers, content, true);
}
/**
* This method finds the proper rawExecute method and dispatches the request
*
* @param method the HTTP method
* @param url the URL string
* @param format String should be one of Json, File or Form
* @param headers the key-value pairs of the request headers
* @param content the key-value pairs of the request body
* @param tryJson whether to try parsing the response body as a JSON object
* @return ResponseBase A ResponseBase instance
* @throws ApiException Error if the request failed
*/
private ResponseBase runHttpTransaction(
String method, String url, String format, Map<String, Object> headers,
Object content, Boolean tryJson) throws ApiException {
url = assembleUrl(url);
headers = mergeHeaders(headers);
format = format.toUpperCase();
HttpResponse httpResponse;
switch (format) {
case "JSON":
httpResponse = rawJsonExecute(method, url, headers, content);
break;
case "FORM":
httpResponse = rawFormExecute(method, url, headers, content);
break;
case "FILE":
httpResponse = rawFileExecute(method, url, headers, content);
break;
default:
throw new IllegalArgumentException("Invalid request format: " + format);
}
return responseFactory(httpResponse, url, headers, method, tryJson);
}
/**
* Executes the http request with form data content
*
* @param method the Http method
* @param url the url string
* @param headers the key, value pairs of http headers
* @param content the key, value pairs of http body
* @return HttpResponse the httpResponse object
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
protected HttpResponse rawFormExecute(String method, String url, Map<String, Object> headers,
Object content) throws ApiException {
try {
content = Optional.ofNullable(content).orElse(new HashMap<String, Object>());
List<NameValuePair> requestContent = new ArrayList<NameValuePair>();
for (Entry entry : ((Map<String, Object>) content).entrySet()) {
requestContent.add(
new BasicNameValuePair((String) entry.getKey(), (String) entry.getValue()));
}
HttpEntity entity = new UrlEncodedFormEntity(requestContent);
return handleResponse(assembleHttpRequest(method, url, entity, headers,
ContentType.APPLICATION_FORM_URLENCODED.getMimeType()));
} catch (ApiException | ParseException | IOException e) {
throw new ApiException(
"Run http " + method + " to " + url + " failed, " + e.getMessage(), e);
}
}
/**
* Executes the http request with json data content
*
* @param method the Http method
* @param url the url string
* @param headers the key, value pairs of http headers
* @param content the key, value pairs of http body
* @return HttpResponse the httpResponse object
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
protected HttpResponse rawJsonExecute(String method, String url, Map<String, Object> headers,
Object content) throws ApiException {
try {
content = (Map<String, Object>) Optional.ofNullable(content)
.orElse(new HashMap<String, Object>());
HttpEntity entity = new StringEntity(gson.toJson(content).toString());
return handleResponse(assembleHttpRequest(method, url, entity, headers,
ContentType.APPLICATION_JSON.getMimeType()));
} catch (ApiException | ParseException | IOException e) {
throw new ApiException(
"Run http " + method + " to " + url + " failed, " + e.getMessage(), e);
}
}
/**
* Executes the http request for file uploading
*
* @param method the Http method
* @param url the url string
* @param headers the key, value pairs of http headers
* @param uploadFile the file object to upload
* @return HttpResponse the httpResponse object
* @throws ApiException An error if any data other than 2xx from Kloudless API server or
* upstream services.
*/
protected HttpResponse rawFileExecute(String method, String url, Map<String, Object> headers,
Object uploadFile) throws ApiException {
try {
HttpEntity entity = EntityBuilder.create().setFile((File) uploadFile).build();
return handleResponse(assembleHttpRequest(method, url, entity, headers,
ContentType.APPLICATION_OCTET_STREAM.getMimeType()));
} catch (ApiException | ParseException | IOException e) {
throw new ApiException(
"Run http " + method + " to " + url + " failed, " + e.getMessage(), e);
}
}
/**
* Assembles an HttpRequest object
*
* @param method the Http method
* @param url the url string
* @param entity the object of HttpEntity
* @param headers the key, value pairs of http headers
* @param contentType the contentType of this http request header
* @return HttpRequestBase The HttpRequest object
* @throws UnsupportedEncodingException Error if the request entity uses an unsupported encoding
*/
private static HttpRequestBase assembleHttpRequest(String method, String url, HttpEntity entity,
Map<String, Object> headers, String contentType) throws UnsupportedEncodingException {
HttpRequestBase request = requestFactory(method, Application.getBaseUrl() + url, entity);
request = assembleHeaders(request, headers, contentType);
return request;
}
/**
* Receives the httpResponse and checks its status code
*
* @param request A HttpRequest object which has populated all essential data
* @return HttpResponse the HttpResponse object
* @throws ApiException An error if any data other than 2xx from Kloudless API server
* or upstream services.
* @throws ClientProtocolException Error when http call failed
* @throws IOException Error when http call failed
* @throws ParseException Error when parse the JsonObject failed
*/
private static HttpResponse handleResponse(HttpRequestBase request)
throws ClientProtocolException, IOException, ApiException, ParseException {
HttpClient httpClient = HttpClients.createDefault();
HttpResponse response = httpClient.execute(request);
if (response.getStatusLine().getStatusCode() < 200
|| response.getStatusLine().getStatusCode() >= 300) {
throw new ApiException("Get error response from API server, status code:"
+ response.getStatusLine().getStatusCode() + ", and its message"
+ EntityUtils.toString(response.getEntity()), null);
}
return response;
}
/**
* Merges headers from the API call with the default headers in the Client object;
* duplicated default header attributes are overwritten
*
* @param headers the key value in headers from API call.
* @return mergedHeaders the final key value of headers
*/
protected Map<String, Object> mergeHeaders(Map<String, Object> headers) {
headers = Optional.ofNullable(headers).orElse(new HashMap<String, Object>());
Map<String, Object> mergedMap = new HashMap<String, Object>();
mergedMap.putAll(this.getDefaultHeaders());
// overwritten by additional headers
mergedMap.putAll(headers);
return mergedMap;
}
/**
* Add headers from key value map to request object
*
* @param request the HttpRequest object
* @param headers the key value pair of headers
* @param contentType the content type in the headers
* @return HttpRequest object
*/
private static HttpRequestBase assembleHeaders(HttpRequestBase request,
Map<String, Object> headers, String contentType) {
for (Entry entry : headers.entrySet()) {
request.addHeader((String) entry.getKey(), (String) entry.getValue());
}
request.addHeader("User-Agent", "kloudless-java/" + Application.getSDKVersion());
request.addHeader("Content-Type", contentType);
return request;
}
/**
* Assemble the whole Url for http request
*
* @param url relative url path from parameters of API call
* @return String URL with baseUrl, prefix, paths and query string.
*/
protected String assembleUrl(String url) {
return this.getUrlPrefix() + appendDefaultQueryParameters(url);
}
/**
* Merges query parameters from the API call with existing query parameters, which might
* come from the Client object or be generated and populated into the Resource object.
*
* @param url relative url path from parameters of API call
* @return String the handled query string
*/
private String appendDefaultQueryParameters(String url) {
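// For example (hypothetical values): with default query parameters {"page_size": "100"},
// a call with "accounts/123?page=2" yields "accounts/123?page=2&page_size=100"
// (parameter order may vary, and a default "page" would be ignored because the
// caller already supplied one).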
if (url.indexOf("?") != -1) {
String[] urls = url.split("\\?");
url = urls[0] + "?" + toQueryString(mergeDeaultQueryParameters(getQueryParams(url)));
} else {
String queryString =
toQueryString(mergeDeaultQueryParameters(new HashMap<String, String>()));
if (!queryString.isEmpty()) {
url = url + "?" + queryString;
}
}
return url;
}
/**
* Parse the query string to key, value pair of query parameters
*
* @param url relative url path from parameters of API call
* @return map the key, value pairs of query parameters
*/
private Map<String, String> getQueryParams(String url) {
String[] urls = url.split("\\?");
String[] queryString = urls[1].split("&");
return Arrays.asList(queryString).stream().map(param -> param.split("=", 2))
.collect(Collectors.toMap(a -> a[0], a -> a[1]));
}
/**
* Merge the query parameters with defaultQueryParameters
*
* @param querys Key value pairs of query parameters from API call
* @return map A merged key value pair of query parameters
*/
private Map<String, String> mergeDeaultQueryParameters(Map<String, String> querys) {
for (Entry entry : this.getDefaultQueryParameters().entrySet()) {
if (!querys.containsKey(entry.getKey())) {
querys.put((String) entry.getKey(), (String) entry.getValue());
}
}
return querys;
}
/**
* Transfer key value pair of query parameters to query string
*
* @param queryParameters Key value pairs of query parameters
* @return String queryString
*/
private static String toQueryString(Map<String, String> queryParameters) {
return queryParameters.entrySet().stream()
.map(element -> element.getKey() + "=" + element.getValue())
.collect(Collectors.joining("&"));
}
/**
* Factory of httpRequest object
*
* @param method http method
* @param url the string of url
* @param entity the http body
* @return HttpRequest a http request object.
* @throws UnsupportedEncodingException Error if the entity encoding is unsupported
*/
private static HttpRequestBase requestFactory(String method, String url, HttpEntity entity)
throws UnsupportedEncodingException {
switch (method) {
case "GET":
return new HttpGet(url);
case "POST":
HttpPost post = new HttpPost(url);
post.setEntity(entity);
return post;
case "PUT":
HttpPut put = new HttpPut(url);
put.setEntity(entity);
return put;
case "PATCH":
HttpPatch patch = new HttpPatch(url);
patch.setEntity(entity);
return patch;
case "DELETE":
return new HttpDelete(url);
default:
throw new IllegalArgumentException("Invalid HTTP method: " + method);
}
}
/**
* Response object factory generates Resource/ResourceList/ResponseRaw object
*
* @param response httpResponse object
* @param url the url string of current http request
* @param headers the key value pairs of current http headers
* @param method the http method
* @param tryJson whether to try parsing the response body as a JSON object
* @return ResponseBase object, which could be ResponseRaw, Resource, or ResourceList depending on the
* JsonObject type from the API Server
* @throws ApiException Errors when transferring to a ResponseBase object
*/
private ResponseBase responseFactory(
HttpResponse response, String url, Map<String, Object> headers, String method,
Boolean tryJson) throws ApiException {
if (response != null && response.getEntity() != null
&& tryJson
&& ContentType.get(response.getEntity()).toString()
.equals(ContentType.APPLICATION_JSON.getMimeType())) {
try {
JsonObject data =
gson.fromJson(EntityUtils.toString(response.getEntity()), JsonObject.class);
if (data.has("type") && "object_list".equals(data.get("type").getAsString())) {
List<Resource> tmpList = new ArrayList<Resource>();
JsonArray array = data.get("objects").getAsJsonArray();
for (int i = 0; i < array.size(); i++) {
JsonObject element = array.get(i).getAsJsonObject();
Resource one = new Resource(element,
generateResourceUrl(url, element, method, true), headers);
tmpList.add(one);
}
ResourceList rList = new ResourceList(url, headers, tmpList, data);
return rList;
} else if (data.has("id")) {
return new Resource(data, generateResourceUrl(url, data, method, false),
headers);
} else {
return new ResponseJson(data, url, headers);
}
} catch (JsonSyntaxException | ParseException | IOException | URISyntaxException e) {
throw new ApiException("Generate Resource object failed, " + e.getMessage(), e);
}
} else {
return new ResponseRaw(response, url, headers);
}
}
/**
* This method temporarily handles the query string.
*
* @param url the url of this http query
* @param data the response object
* @param method the http method
* @param isListUrl boolean of the queried url is for list or not
* @return String of handled url
* @throws URISyntaxException the error occurs when url format is invalid
*/
private static String generateResourceUrl(String url, JsonObject data, String method,
boolean isListUrl) throws URISyntaxException {
if (url.indexOf("?") != -1) {
String[] unhandledUrl = url.split("\\?");
return assembleUrlWithId(unhandledUrl[0], data, method, isListUrl) + "?"
+ unhandledUrl[1];
}
return assembleUrlWithId(url, data, method, isListUrl);
}
/**
* Assembles or retrieves the url from the original http request
*
* @param url url of original http URL
* @param data JsonObject data
* @param method the http method
* @param isListUrl boolean of this url is querying list or object
* @return String the url with object Id
* @throws URISyntaxException errors if the url is invalid
*/
private static String assembleUrlWithId(String url, JsonObject data, String method,
boolean isListUrl) throws URISyntaxException {
Optional<JsonElement> href = Optional.ofNullable(data.get("href"));
if (href.isPresent()) {
String fromHref = href.get().getAsString();
URI tempUrl = new URIBuilder(fromHref).build();
return tempUrl.getPath();
}
Optional<JsonElement> id = Optional.ofNullable(data.get("id"));
if (id.isPresent()) {
Optional<JsonElement> dataOptional = Optional.ofNullable(data.get("type"));
Optional<JsonElement> apiOptional = Optional.ofNullable(data.get("api"));
String apiType = apiOptional.isPresent() ? apiOptional.get().getAsString() : "";
String dataType = dataOptional.isPresent() ? dataOptional.get().getAsString() : "";
if (apiType.equals("storage")) {
URI tempUrl = new URIBuilder(url).build();
String[] paths = tempUrl.getPath().split("/");
return String.join("/", Arrays.asList(paths).subList(0, 4))
+ String.format("/%s/%ss/%s", apiType, dataType, id.get().getAsString());
}
if (method.equals("POST") || isListUrl) {
URI tempUrl = new URIBuilder(url).build();
return tempUrl.getPath() + "/" + id.get().getAsString();
}
}
return url;
}
} |
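// Illustrative sketch (not part of the original file): a minimal concrete client built
// on the abstract class above. The prefix, header name and token source are assumptions
// for the example only; the SDK's real subclasses may wire these differently.
//
//     public class SimpleClient extends BaseHttpClient {
//         @Override public String getUrlPrefix() { return "/v1"; }
//         @Override public Map<String, Object> getDefaultHeaders() {
//             Map<String, Object> headers = new HashMap<>();
//             headers.put("Authorization", "Bearer " + System.getenv("API_TOKEN"));
//             return headers;
//         }
//         @Override public Map<String, Object> getDefaultQueryParameters() {
//             return new HashMap<>();
//         }
//     }
//
//     // Usage: ResponseBase response = new SimpleClient().get("/accounts");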
Charlatans. It's been a bull market for fake populists the past few years. With wages stagnant, households feel like inflation is higher than it is, and they keep hearing that it is from fact-challenged fraudsters. If it's not pop historian Niall Ferguson putting on his tinfoil hat and saying inflation is "really" 10 percent, it's pop pundit Erick Erickson bemoaning rising milk and bread prices that he knows aren't rising. But the truth is catching up. After playing the "it's-always-1980" game where stagflation is always and everywhere the problem, Erickson has had to admit that it's just that -- a game. And he had to admit it, because Krugman called him on it. In other words, nerds bearing charts beat demagogues bearing derp. (For the uninitiated, "derp", as Noah Smith defines it, means loudly repeating things you believe in the face of contrary evidence).
Cranks. It's also been a bull market for crackpot economists the past few years. Now, so-called Austrian economists did do a good job predicting the housing bubble during the boom, but they could hardly have done a worse job during the bust. They've looked at the Fed's ballooning balance sheet, and screamed that Zimbabwe is coming, Zimbabwe is coming! Well, it hasn't, and it's not. But that hasn't deterred the Austrians: they think the price of gold shows the "true" inflation from the monetary base expanding, so they've been right all along! But what about now? Gold is down 24 percent from a year ago, and 36 percent from its August 2011 highs -- and that despite more "money-printing" by the Fed. So where's the inflation now? (And, sorry Austrians, an increase in the monetary base doesn't count if there's no increase in prices).
Economists. Conservative economists haven't done much better. They too have looked at the Fed's balance sheet and fallen for the inflation hype. Marty Feldstein, for one, has predicted again and again and again that inflation is a risk -- only to be wrong and wrong and wrong. Even after admitting these errors, Feldstein just reiterated his fear of future inflation. And then he called on the Fed to start tapering its bond purchases now, because ... I have no idea why. Yes, Feldstein said something about financial stability, which is the new bugaboo of failed inflationistas, but he didn't provide any actual evidence. He just said that rising real interest rates wouldn't hurt the economy even though they would hurt housing. It didn't make much sense. Nor did it when the Bank for International Settlements or Raghuram Rajan said much the same. It's a depressing kind of progress. Conservatives want tighter money, but they know they can't justify it by crying inflation anymore -- so they cry financial stability instead.
Nothing can kill zombie ideas. Not facts. Not figures. And certainly not failed predictions. Now, inflation fearmongers couldn't have a worse track record than they do, but it won't change what they think the Fed should do. At most, it will change why they think the Fed should do it. To use a technical term, it's derp. And it's derp the data-driven Fed shouldn't be intimidated by -- though its tapering talk suggests otherwise.
But Bernanke should remember: Derp doesn't fall under the Fed's dual mandate.
|
/**
* @author Florian Weger
*/
public class TestNavigator {
private static Artifact ARTIFACT;
private static CollectionArtifact COLLECTION;
private static Container CONTAINER;
private static Project PROJECT;
private static MapArtifact MAP;
private static MetaModel META_MODEL;
private static String LONG_KEY = "L";
private static Long LONG = 1L;
private static String STRING_KEY = "S";
private static String STRING = "STRING";
private static String CHAR_KEY = "C";
private static Character CHARACTER = 'C';
private static String NULL_KEY = "N";
private static Object NULL = null;
private static String COLLECTION_PATH = "COLLECTION";
private static String CONTAINER_PATH = "CONTAINER";
private static String PROJECT_PATH = "PROJECT";
private static String MAP_PATH = "MAP";
private static String META_MODEL_PATH = "METAMODEL";
private static NavigatorProvider navigators;
@BeforeClass
public static void setUpBeforeClass() {
navigators = CLOUD.queryFactory().navigatorProvider();
ARTIFACT = WS.createArtifact();
COLLECTION = WS.createCollection(true);
CONTAINER = WS.createPackage();
PROJECT = WS.createProject();
MAP = WS.createMap();
META_MODEL = WS.createMetaModel();
ARTIFACT.setPropertyValue(WS, LONG_KEY, LONG);
ARTIFACT.setPropertyValue(WS, STRING_KEY, STRING);
ARTIFACT.setPropertyValue(WS, CHAR_KEY, CHARACTER);
ARTIFACT.setPropertyValue(WS, NULL_KEY, NULL);
ARTIFACT.setPropertyValue(WS, COLLECTION_PATH, COLLECTION);
COLLECTION.setPropertyValue(WS, CONTAINER_PATH, CONTAINER);
CONTAINER.setPropertyValue(WS, PROJECT_PATH, PROJECT);
PROJECT.setPropertyValue(WS, MAP_PATH, MAP);
MAP.setPropertyValue(WS, META_MODEL_PATH, META_MODEL);
}
@AfterClass
public static void tearDown() {
WS.rollbackAll();
}
@Test
public void testTo() {
Artifact result = navigators.from(ARTIFACT).to(COLLECTION_PATH).get();
assertEquals(COLLECTION, result);
}
@Test(expected = PropertyDoesNotExistException.class)
public void testToException() {
navigators.from(ARTIFACT).to(PROJECT_PATH).get();
}
@Test(expected = UnsupportedOperationException.class)
public void testToException2() {
navigators.from(Contexts.of("e")).get(Contexts.empty().put("e", new Object()));
}
@Test
public void testToCollection() {
CollectionArtifact result = navigators.from(ARTIFACT).toCollection(COLLECTION_PATH).get();
assertEquals(COLLECTION, result);
}
@Test
public void testToMap() {
MapArtifact result = navigators.from(PROJECT).toMap(MAP_PATH).get();
assertEquals(MAP, result);
}
@Test
public void testToMetaModel() {
MetaModel result = navigators.from(MAP).toMetaModel(META_MODEL_PATH).get();
assertEquals(META_MODEL, result);
}
@Test
public void testToProject() {
Project result = navigators.from(CONTAINER).toProject(PROJECT_PATH).get();
assertEquals(PROJECT, result);
}
@Test
public void testToContainer() {
Container result = navigators.from(COLLECTION).toContainer(CONTAINER_PATH).get();
assertEquals(CONTAINER, result);
}
@Test
public void testToOwner() {
Owner result = navigators.from(ARTIFACT).toOwner().get();
assertEquals(ARTIFACT.getOwner(), result);
}
@Test
public void testToTool() {
Tool result = navigators.from(ARTIFACT).toTool().get();
assertEquals(ARTIFACT.getTool(), result);
}
@Test
public void testToNumber() {
Number result = navigators.from(ARTIFACT).toNumber(LONG_KEY).get();
assertEquals(LONG, result);
}
@Test
public void testToCharacter() {
Character result = navigators.from(ARTIFACT).toCharacter(CHAR_KEY).get();
assertEquals(CHARACTER, result);
}
@Test
public void testToString() {
String result = navigators.from(ARTIFACT).toString(STRING_KEY).get();
assertEquals(STRING, result);
}
} |
// Initializes an Execution Engine object depending on the parameters.
public static ExecutionEngine initializeEngine(Parameters parameters, List<JoiningNetworkOfTupleSets> candidateNetworks,
SchemaGraph schemaGraph, SQLDatabase database, List<TupleSet> tupleSets) {
ExecutionEngine executionEngine = null;
switch(parameters.executionEngineAlgorithm) {
case Naive:
executionEngine = new NaiveExecutionEngine(
candidateNetworks, schemaGraph, database, tupleSets,
parameters.maxTuples, parameters.keywords,
parameters.andSemantics, parameters.printResultsOrderedByTable,
parameters.efficientPlanGenerator
);
break;
case Sparse:
executionEngine = new SparseExecutionEngine(
candidateNetworks, schemaGraph, database, tupleSets,
parameters.maxTuples, parameters.keywords,
parameters.andSemantics, parameters.printResultsOrderedByTable,
parameters.efficientPlanGenerator
);
break;
case SinglePipelined:
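// Not implemented by this initializer; callers receive a null engine for this algorithm.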
executionEngine = null;
break;
case GlobalPipelined:
executionEngine = new GlobalPipelineExecutionEngine(
candidateNetworks, tupleSets, schemaGraph, database,
parameters.maxTuples, parameters.keywords,
parameters.andSemantics, parameters.printResultsOrderedByTable
);
break;
}
return executionEngine;
} |
def text_in_page(tmp_proj, page_path, text):
page = tmp_proj / "site" / page_path
assert page.exists(), "%s does not exist" % page_path
contents = page.read_text(encoding="utf-8")
return re.search(text, contents) |
export declare enum EmployeeTransferStatusEnum {
ets_New = "ets_New",
ets_Processing = "ets_Processing",
ets_Sent = "ets_Sent",
ets_Received = "ets_Received",
ets_Accepted = "ets_Accepted",
ets_Error = "ets_Error"
}
//# sourceMappingURL=EmployeeTransferStatusEnum.d.ts.map |
def upload(self, localpath, path=None, interactive=False):
if path is None:
if localpath.startswith(self._config.getLocalRoot()):
path = localpath[len(self._config.getLocalRoot()):]
else:
path = '/'
self._folder_count = 1
self._num_folders = self.getNumLocalFolders(path)
self._file_count = 1
self._num_files = self.getNumLocalFiles(path)
if interactive:
if self._num_folders + self._num_files > 2:
self._bar = progressbar.ProgressBar(width=80)
self._upload(localpath, path)
self._folder_count = 0
self._file_count = 0 |
#include <iostream>
#include<bits/stdc++.h>
using namespace std;
long long arr[100005],l[100005],r[100005],d[100005],ans[100005],q[100005];
int main() {
long long int t,n,m,k,x,y,i,j,count=0;
cin>>n>>m>>k;
for(i=1;i<=n;i++){
cin>>arr[i];
}
for (i = 1; i <= m; i++){
cin >> l[i] >> r[i] >> d[i];
}
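// Offline difference-array technique: q[] first counts, via a prefix sum, how many
// times each of the m operations is requested by the k queries; ans[] then accumulates
// each operation's total delta over its [l, r] range, and a final prefix sum turns
// those deltas into per-element additions to arr[].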
for (i = 1; i<= k; i++){
cin >> x >> y;
q[x]++;
q[y + 1]--;
}
for (i = 1; i <= m; i++)
q[i] += q[i - 1];
for (i = 1; i <= m; i++){
long long int left = l[i];
long long int right = r[i];
ans[left] += (d[i] * q[i]);
ans[right + 1] -= (d[i] * q[i]);
}
for (i = 1; i <= n; i++)
ans[i] += ans[i - 1];
for (i = 1; i<= n; i++)
cout << arr[i] + ans[i] << ' ';
return 0;
}
|
#!/usr/bin/env python3
import os
import random
import time
import unittest
from collections import defaultdict
from functools import wraps
import cereal.messaging as messaging
from cereal import car
from common.params import Params
from common.spinner import Spinner
from common.timeout import Timeout
from panda import Panda
from selfdrive.boardd.boardd import can_list_to_can_capnp
from selfdrive.car import make_can_msg
from selfdrive.test.helpers import phone_only, with_processes
def reset_panda(f):
@wraps(f)
def wrapper(*args, **kwargs):
p = Panda()
for i in [0, 1, 2, 0xFFFF]:
p.can_clear(i)
p.reset()
p.close()
f(*args, **kwargs)
return wrapper
class TestBoardd(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.environ['STARTED'] = '1'
os.environ['BOARDD_LOOPBACK'] = '1'
cls.spinner = Spinner()
@classmethod
def tearDownClass(cls):
cls.spinner.close()
@phone_only
@reset_panda
@with_processes(['pandad'])
def test_loopback(self):
# wait for boardd to init
time.sleep(2)
with Timeout(60, "boardd didn't start"):
sm = messaging.SubMaster(['pandaStates'])
while sm.rcv_frame['pandaStates'] < 1:
sm.update(1000)
# boardd blocks on CarVin and CarParams
cp = car.CarParams.new_message()
safety_config = car.CarParams.SafetyConfig.new_message()
safety_config.safetyModel = car.CarParams.SafetyModel.allOutput
cp.safetyConfigs = [safety_config]
params = Params()
params.put("CarVin", b"0"*17)
params.put_bool("ControlsReady", True)
params.put("CarParams", cp.to_bytes())
sendcan = messaging.pub_sock('sendcan')
can = messaging.sub_sock('can', conflate=False, timeout=100)
time.sleep(1)
n = 1000
for i in range(n):
self.spinner.update(f"boardd loopback {i}/{n}")
sent_msgs = defaultdict(set)
for _ in range(random.randrange(10)):
to_send = []
for __ in range(random.randrange(100)):
bus = random.randrange(3)
addr = random.randrange(1, 1<<29)
dat = bytes([random.getrandbits(8) for _ in range(random.randrange(1, 9))])
sent_msgs[bus].add((addr, dat))
to_send.append(make_can_msg(addr, dat, bus))
sendcan.send(can_list_to_can_capnp(to_send, msgtype='sendcan'))
max_recv = 10
while max_recv > 0 and any(len(sent_msgs[bus]) for bus in range(3)):
recvd = messaging.drain_sock(can, wait_for_one=True)
for msg in recvd:
for m in msg.can:
if m.src >= 128:
k = (m.address, m.dat)
assert k in sent_msgs[m.src-128]
sent_msgs[m.src-128].discard(k)
max_recv -= 1
# if a set isn't empty, messages got dropped
for bus in range(3):
assert not len(sent_msgs[bus]), f"loop {i}: bus {bus} missing {len(sent_msgs[bus])} messages"
if __name__ == "__main__":
unittest.main()
|
import React from 'react'
import Message from './Message'
import { backgroundColor } from '../../globalStyles'
interface Props {
visible: boolean
banner?: string
message?: string
onPress?: () => void
}
const Pending: React.FC<Props> = ({ visible, banner, message, onPress }) => {
return (
<Message
visible={visible}
banner={banner || 'Pending'}
message={message || ''}
icon="alarm"
backgroundColor={backgroundColor}
onPress={onPress}
/>
)
}
export default Pending
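// Illustrative usage sketch (not part of the original file); the banner, message and
// handler values are made up:
//
//   <Pending
//     visible={true}
//     banner="Credential offer pending"
//     message="Waiting for the issuer to respond"
//     onPress={() => console.log('pending notice pressed')}
//   />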
|