content
stringlengths
10
4.9M
#include "config.h"

#include "bformatdec.h"

#include <algorithm>
#include <array>
#include <cmath>
#include <utility>

#include "almalloc.h"
#include "alnumbers.h"
#include "filters/splitter.h"
#include "front_stablizer.h"
#include "mixer.h"
#include "opthelpers.h"


/* Constructs the B-Format decoder. A non-empty coeffslf enables dual-band
 * decoding: each input channel gets a band-splitter (crossover at
 * xover_f0norm) and separate high/low-frequency gain sets; otherwise a single
 * gain set per channel is used. The optional stablizer is stored for use by
 * processStablize.
 */
BFormatDec::BFormatDec(const size_t inchans, const al::span<const ChannelDec> coeffs,
    const al::span<const ChannelDec> coeffslf, const float xover_f0norm,
    std::unique_ptr<FrontStablizer> stablizer)
    : mStablizer{std::move(stablizer)}, mDualBand{!coeffslf.empty()}, mChannelDec{inchans}
{
    if(!mDualBand)
    {
        /* Single-band: copy the per-output gains for each input channel.
         * coeffs is indexed [output][input], so transpose into
         * mChannelDec[input].mGains.Single[output].
         */
        for(size_t j{0};j < mChannelDec.size();++j)
        {
            float *outcoeffs{mChannelDec[j].mGains.Single};
            for(const ChannelDec &incoeffs : coeffs)
                *(outcoeffs++) = incoeffs[j];
        }
    }
    else
    {
        /* Initialize one crossover filter and copy it to the rest so all
         * channels share identical filter state/coefficients.
         */
        mChannelDec[0].mXOver.init(xover_f0norm);
        for(size_t j{1};j < mChannelDec.size();++j)
            mChannelDec[j].mXOver = mChannelDec[0].mXOver;

        /* Transpose the high- and low-frequency gain matrices as above. */
        for(size_t j{0};j < mChannelDec.size();++j)
        {
            float *outcoeffs{mChannelDec[j].mGains.Dual[sHFBand]};
            for(const ChannelDec &incoeffs : coeffs)
                *(outcoeffs++) = incoeffs[j];

            outcoeffs = mChannelDec[j].mGains.Dual[sLFBand];
            for(const ChannelDec &incoeffs : coeffslf)
                *(outcoeffs++) = incoeffs[j];
        }
    }
}


/* Decodes SamplesToDo samples from the B-Format InSamples (one line per input
 * channel) and mixes the result into OutBuffer. In dual-band mode each input
 * is band-split first and each band mixed with its own gain set.
 */
void BFormatDec::process(const al::span<FloatBufferLine> OutBuffer,
    const FloatBufferLine *InSamples, const size_t SamplesToDo)
{
    ASSUME(SamplesToDo > 0);

    if(mDualBand)
    {
        const al::span<float> hfSamples{mSamples[sHFBand].data(), SamplesToDo};
        const al::span<float> lfSamples{mSamples[sLFBand].data(), SamplesToDo};
        for(auto &chandec : mChannelDec)
        {
            /* Split this input into high- and low-frequency bands, then mix
             * each band to all outputs. The same gains are passed as current
             * and target, so no gain stepping occurs.
             */
            chandec.mXOver.process({InSamples->data(), SamplesToDo}, hfSamples.data(),
                lfSamples.data());
            MixSamples(hfSamples, OutBuffer, chandec.mGains.Dual[sHFBand],
                chandec.mGains.Dual[sHFBand], 0, 0);
            MixSamples(lfSamples, OutBuffer, chandec.mGains.Dual[sLFBand],
                chandec.mGains.Dual[sLFBand], 0, 0);
            ++InSamples;
        }
    }
    else
    {
        for(auto &chandec : mChannelDec)
        {
            MixSamples({InSamples->data(), SamplesToDo}, OutBuffer, chandec.mGains.Single,
                chandec.mGains.Single, 0, 0);
            ++InSamples;
        }
    }
}

/* Decodes like process(), then applies front-stage stabilization: the
 * front-left/right pair is converted to mid/side, the mid signal is
 * band-split and partially re-panned toward the center channel (cidx), and
 * all other channels are delayed to stay time-aligned with the stablizer's
 * processing delay.
 */
void BFormatDec::processStablize(const al::span<FloatBufferLine> OutBuffer,
    const FloatBufferLine *InSamples, const size_t lidx, const size_t ridx, const size_t cidx,
    const size_t SamplesToDo)
{
    ASSUME(SamplesToDo > 0);

    /* Move the existing direct L/R signal out so it doesn't get processed by
     * the stablizer. Add a delay to it so it stays aligned with the stablizer
     * delay.
     */
    float *RESTRICT mid{al::assume_aligned<16>(mStablizer->MidDirect.data())};
    float *RESTRICT side{al::assume_aligned<16>(mStablizer->Side.data())};
    for(size_t i{0};i < SamplesToDo;++i)
    {
        /* mid = L+R, side = L-R (standard mid/side encoding). */
        mid[FrontStablizer::DelayLength+i] = OutBuffer[lidx][i] + OutBuffer[ridx][i];
        side[FrontStablizer::DelayLength+i] = OutBuffer[lidx][i] - OutBuffer[ridx][i];
    }
    std::fill_n(OutBuffer[lidx].begin(), SamplesToDo, 0.0f);
    std::fill_n(OutBuffer[ridx].begin(), SamplesToDo, 0.0f);

    /* Decode the B-Format input to OutBuffer. */
    process(OutBuffer, InSamples, SamplesToDo);

    /* Apply a delay to all channels, except the front-left and front-right, so
     * they maintain correct timing.
     */
    const size_t NumChannels{OutBuffer.size()};
    for(size_t i{0u};i < NumChannels;i++)
    {
        if(i == lidx || i == ridx)
            continue;

        auto &DelayBuf = mStablizer->DelayBuf[i];
        auto buffer_end = OutBuffer[i].begin() + SamplesToDo;
        if LIKELY(SamplesToDo >= FrontStablizer::DelayLength)
        {
            /* Rotate the newest DelayLength samples to the front, then swap
             * them with the stored delay buffer: the output gains the old
             * delayed samples and the delay buffer keeps the newest ones.
             */
            auto delay_end = std::rotate(OutBuffer[i].begin(),
                buffer_end - FrontStablizer::DelayLength, buffer_end);
            std::swap_ranges(OutBuffer[i].begin(), delay_end, DelayBuf.begin());
        }
        else
        {
            /* Fewer samples than the delay length: swap what we have and
             * rotate the delay buffer so its oldest samples come first.
             */
            auto delay_start = std::swap_ranges(OutBuffer[i].begin(), buffer_end,
                DelayBuf.begin());
            std::rotate(DelayBuf.begin(), delay_start, DelayBuf.end());
        }
    }

    /* Include the side signal for what was just decoded. */
    for(size_t i{0};i < SamplesToDo;++i)
        side[FrontStablizer::DelayLength+i] += OutBuffer[lidx][i] - OutBuffer[ridx][i];

    /* Combine the delayed mid signal with the decoded mid signal. */
    float *tmpbuf{mStablizer->TempBuf.data()};
    auto tmpiter = std::copy(mStablizer->MidDelay.cbegin(), mStablizer->MidDelay.cend(), tmpbuf);
    for(size_t i{0};i < SamplesToDo;++i,++tmpiter)
        *tmpiter = OutBuffer[lidx][i] + OutBuffer[ridx][i];
    /* Save the newest samples for next time. */
    std::copy_n(tmpbuf+SamplesToDo, mStablizer->MidDelay.size(), mStablizer->MidDelay.begin());

    /* Apply an all-pass on the signal in reverse. The future samples are
     * included with the all-pass to reduce the error in the output samples
     * (the smaller the delay, the more error is introduced).
     */
    mStablizer->MidFilter.applyAllpassRev({tmpbuf, SamplesToDo+FrontStablizer::DelayLength});

    /* Now apply the band-splitter, combining its phase shift with the reversed
     * phase shift, restoring the original phase on the split signal.
     */
    mStablizer->MidFilter.process({tmpbuf, SamplesToDo}, mStablizer->MidHF.data(),
        mStablizer->MidLF.data());

    /* This pans the separate low- and high-frequency signals between being on
     * the center channel and the left+right channels. The low-frequency signal
     * is panned 1/3rd toward center and the high-frequency signal is panned
     * 1/4th toward center. These values can be tweaked.
     */
    const float cos_lf{std::cos(1.0f/3.0f * (al::numbers::pi_v<float>*0.5f))};
    const float cos_hf{std::cos(1.0f/4.0f * (al::numbers::pi_v<float>*0.5f))};
    const float sin_lf{std::sin(1.0f/3.0f * (al::numbers::pi_v<float>*0.5f))};
    const float sin_hf{std::sin(1.0f/4.0f * (al::numbers::pi_v<float>*0.5f))};
    for(size_t i{0};i < SamplesToDo;i++)
    {
        /* Reconstruct mid/center from the panned split bands plus the
         * (delayed) direct mid signal.
         */
        const float m{mStablizer->MidLF[i]*cos_lf + mStablizer->MidHF[i]*cos_hf + mid[i]};
        const float c{mStablizer->MidLF[i]*sin_lf + mStablizer->MidHF[i]*sin_hf};
        const float s{side[i]};

        /* The generated center channel signal adds to the existing signal,
         * while the modified left and right channels replace.
         */
        OutBuffer[lidx][i] = (m + s) * 0.5f;
        OutBuffer[ridx][i] = (m - s) * 0.5f;
        OutBuffer[cidx][i] += c * 0.5f;
    }
    /* Move the delayed mid/side samples to the front for next time. */
    auto mid_end = mStablizer->MidDirect.cbegin() + SamplesToDo;
    std::copy(mid_end, mid_end+FrontStablizer::DelayLength, mStablizer->MidDirect.begin());
    auto side_end = mStablizer->Side.cbegin() + SamplesToDo;
    std::copy(side_end, side_end+FrontStablizer::DelayLength, mStablizer->Side.begin());
}


/* Factory helper; simply forwards to the constructor. */
std::unique_ptr<BFormatDec> BFormatDec::Create(const size_t inchans,
    const al::span<const ChannelDec> coeffs, const al::span<const ChannelDec> coeffslf,
    const float xover_f0norm, std::unique_ptr<FrontStablizer> stablizer)
{
    return std::make_unique<BFormatDec>(inchans, coeffs, coeffslf, xover_f0norm,
        std::move(stablizer));
}
def convert_to_human_readable(id_to_word, arr, max_num_to_print):
    """Convert a 2-D array of token ids into space-joined word strings.

    Args:
        id_to_word: Mapping from token id to a word (values are passed
            through ``str``, so non-string words are allowed).
        arr: 2-D array-like of token ids, shape (num_sequences, seq_len).
        max_num_to_print: Maximum number of rows to convert.

    Returns:
        A list of at most ``max_num_to_print`` strings, one per sequence,
        with the words of each sequence joined by single spaces.
    """
    assert arr.ndim == 2
    samples = []
    # Fix: `xrange` is Python 2 only and raises NameError on Python 3;
    # `range` is the portable equivalent here.
    for sequence_id in range(min(len(arr), max_num_to_print)):
        buffer_str = ' '.join(
            [str(id_to_word[index]) for index in arr[sequence_id, :]])
        samples.append(buffer_str)
    return samples
/* Copyright 2021 Gravitational, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package prompt import ( "os" "sync" ) var ( stdinOnce = &sync.Once{} stdin *ContextReader ) // Stdin returns a singleton ContextReader wrapped around os.Stdin. // // os.Stdin should not be used directly after the first call to this function // to avoid losing data. Closing this ContextReader will prevent all future // reads for all callers. func Stdin() *ContextReader { stdinOnce.Do(func() { stdin = NewContextReader(os.Stdin) }) return stdin }
def readResults(filename):
    """Read a whitespace-delimited results table into per-column lists.

    The first line of the file holds column labels; every following
    non-blank line holds one float per column.

    Args:
        filename: Path to the results file.

    Returns:
        A list of columns, each ``[label, value1, value2, ...]`` where the
        values are floats. Returns ``[]`` for an empty file.

    Raises:
        OSError: If the file cannot be opened.
        ValueError: If a data cell cannot be parsed as a float.
        IndexError: If a data line has fewer columns than the header.
    """
    # Fix: use a context manager so the file handle is closed even if
    # parsing below raises (the original leaked the handle on error).
    with open(filename, 'r') as resultfile:
        lines = resultfile.readlines()
    # Robustness: an empty file has no header line to split.
    if not lines:
        return []
    labels = lines[0].split()
    resultlist = [[label] for label in labels]
    for line in lines[1:]:
        templist = line.split()
        # Skip blank lines, matching the original behavior.
        if not templist:
            continue
        for col in range(len(resultlist)):
            resultlist[col].append(float(templist[col]))
    return resultlist
package types

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
	"time"

	"github.com/KuChainNetwork/kuchain/chain/constants"
	"github.com/KuChainNetwork/kuchain/singleton"
	abci "github.com/tendermint/tendermint/abci/types"
	"github.com/tendermint/tendermint/libs/kv"

	chaintype "github.com/KuChainNetwork/kuchain/chain/types"
	"github.com/cosmos/cosmos-sdk/codec"
	sdk "github.com/cosmos/cosmos-sdk/types"
	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
	rpcclient "github.com/tendermint/tendermint/rpc/client/local"
)

// Attribute is one key/value entry of an event log.
type Attribute struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}

// EventLog is a typed group of attributes from a transaction event.
type EventLog struct {
	Type       string      `json:"type"`
	Attributes []Attribute `json:"attributes"`
}

// eventsLog is the per-message portion of a transaction's raw log.
type eventsLog struct {
	MsgIndex int64      `json:"msg_index"`
	Log      string     `json:"log"`
	Events   []EventLog `json:"events"`
}

// rawLog mirrors a transaction's execution result (code, gas, events).
type rawLog struct {
	Code      uint32      `json:"code"`
	Data      string      `json:"data"`
	Log       []eventsLog `json:"log"`
	Info      string      `json:"info"`
	GasWanted int64       `json:"gas_wanted"`
	GasUsed   int64       `json:"gas_used"`
	CodeSpace string      `json:"code_space"`
}

// fee is the JSON-friendly form of a transaction's fee.
type fee struct {
	Amount string `json:"amount" yaml:"amount"`
	Gas    uint64 `json:"gas" yaml:"gas"`
	Payer  string `json:"payer" yaml:"payer"`
}

// ToString renders the fee as a JSON string (marshal errors are ignored;
// an empty string is returned on failure).
func (f fee) ToString() string {
	bz, _ := json.Marshal(f)
	return string(bz)
}

// Txm is the flattened, display-oriented form of a chain transaction.
type Txm struct {
	Height     int64                    `json:"height"`
	TxHash     []byte                   `json:"tx_hash"`
	Msgs       []json.RawMessage        `json:"msg"`
	Fee        fee                      `json:"fee"`
	Signatures []chaintype.StdSignature `json:"signatures"`
	Memo       string                   `json:"memo"`
	RawLog     rawLog                   `json:"raw_log"`
	Time       string                   `json:"time"`
	Senders    []string                 `json:"senders"`
}

// ReqTx wraps a Txm for handler callbacks.
type ReqTx struct {
	Txm
}

// PrettifyJSON renders each message of the tx that implements
// chaintype.Prettifier as raw JSON; messages without that interface are
// silently skipped.
func PrettifyJSON(ctx sdk.Context, tx chaintype.StdTx, Cdc *codec.Codec) ([]json.RawMessage, error) {
	alias := struct {
		Msgs []json.RawMessage `json:"msgs"`
	}{
		Msgs: make([]json.RawMessage, 0, len(tx.Msgs)),
	}
	for _, msg := range tx.Msgs {
		if msg, ok := msg.(chaintype.Prettifier); ok {
			raw, err := msg.PrettifyJSON(Cdc)
			if err != nil {
				return nil, sdkerrors.Wrapf(err, "prettify json to msg")
			}
			alias.Msgs = append(alias.Msgs, raw)
		}
	}
	return alias.Msgs, nil
}

// RebuildTx assembles a Txm from a decoded StdTx plus its block metadata and
// raw execution log. Returns the zero value when Cdc is nil. Unmarshal and
// prettify errors are intentionally ignored (best-effort display data).
func RebuildTx(ctx sdk.Context, stdTx chaintype.StdTx, Cdc *codec.Codec, Height int64, Time time.Time,
	hash []byte, rawLog json.RawMessage) (btx Txm) {
	if Cdc == nil {
		return
	}
	json.Unmarshal(rawLog, &btx.RawLog)
	btx.Msgs, _ = PrettifyJSON(ctx, stdTx, Cdc)
	btx.Height = Height
	btx.Time = Time.Format("2006-01-02T15:04:05.999999999Z")
	btx.Memo = stdTx.Memo
	btx.TxHash = hash
	btx.Signatures = stdTx.Signatures
	btx.Senders = GetSenders(stdTx, Cdc)
	btx.Fee = fee{
		Amount: stdTx.Fee.Amount.String(),
		Gas:    stdTx.Fee.Gas,
		Payer:  stdTx.Fee.Payer.String(),
	}
	ctx.Logger().Debug("RebuildTx", "hash", base64.StdEncoding.EncodeToString(hash), "btx", btx)
	return
}

// ReqTxHandle is a callback invoked for each rebuilt transaction.
type ReqTxHandle func(ctxSdk sdk.Context, tx ReqTx)

// ReqEventsHandle is a callback invoked for a block's events.
type ReqEventsHandle func(ctxSdk sdk.Context, ev ReqEvents)

// makeEventForTxm converts an ABCI event into an sdk.Event.
func makeEventForTxm(aEvent abci.Event) sdk.Event {
	return sdk.Event{
		Type:       aEvent.Type,
		Attributes: aEvent.Attributes,
	}
}

// makeFeeEvent synthesizes a "payfee" event describing the fee transfer from
// the payer to the fee collector at the given block height/time.
func makeFeeEvent(stdTx chaintype.StdTx, height int64, time2 time.Time) (Event sdk.Event) {
	Event.Type = "payfee"
	Event.Attributes = append(Event.Attributes, kv.Pair{
		Key:   []byte("amount"),
		Value: []byte(stdTx.Fee.Amount.String()),
	})
	Event.Attributes = append(Event.Attributes, kv.Pair{
		Key:   []byte("from"),
		Value: []byte(stdTx.Fee.Payer.String()),
	})
	Event.Attributes = append(Event.Attributes, kv.Pair{
		Key:   []byte("to"),
		Value: []byte(constants.GetFeeCollector().String()),
	})
	Event.Attributes = append(Event.Attributes, kv.Pair{
		Key:   []byte("height"),
		Value: []byte(fmt.Sprintf("%d", height)),
	})
	Event.Attributes = append(Event.Attributes, kv.Pair{
		Key:   []byte("block_time"),
		Value: []byte(time2.Format("2006-01-02T15:04:05.999999999Z")),
	})
	return
}

// PrintEventsLog debug-logs all events of a block in a compact one-line form.
func PrintEventsLog(ctx sdk.Context, events sdk.Events, Height int64) {
	logEvents := ""
	for _, e := range events {
		logEvents += e.Type + ","
		for _, ar := range e.Attributes {
			logEvents += " " + string(ar.Key) + ":" + string(ar.Value) + " "
		}
		logEvents += ";"
	}
	ctx.Logger().Debug("getEvent", "block_height", Height, "events", logEvents)
}

// GetTxInfo loads the block at Height, rebuilds and dispatches each decodable
// transaction through handleTx, and dispatches begin-block and synthesized
// fee events through handleEvn. Returns the block's timestamp.
// NOTE(review): Height is decremented before querying results — presumably to
// read the previous (fully committed) block; confirm against callers.
func GetTxInfo(ctx sdk.Context, Height int64, Cdc *codec.Codec, handleTx ReqTxHandle, handleEvn ReqEventsHandle) (t time.Time) {
	if singleton.NodeInst == nil {
		ctx.Logger().Debug("GetTxInfo", "types2.PNode", singleton.NodeInst)
		return
	}
	t = singleton.NodeInst.BlockStore().LoadBlock(Height).Time
	if Height <= 1 {
		return
	}
	Height--
	// getEvent fetches the block's begin-block events via the local RPC client.
	getEvent := func() (events sdk.Events) {
		ResTx, err := rpcclient.New(singleton.NodeInst).BlockResults(&Height)
		if err != nil {
			ctx.Logger().Error("getTx", "err", err)
			return
		}
		for i := 0; i < len(ResTx.BeginBlockEvents); i++ {
			events = append(events, makeEventForTxm(ResTx.BeginBlockEvents[i]))
		}
		PrintEventsLog(ctx, events, Height)
		return
	}
	// getTx fetches per-transaction execution results and re-marshals each as
	// a rawLog JSON blob; the result code of each tx is returned alongside.
	getTx := func() (raws []json.RawMessage, codes []uint32) {
		ResTx, err := rpcclient.New(singleton.NodeInst).BlockResults(&Height)
		if err != nil {
			ctx.Logger().Error("getTx", "err", err)
			return
		}
		for i := 0; i < len(ResTx.TxsResults); i++ {
			tr := rawLog{
				Code:      ResTx.TxsResults[i].Code,
				Data:      string(ResTx.TxsResults[i].Data),
				Info:      ResTx.TxsResults[i].Info,
				GasWanted: ResTx.TxsResults[i].GasWanted,
				GasUsed:   ResTx.TxsResults[i].GasUsed,
				CodeSpace: ResTx.TxsResults[i].Codespace,
			}
			// Log may not be valid JSON; errors are deliberately ignored.
			json.Unmarshal([]byte(ResTx.TxsResults[i].Log), &tr.Log)
			bz, _ := json.Marshal(tr)
			raws = append(raws, bz)
			codes = append(codes, tr.Code)
		}
		ctx.Logger().Debug("getTx", "block_height", Height, "raws", raws)
		return
	}
	// getTxInfo decodes each tx in the block, dispatches it, and collects the
	// synthesized fee events; stops at the first undecodable tx.
	getTxInfo := func() error {
		raws, _ := getTx()
		var FeeEvents sdk.Events
		block := singleton.NodeInst.BlockStore().LoadBlock(Height)
		for i := 0; i < len(block.Data.Txs); i++ {
			var stdTx chaintype.StdTx
			err := Cdc.UnmarshalBinaryLengthPrefixed(block.Data.Txs[i], &stdTx)
			if err == nil {
				handleTx(ctx, ReqTx{Txm: RebuildTx(ctx, stdTx, Cdc, block.Height, block.Time, block.Data.Txs[i].Hash(), raws[i])})
			} else {
				ctx.Logger().Error("GetTxInfo", "err", err)
				return err
			}
			FeeEvents = append(FeeEvents, makeFeeEvent(stdTx, block.Height, block.Time))
		}
		handleEvn(ctx, ReqEvents{
			BlockHeight: block.Height,
			Events:      getEvent(),
		})
		PrintEventsLog(ctx, FeeEvents, Height)
		handleEvn(ctx, ReqEvents{
			BlockHeight: block.Height,
			Events:      FeeEvents,
		})
		return nil
	}
	getTxInfo()
	return
}

// GetSenders extracts the sender account of every message implementing
// chaintype.KuMsgDataHandler. Panics if a sender cannot be resolved.
func GetSenders(tx chaintype.StdTx, Cdc *codec.Codec) (senders []string) {
	for _, msg := range tx.Msgs {
		if msg, ok := msg.(chaintype.KuMsgDataHandler); ok {
			sender, err := msg.GetSender(Cdc)
			if err != nil {
				panic(fmt.Sprintf("get sender failed: %v", err))
			}
			senders = append(senders, sender.String())
		}
	}
	return senders
}
#ifndef MATHHELPER_H
#define MATHHELPER_H

namespace Ilwis {

/* A 2-D coefficient pair; both components default to 0. */
struct KERNELSHARED_EXPORT Coefficient{
    Coefficient(double x=0, double y=0) : _x(x), _y(y){}
    double _x,_y;
};

/* Static math utilities for coordinate transformations and range handling.
 * NOTE(review): Coordinate and NumericRange are declared elsewhere in the
 * kernel — this header presumably relies on them being included first;
 * verify against the including translation units.
 */
class KERNELSHARED_EXPORT MathHelper
{
public:
    MathHelper();
    /* Fits an oblique (conformal) transform mapping `independent` points to
     * `dependent` points, writing the result into `coef`. Returns false on
     * failure. `useCols` selects which axis drives the fit — confirm exact
     * semantics in the implementation. */
    static bool findOblique(int iPoints, const std::vector<Coordinate>& independent, const std::vector<Coordinate>& dependent, std::vector<Coefficient>& coef, bool useCols);
    /* Fits a polynomial of `iTerms` terms through `iPoints` point pairs;
     * coefficients are written into `coef`. Returns false on failure. */
    static bool findPolynom(int iTerms, int iPoints, const std::vector<Coordinate>& independent, const std::vector<Coordinate>& dependent, std::vector<Coefficient>& coef);
    /* Returns a "nice" rounded numeric range covering [rmin, rmax]. */
    static NumericRange roundRange(double rmin, double rmax);
    /* Rounds r — see the implementation for the exact rounding rule. */
    static double round(double r);
};
}

#endif // MATHHELPER_H
/*
 * This function initializes address handle attributes from the incoming packet.
 * Incoming packet has dgid of the receiver node on which this code is
 * getting executed and, sgid contains the GID of the sender.
 *
 * When resolving mac address of destination, the arrived dgid is used
 * as sgid and, sgid is used as dgid because sgid contains destinations
 * GID whom to respond to.
 *
 * On success the caller is responsible to call rdma_destroy_ah_attr on the
 * attr.
 */
int ib_init_ah_attr_from_wc(struct ib_device *device, u32 port_num,
			    const struct ib_wc *wc, const struct ib_grh *grh,
			    struct rdma_ah_attr *ah_attr)
{
	u32 flow_class;
	int ret;
	enum rdma_network_type net_type = RDMA_NETWORK_IB;
	enum ib_gid_type gid_type = IB_GID_TYPE_IB;
	const struct ib_gid_attr *sgid_attr;
	int hoplimit = 0xff;
	union ib_gid dgid;
	union ib_gid sgid;

	might_sleep();

	memset(ah_attr, 0, sizeof *ah_attr);
	ah_attr->type = rdma_ah_find_type(device, port_num);
	if (rdma_cap_eth_ah(device, port_num)) {
		/* Ethernet port: determine the network header type either
		 * directly from the work completion or by inspecting the GRH.
		 */
		if (wc->wc_flags & IB_WC_WITH_NETWORK_HDR_TYPE)
			net_type = wc->network_hdr_type;
		else
			net_type = ib_get_net_type_by_grh(device, port_num, grh);
		gid_type = ib_network_to_gid_type(net_type);
	}
	ret = ib_get_gids_from_rdma_hdr((union rdma_network_hdr *)grh, net_type,
					&sgid, &dgid);
	if (ret)
		return ret;

	rdma_ah_set_sl(ah_attr, wc->sl);
	rdma_ah_set_port_num(ah_attr, port_num);

	if (rdma_protocol_roce(device, port_num)) {
		/* RoCE path: a GRH is mandatory; 0xffff means "no VLAN". */
		u16 vlan_id = wc->wc_flags & IB_WC_WITH_VLAN ?
				wc->vlan_id : 0xffff;

		if (!(wc->wc_flags & IB_WC_GRH))
			return -EPROTOTYPE;

		/* Note the swap: the arrived dgid is looked up as our sgid
		 * (see the function comment above).
		 */
		sgid_attr = get_sgid_attr_from_eth(device, port_num,
						   vlan_id, &dgid, gid_type);
		if (IS_ERR(sgid_attr))
			return PTR_ERR(sgid_attr);

		/* version_tclass_flow packs flow label (low 20 bits) and
		 * traffic class (next 8 bits).
		 */
		flow_class = be32_to_cpu(grh->version_tclass_flow);
		rdma_move_grh_sgid_attr(ah_attr,
					&sgid,
					flow_class & 0xFFFFF,
					hoplimit,
					(flow_class >> 20) & 0xFF,
					sgid_attr);

		ret = ib_resolve_unicast_gid_dmac(device, ah_attr);
		if (ret)
			/* Resolution failed: release the GRH/sgid_attr
			 * reference taken above before returning.
			 */
			rdma_destroy_ah_attr(ah_attr);

		return ret;
	} else {
		/* IB path: address by LID; GRH handling is optional. */
		rdma_ah_set_dlid(ah_attr, wc->slid);
		rdma_ah_set_path_bits(ah_attr, wc->dlid_path_bits);

		if ((wc->wc_flags & IB_WC_GRH) == 0)
			return 0;

		if (dgid.global.interface_id !=
			cpu_to_be64(IB_SA_WELL_KNOWN_GUID)) {
			sgid_attr = rdma_find_gid_by_port(
				device, &dgid, IB_GID_TYPE_IB, port_num, NULL);
		} else
			/* Well-known GUID: fall back to the port's GID at
			 * index 0.
			 */
			sgid_attr = rdma_get_gid_attr(device, port_num, 0);

		if (IS_ERR(sgid_attr))
			return PTR_ERR(sgid_attr);
		flow_class = be32_to_cpu(grh->version_tclass_flow);
		rdma_move_grh_sgid_attr(ah_attr,
					&sgid,
					flow_class & 0xFFFFF,
					hoplimit,
					(flow_class >> 20) & 0xFF,
					sgid_attr);

		return 0;
	}
}
The American Civil Liberties Union believes the death penalty inherently violates the constitutional ban against cruel and unusual punishment and the guarantees of due process of law and of equal protection under the law. Furthermore, we believe that the state should not give itself the right to kill human beings – especially when it kills with premeditation and ceremony, in the name of the law or in the name of its people, and when it does so in an arbitrary and discriminatory fashion. Capital punishment is an intolerable denial of civil liberties and is inconsistent with the fundamental values of our democratic system. The death penalty is uncivilized in theory and unfair and inequitable in practice. Through litigation, legislation, and advocacy against this barbaric and brutal institution, we strive to prevent executions and seek the abolition of capital punishment. The ACLU’s opposition to capital punishment incorporates the following fundamental concerns: The death penalty system in the US is applied in an unfair and unjust manner against people, largely dependent on how much money they have, the skill of their attorneys, race of the victim and where the crime took place. People of color are far more likely to be executed than white people, especially if the victim is white. The death penalty is a waste of taxpayer funds and has no public safety benefit. The vast majority of law enforcement professionals surveyed agree that capital punishment does not deter violent crime; a survey of police chiefs nationwide found they rank the death penalty lowest among ways to reduce violent crime. They ranked increasing the number of police officers, reducing drug abuse, and creating a better economy with more jobs higher than the death penalty as the best ways to reduce violence. The FBI has found the states with the death penalty have the highest murder rates. Innocent people are too often sentenced to death.
Since 1973, over 156 people have been released from death rows in 26 states because of innocence. Nationally, at least one person is exonerated for every 10 that are executed. INTRODUCTION TO THE “MODERN ERA” OF THE DEATH PENALTY IN THE UNITED STATES In 1972, the Supreme Court declared that under then-existing laws "the imposition and carrying out of the death penalty… constitutes cruel and unusual punishment in violation of the Eighth and Fourteenth Amendments." (Furman v. Georgia, 408 U.S. 238). The Court, concentrating its objections on the manner in which death penalty laws had been applied, found the result so "harsh, freakish, and arbitrary" as to be constitutionally unacceptable. Making the nationwide impact of its decision unmistakable, the Court summarily reversed death sentences in the many cases then before it, which involved a wide range of state statutes, crimes and factual situations. But within four years after the Furman decision, several hundred persons had been sentenced to death under new state capital punishment statutes written to provide guidance to juries in sentencing. These statutes require a two-stage trial procedure, in which the jury first determines guilt or innocence and then chooses imprisonment or death in the light of aggravating or mitigating circumstances. In 1976, the Supreme Court moved away from abolition, holding that "the punishment of death does not invariably violate the Constitution." The Court ruled that the new death penalty statutes contained "objective standards to guide, regularize, and make rationally reviewable the process for imposing the sentence of death." (Gregg v. Georgia, 428 U.S. 153). Subsequently 38 state legislatures and the Federal government enacted death penalty statutes patterned after those the Court upheld in Gregg. Congress also enacted and expanded federal death penalty statutes for peacetime espionage by military personnel and for a vast range of categories of murder. Executions resumed in 1977. 
In 2002, the Supreme Court held executions of mentally retarded criminals are “cruel and unusual punishments” prohibited by the Eighth Amendment to the Constitution. Since then, states have developed a range of processes to ensure that mentally retarded individuals are not executed. Many have elected to hold proceedings prior to the merits trial, many with juries, to determine whether an accused is mentally retarded. In 2005, the Supreme Court held that the Eighth and Fourteenth Amendments to the Constitution forbid imposition of the death penalty on offenders who were under the age of 18 when their crimes were committed, resulting in commutation of death sentences to life for dozens of individuals across the country. As of August 2012, over 3,200 men and women are under a death sentence and more than 1,300 men, women and children (at the time of the crime) have been executed since 1976. ACLU OBJECTIONS TO THE DEATH PENALTY Despite the Supreme Court's 1976 ruling in Gregg v. Georgia, et al, the ACLU continues to oppose capital punishment on moral, practical, and constitutional grounds: Capital punishment is cruel and unusual. It is cruel because it is a relic of the earliest days of penology, when slavery, branding, and other corporal punishments were commonplace. Like those barbaric practices, executions have no place in a civilized society. It is unusual because only the United States of all the western industrialized nations engages in this punishment. It is also unusual because only a random sampling of convicted murderers in the United States receive a sentence of death. Capital punishment denies due process of law. Its imposition is often arbitrary, and always irrevocable – forever depriving an individual of the opportunity to benefit from new evidence or new laws that might warrant the reversal of a conviction, or the setting aside of a death sentence. The death penalty violates the constitutional guarantee of equal protection. 
It is applied randomly – and discriminatorily. It is imposed disproportionately upon those whose victims are white, offenders who are people of color, and on those who are poor and uneducated and concentrated in certain geographic regions of the country. The death penalty is not a viable form of crime control. When police chiefs were asked to rank the factors that, in their judgment, reduce the rate of violent crime, they mentioned curbing drug use and putting more officers on the street, longer sentences and gun control. They ranked the death penalty as least effective. Politicians who preach the desirability of executions as a method of crime control deceive the public and mask their own failure to identify and confront the true causes of crime. Capital punishment wastes limited resources. It squanders the time and energy of courts, prosecuting attorneys, defense counsel, juries, and courtroom and law enforcement personnel. It unduly burdens the criminal justice system, and it is thus counterproductive as an instrument for society's control of violent crime. Limited funds that could be used to prevent and solve crime (and provide education and jobs) are spent on capital punishment. Opposing the death penalty does not indicate a lack of sympathy for murder victims. On the contrary, murder demonstrates a lack of respect for human life. Because life is precious and death irrevocable, murder is abhorrent, and a policy of state-authorized killings is immoral. It epitomizes the tragic inefficacy and brutality of violence, rather than reason, as the solution to difficult social problems. Many murder victims do not support state-sponsored violence to avenge the death of their loved one. Sadly, these victims have often been marginalized by politicians and prosecutors, who would rather publicize the opinions of pro-death penalty family members. Changes in death sentencing have proved to be largely cosmetic. 
The defects in death-penalty laws, conceded by the Supreme Court in the early 1970s, have not been appreciably altered by the shift from unrestrained discretion to "guided discretion." Such so-called “reforms” in death sentencing merely mask the impermissible randomness of a process that results in an execution. A society that respects life does not deliberately kill human beings. An execution is a violent public spectacle of official homicide, and one that endorses killing to solve social problems – the worst possible example to set for the citizenry, and especially children. Governments worldwide have often attempted to justify their lethal fury by extolling the purported benefits that such killing would bring to the rest of society. The benefits of capital punishment are illusory, but the bloodshed and the resulting destruction of community decency are real. CAPITAL PUNISHMENT IS NOT A DETERRENT TO CAPITAL CRIMES Deterrence is a function not only of a punishment's severity, but also of its certainty and frequency. The argument most often cited in support of capital punishment is that the threat of execution influences criminal behavior more effectively than imprisonment does. As plausible as this claim may sound, in actuality the death penalty fails as a deterrent for several reasons. A punishment can be an effective deterrent only if it is consistently and promptly employed. Capital punishment cannot be administered to meet these conditions. The proportion of first-degree murderers who are sentenced to death is small, and of this group, an even smaller proportion of people are executed. Although death sentences in the mid-1990s increased to about 300 per year, this is still only about one percent of all homicides known to the police. Of all those convicted on a charge of criminal homicide, only 3 percent – about 1 in 33 – are eventually sentenced to death. 
Between 2001-2009, the average number of death sentences per year dropped to 137, reducing the percentage even more. This tiny fraction of convicted murderers do not represent the “worst of the worst”. Mandatory death sentencing is unconstitutional. The possibility of increasing the number of convicted murderers sentenced to death and executed by enacting mandatory death penalty laws was ruled unconstitutional in 1976 (Woodson v. North Carolina, 428 U.S. 280). A considerable time between the imposition of the death sentence and the actual execution is unavoidable, given the procedural safeguards required by the courts in capital cases. Starting with selecting the trial jury, murder trials take far longer when the ultimate penalty is involved. Furthermore, post-conviction appeals in death-penalty cases are far more frequent than in other cases. These factors increase the time and cost of administering criminal justice. We can reduce delay and costs only by abandoning the procedural safeguards and constitutional rights of suspects, defendants, and convicts – with the attendant high risk of convicting the wrong person and executing the innocent. This is not a realistic prospect: our legal system will never reverse itself to deny defendants the right to counsel, or the right to an appeal. Persons who commit murder and other crimes of personal violence often do not premeditate their crimes. Most capital crimes are committed in the heat of the moment. Most capital crimes are committed during moments of great emotional stress or under the influence of drugs or alcohol, when logical thinking has been suspended. Many capital crimes are committed by the badly emotionally-damaged or mentally ill. In such cases, violence is inflicted by persons unable to appreciate the consequences to themselves as well as to others. Even when crime is planned, the criminal ordinarily concentrates on escaping detection, arrest, and conviction. 
The threat of even the severest punishment will not discourage those who expect to escape detection and arrest. It is impossible to imagine how the threat of any punishment could prevent a crime that is not premeditated. Furthermore, the death penalty is a futile threat for political terrorists, like Timothy McVeigh, because they usually act in the name of an ideology that honors its martyrs. Capital punishment doesn't solve our society's crime problem. Threatening capital punishment leaves the underlying causes of crime unaddressed, and ignores the many political and diplomatic sanctions (such as treaties against asylum for international terrorists) that could appreciably lower the incidence of terrorism. Capital punishment has been a useless weapon in the so-called "war on drugs." The attempt to reduce murders in the drug trade by threat of severe punishment ignores the fact that anyone trafficking in illegal drugs is already risking his life in violent competition with other dealers. It is irrational to think that the death penalty – a remote threat at best – will avert murders committed in drug turf wars or by street-level dealers. If, however, severe punishment can deter crime, then permanent imprisonment is severe enough to deter any rational person from committing a violent crime. The vast preponderance of the evidence shows that the death penalty is no more effective than imprisonment in deterring murder and that it may even be an incitement to criminal violence. Death-penalty states as a group do not have lower rates of criminal homicide than non-death-penalty states. Use of the death penalty in a given state may actually increase the subsequent rate of criminal homicide. Why? Perhaps because "a return to the exercise of the death penalty weakens socially based inhibitions against the use of lethal force to settle disputes…. 
" In adjacent states – one with the death penalty and the other without it – the state that practices the death penalty does not always show a consistently lower rate of criminal homicide. For example, between l990 and l994, the homicide rates in Wisconsin and Iowa (non-death-penalty states) were half the rates of their neighbor, Illinois – which restored the death penalty in l973, and by 1994 had sentenced 223 persons to death and carried out two executions. Between 2000-2010, the murder rate in states with capital punishment was 25-46% higher than states without the death penalty. On-duty police officers do not suffer a higher rate of criminal assault and homicide in abolitionist states than they do in death-penalty states. Between 1976 and 1989, for example, lethal assaults against police were not significantly more or less frequent in abolitionist states than in death-penalty states. Capital punishment did not appear to provide officers added protection during that time frame. In fact, the three leading states in law enforcement homicide in 1996 were also very active death penalty states: California (highest death row population), Texas (most executions since 1976), and Florida (third highest in executions and death row population). The South, which accounts for more than 80% of the country’s executions, also has the highest murder rate of any region in the country. If anything, the death penalty incited violence rather than curbed it. Prisoners and prison personnel do not suffer a higher rate of criminal assault and homicide from life-term prisoners in abolition states than they do in death-penalty states. Between 1992 and 1995, 176 inmates were murdered by other prisoners. The vast majority of those inmates (84%) were killed in death penalty jurisdictions. During the same period, about 2% of all inmate assaults on prison staff were committed in abolition jurisdictions. 
Evidently, the threat of the death penalty "does not even exert an incremental deterrent effect over the threat of a lesser punishment in the abolitionist states." Furthermore, multiple studies have shown that prisoners sentenced to life without parole have equivalent rates of prison violence as compared to other inmates. Actual experience thus establishes beyond a reasonable doubt that the death penalty does not deter murder. No comparable body of evidence contradicts that conclusion. Furthermore, there are documented cases in which the death penalty actually incited the capital crimes it was supposed to deter. These include instances of the so-called suicide-by-execution syndrome – persons who wanted to die but feared taking their own lives, and committed murder so that the state would kill them. For example, in 1996, Daniel Colwell, who suffered from mental illness, claimed that he killed a randomly-selected couple in a Georgia parking lot so that the state would kill him – he was sentenced to death and ultimately took his own life while on death row. Although inflicting the death penalty guarantees that the condemned person will commit no further crimes, it does not have a demonstrable deterrent effect on other individuals. Further, it is a high price to pay when studies show that few convicted murderers commit further crimes of violence. Researchers examined the prison and post-release records of 533 prisoners on death row in 1972 whose sentences were reduced to incarceration for life by the Supreme Court's ruling in Furman. This research showed that seven had committed another murder. But the same study showed that in four other cases, an innocent man had been sentenced to death. 
(Marquart and Sorensen, in Loyola of Los Angeles Law Review 1989) Recidivism among murderers does occasionally happen, but it occurs less frequently than most people believe; the media rarely distinguish between a convicted offender who murders while on parole, and a paroled murderer who murders again. Government data show that about one in 12 death row prisoners had a prior homicide conviction. But as there is no way to predict reliably which convicted murderers will try to kill again, the only way to prevent all such recidivism is to execute every convicted murderer – a policy no one seriously advocates. Equally effective but far less inhumane is a policy of life imprisonment without the possibility of parole. CAPITAL PUNISHMENT IS UNFAIR Constitutional due process and elementary justice both require that the judicial functions of trial and sentencing be conducted with fundamental fairness, especially where the irreversible sanction of the death penalty is involved. In murder cases (since 1930, 88 percent of all executions have been for this crime), there has been substantial evidence to show that courts have sentenced some persons to prison while putting others to death in a manner that has been arbitrary, racially biased, and unfair. Racial Bias in Death Sentencing Racial discrimination was one of the grounds on which the Supreme Court ruled the death penalty unconstitutional in Furman. Half a century ago, in his classic American Dilemma (1944), Gunnar Myrdal reported that "the South makes the widest application of the death penalty, and Negro criminals come in for much more than their share of the executions." A study of the death penalty in Texas shows that the current capital punishment system is an outgrowth of the racist "legacy of slavery." Between 1930 and the end of 1996, 4,220 prisoners were executed in the United States; more than half (53%) were black. 
Our nation's death rows have always held a disproportionately large population of African Americans, relative to their percentage of the total population. Comparing black and white offenders over the past century, the former were often executed for what were considered less-than-capital offenses for whites, such as rape and burglary. (Between 1930 and 1976, 455 men were executed for rape, of whom 405 – 90 percent – were black.) A higher percentage of the blacks who were executed were juveniles; and the rate of execution without having one's conviction reviewed by any higher court was higher for blacks. (Bowers, Legal Homicide 1984; Streib, Death Penalty for Juveniles 1987) In recent years, it has been argued that such flagrant racial discrimination is a thing of the past. However, since the revival of the death penalty in the mid-1970s, about half of those on death row at any given time have been black. More striking is the racial comparison of victims. Although approximately 49% of all homicide victims are white, 77% of capital homicide cases since 1976 have involved a white victim. Between 1976 and 2005, 86% of white victims were killed by whites (14% by other races) while 94% of black victims were killed by blacks (6% by other races). Blacks and whites are murder victims in almost equal numbers – a striking disproportion, given that the general US population is only 13% black. African-Americans are six times as likely as white Americans to die at the hands of a murderer, and roughly seven times as likely to murder someone. Young black men are fifteen times as likely to be murdered as young white men. So given this information, when those under death sentence are examined more closely, it turns out that race is a decisive factor after all. 
Further, studies like that commissioned by the Governor of Maryland found that “black offenders who kill white victims are at greater risk of a death sentence than others, primarily because they are substantially more likely to be charged by the state’s attorney with a capital offense.” The classic statistical study of racial discrimination in capital cases in Georgia presented in the McCleskey case showed that "the average odds of receiving a death sentence among all indicted cases were 4.3 times higher in cases with white victims." (David C. Baldus et al., Equal Justice and the Death Penalty 1990) In 1987 these data were placed before the Supreme Court in McCleskey v. Kemp and while the Court did not dispute the statistical evidence, it held that evidence of an overall pattern of racial bias was not sufficient. Mr. McCleskey would have to prove racial bias in his own case – a virtually impossible task. The Court also held that the evidence failed to show that there was "a constitutionally significant risk of racial bias...." (481 U.S. 279) Although the Supreme Court declared that the remedy sought by the plaintiff was "best presented to the legislative bodies," subsequent efforts to persuade Congress to remedy the problem by enacting the Racial Justice Act were not successful. (Don Edwards & John Conyers, Jr., The Racial Justice Act – A Simple Matter of Justice, in University of Dayton Law Review 1995) In 1990, the U.S. General Accounting Office reported to the Congress the results of its review of empirical studies on racism and the death penalty. The GAO concluded: "Our synthesis of the 28 studies shows a pattern of evidence indicating racial disparities in the charging, sentencing, and imposition of the death penalty after the Furman decision" and that "race of victim influence was found at all stages of the criminal justice system process..." Texas was prepared to execute Duane Buck on September 15, 2011. Mr. 
Buck was condemned to death by a jury that had been told by an expert psychologist that he was more likely to be dangerous because he was African American. The Supreme Court stayed the case, but Mr. Buck has not yet received the new sentencing hearing justice requires. These results cannot be explained away by relevant non-racial factors, such as prior criminal record or type of crime, as these were controlled for in the Baldus and GAO studies referred to above. They lead to a very unsavory conclusion: In the trial courts of this nation, even at the present time, the killing of a white person is treated much more severely than the killing of a black person. Of the 313 persons executed between January 1977 and the end of 1995, 36 had been convicted of killing a black person while 249 (80%) had killed a white person. Of the 178 white defendants executed, only three had been convicted of murdering people of color. Our criminal justice system essentially reserves the death penalty for murderers (regardless of their race) who kill white victims. Another recent Louisiana study found that defendants with white victims were 97% more likely to receive death sentences than defendants with black victims.[1] Both gender and socio-economic class also determine who receives a death sentence and who is executed. Women account for only two percent of all people sentenced to death, even though females commit about 11 percent of all criminal homicides. Many of the women under death sentence were guilty of killing men who had victimized them with years of violent abuse. Since 1900, only 51 women have been executed in the United States (15 of them black). Discrimination against the poor (and in our society, racial minorities are disproportionately poor) is also well established. It is a prominent factor in the availability of counsel. Fairness in capital cases requires, above all, competent counsel for the defendant. 
Yet "approximately 90 percent of those on death row could not afford to hire a lawyer when they were tried." Common characteristics of death-row defendants are poverty, the lack of firm social roots in the community, and inadequate legal representation at trial or on appeal. As Justice William O. Douglas noted in Furman, "One searches our chronicles in vain for the execution of any member of the affluent strata in this society"(408 US 238). Failure of Safeguards The demonstrated inequities in the actual administration of capital punishment should tip the balance against it in the judgment of fair-minded and impartial observers. "Whatever else might be said for the use of death as a punishment, one lesson is clear from experience: this is a power that we cannot exercise fairly and without discrimination."(Gross and Mauro, Death and Discrimination 1989) Justice John Marshall Harlan, writing for the Court in McGautha v. California, noted "… the history of capital punishment for homicides … reveals continual efforts, uniformly unsuccessful, to identify before the fact those homicides for which the slayer should die…. Those who have come to grips with the hard task of actually attempting to draft means of channeling capital sentencing discretion have confirmed the lesson taught by history…. To identify before the fact those characteristics of criminal homicides and their perpetrators which call for the death penalty, and to express these characteristics in language which can be fairly understood and applied by the sentencing authority, appear to be tasks which are beyond present human ability." (402 U.S. 183 (1971)) Yet in the Gregg decision, the majority of the Supreme Court abandoned the wisdom of Justice Harlan and ruled as though the new guided-discretion statutes could accomplish the impossible. The truth is that death statutes approved by the Court "do not effectively restrict the discretion of juries by any real standards, and they never will. 
No society is going to kill everybody who meets certain preset verbal requirements, put on the statute books without awareness of coverage of the infinity of special factors the real world can produce." Evidence obtained by the Capital Jury Project has shown that jurors in capital trials generally do not understand the judge's instructions about the laws that govern the choice between imposing the death penalty and a life sentence. Even when they do comprehend, jurors often refuse to be guided by the law. "Juror comprehension of the law… is mediocre. The effect [of this relative lack of comprehension of the law]… is to reduce the likelihood that capital defendants will benefit from the safeguards against arbitrariness built into the… law." Even if the jury's sentencing decision were strictly governed by the relevant legal criteria, there remains a vast reservoir of unfettered discretion: the prosecutor's decision to prosecute for a capital or lesser crime, the court's willingness to accept or reject a guilty plea, the jury's decision to convict for second-degree murder or manslaughter rather than capital murder, the determination of the defendant's sanity, and the governor's final clemency decision, among others. Discretion in the criminal justice system is unavoidable. The history of capital punishment in America clearly demonstrates the social desire to mitigate the harshness of the death penalty by narrowing the scope of its application. Whether or not explicitly authorized by statutes, sentencing discretion has been the main vehicle to this end. But when sentencing discretion is used – as it too often has been – to doom the poor, the friendless, the uneducated, racial minorities, and the despised, it becomes injustice. Mindful of such facts, the House of Delegates of the American Bar Association (including 20 out of 24 former presidents of the ABA) called for a moratorium on all executions by a vote of 280 to 119 in February 1997. 
The House judged the current system to be "a haphazard maze of unfair practices." In its 1996 survey of the death penalty in the United States, the International Commission of Jurists reinforced this point. Despite the efforts made over the past two decades since Gregg to protect the administration of the death penalty from abuses, the actual "constitutional errors committed in state courts have gravely undermined the legitimacy of the death penalty as a punishment for crime." (International Commission of Jurists, Administration of the Death Penalty in the United States 1996) In 2009, the American Law Institute (ALI), the leading independent organization in the U.S. producing scholarly work to clarify, modernize and improve the law, removed capital punishment from its Model Penal Code. The ALI, which created the modern legal framework for the death penalty in 1962, indicated that the punishment is so arbitrary, fraught with racial and economic disparities, and unable to assure quality legal representation for indigent capital defendants, that it can never be administered fairly. Thoughtful citizens, who might possibly support the abstract notion of capital punishment, are obliged to condemn it in actual practice. CAPITAL PUNISHMENT IS IRREVERSIBLE Unlike any other criminal punishments, the death penalty is irrevocable. Speaking to the French Chamber of Deputies in 1830, years after having witnessed the excesses of the French Revolution, the Marquis de Lafayette said, "I shall ask for the abolition of the punishment of death until I have the infallibility of human judgment demonstrated to me." Although some proponents of capital punishment would argue that its merits are worth the occasional execution of innocent people, most would hasten to insist that there is little likelihood of the innocent being executed. Since 1900, in this country, there have been on the average more than four cases each year in which an entirely innocent person was convicted of murder. 
Scores of these individuals were sentenced to death. In many cases, a reprieve or commutation arrived just hours, or even minutes, before the scheduled execution. These erroneous convictions have occurred in virtually every jurisdiction from one end of the nation to the other. Nor have they declined in recent years, despite the new death penalty statutes approved by the Supreme Court. Disturbingly, and increasingly, a large body of evidence from the modern era shows that innocent people are often convicted of crimes – including capital crimes – and that some have been executed. In 2012, a new report in the Columbia Human Rights Law Review chronicled the horrifying case of Carlos DeLuna, a man executed in Texas in 1989 for a murder that it was “common knowledge” had been committed by another man.[2] DeLuna’s story demonstrates so many of the factors that can go wrong in a capital case: faulty eyewitness identification, prosecutorial misconduct, police misconduct, a botched crime scene, destroyed DNA evidence, a poor person represented by an ineffective, inexperienced defense attorney overmatched by a professional prosecutor, and insufficient oversight from the bench.[3] In its case against DeLuna, the State presented no blood or DNA evidence, no crime scene fingerprints, and no proof of hair or fibers from the victim having been found on the defendant. He was convicted largely based on eyewitness testimony made from the back of a police car in a dimly lit lot near the crime scene. 
Meanwhile, a violent criminal named Carlos Hernandez—a man who not only shared DeLuna’s name, but also looked like him—repeatedly boasted about how he had committed the murder and gotten away with it.[4] These disturbing facts about DeLuna’s case, brought to light more than two decades after his execution, refute the claim, made by some proponents of capital punishment, that the United States has never executed an innocent person.[5] Consider this additional handful of cases of innocent people sentenced to die – some executed and some spared: In 2011, the state of Georgia executed Troy Davis, a Black man who was almost certainly innocent of the murder of a white off-duty police officer. The circumstances of his execution raised an international outcry, for good reason. Davis was convicted based on eyewitness testimony, since there was no murder weapon or physical evidence presented by the prosecution. Seven of the nine eyewitnesses recanted or contradicted their trial testimony, many of them saying they were pressured or threatened by police at the time. Troy Davis came close to execution three previous times, because of the difficulty of getting any court to listen to new evidence casting doubt on his conviction. After passage of a federal law in 1996, petitioners are very limited in their ability to appeal death sentences, and courts routinely refuse to hear new testimony, even evidence of innocence. When Troy Davis finally did get a hearing on his evidence, the judge required “proof of innocence” – an impossibly high standard which he ruled that Mr. Davis did not meet. Despite the overwhelming call for clemency, supposed to be the “fail-safe” of the death penalty system, the Georgia Board of Pardons refused to commute the sentence to life and Mr. Davis was executed. Only one day after Troy Davis was executed, two men were freed by the special Innocence Commission of North Carolina after a decade apiece in prison. 
The two men had actually pled guilty to a crime they did not commit, because they were threatened with the death penalty. In Texas in 2004, Cameron Todd Willingham was executed for the arson-murder of his three children. Independent investigations by a newspaper, a nonprofit organization using top experts in the field of fire science, and an independent expert hired by the State of Texas all found that accident, not arson was the cause of the fire. There simply was no reliable evidence that the children were murdered. Yet even with these reports in hand, the state of Texas executed Mr. Willingham. Earlier this year, the Texas Forensic Science Commission was poised to issue a report officially confirming these conclusions until Texas Governor Rick Perry replaced the Commission’s chair and some of its members. Cameron Todd Willingham, who claimed innocence all along, was executed for a crime he almost certainly did not commit. As an example of the arbitrariness of the death penalty, another man, Ernest Willis, also convicted of arson-murder on the same sort of flimsy and unscientific testimony, was freed from Texas death row six months after Willingham was executed. In 1985, in Maryland, Kirk Bloodsworth was sentenced to death for rape and murder, despite the testimony of alibi witnesses. In 1986 his conviction was reversed on grounds of withheld evidence pointing to another suspect; he was retried, re-convicted, and sentenced to life in prison. In 1993, newly available DNA evidence proved he was not the rapist-killer, and he was released after the prosecution dismissed the case. A year later he was awarded $300,000 for wrongful punishment. Years later the DNA was matched to the real killer. In Mississippi, in 1990, Sabrina Butler was sentenced to death for killing her baby boy. She claimed the child died after attempts at resuscitation failed. On technical grounds her conviction was reversed in 1992. 
At retrial, she was acquitted when a neighbor corroborated Butler's explanation of the child's cause of death and the physician who performed the autopsy admitted his work had not been thorough. In 1990, Jesse Tafero was executed in Florida. He had been convicted in 1976 along with his wife, Sonia Jacobs, for murdering a state trooper. In 1981 Jacobs' death sentence was reduced on appeal to life imprisonment, and 11 years later her conviction was vacated by a federal court. The evidence on which Tafero and Jacobs had been convicted and sentenced was identical; it consisted mainly of the perjured testimony of an ex-convict who turned state's witness in order to avoid a death sentence. Had Tafero been alive in 1992, he no doubt would have been released along with Jacobs. Tafero’s execution went horribly wrong, and his head caught on fire during the electrocution. In Alabama, Walter McMillian was convicted of murdering a white woman in 1988. Despite the jury's recommendation of a life sentence, the judge sentenced him to death. The sole evidence leading the police to arrest McMillian was testimony of an ex-convict seeking favor with the prosecution. A dozen alibi witnesses (all African Americans, like McMillian) testified on McMillian's behalf that they were together at a neighborhood gathering, to no avail. On appeal, after tireless efforts by his attorney Bryan Stevenson, McMillian's conviction was reversed by the Alabama Court of Appeals. Stevenson uncovered prosecutorial suppression of exculpatory evidence and perjury by prosecution witnesses, and the new district attorney joined the defense in seeking dismissal of the charges. In 1985, in Illinois, Rolando Cruz and Alejandro Hernandez were convicted of abduction, rape, and murder of a young girl and were sentenced to death. 
Shortly after, another man serving a life term in prison for similar crimes confessed that he alone was guilty; but his confession was inadmissible because he refused to repeat it in court unless the state waived the death penalty against him. Awarded a new trial in 1988, Cruz was again convicted and sentenced to death; Hernandez was also re-convicted, and sentenced to 80 years in prison. In 1992 the assistant attorney general assigned to prosecute the case on appeal resigned after becoming convinced of the defendants' innocence. The convictions were again overturned on appeal after DNA tests exonerated Cruz and implicated the prisoner who had earlier confessed. In 1995 the court ordered a directed verdict of acquittal, and sharply criticized the police for their unprofessional handling of the case. Hernandez was released on bail and the prosecution dropped all charges. In 1980 in Texas a black high school janitor, Clarence Brandley, and his white co-worker found the body of a missing 16-year-old white schoolgirl. Interrogated by the police, they were told, "One of you two is going to hang for this." Looking at Brandley, the officer said, "Since you're the nigger, you're elected." In a classic case of rush to judgment, Brandley was tried, convicted, and sentenced to death. The circumstantial evidence against him was thin, other leads were ignored by the police, and the courtroom atmosphere reeked of racism. In 1986, Centurion Ministries – a volunteer group devoted to freeing wrongly convicted prisoners – came to Brandley's aid. Evidence had meanwhile emerged that another man had committed the murder for which Brandley was awaiting execution. Brandley was not released until 1990. (Davies, White Lies 1991) This sample of freakish and arbitrary innocence determinations also speaks directly to the unceasing concern that there are many more innocent people on death rows across the country – as well as among those who have been executed. 
Several factors seen in the above sample of cases help explain why the judicial system cannot guarantee that justice will never miscarry: overzealous prosecution, mistaken or perjured testimony, race, faulty police work, coerced confessions, the defendant's previous criminal record, inept and under-resourced defense counsel, seemingly conclusive circumstantial evidence, and community pressure for a conviction, among others. And when the system does go wrong, it is often volunteers from outside the criminal justice system – journalists, for example – who rectify the errors, not the police or prosecutors. To retain the death penalty in the face of the demonstrable failures of the system is unacceptable, especially since there are no strong overriding reasons to favor the death penalty. CAPITAL PUNISHMENT IS BARBARIC Prisoners are executed in the United States by any one of five methods; in a few jurisdictions the prisoner is allowed to choose which one he or she prefers: The traditional mode of execution, hanging, is an option still available in Delaware, New Hampshire and Washington. Death on the gallows is easily bungled: If the drop is too short, there will be a slow and agonizing death by strangulation. If the drop is too long, the head will be torn off. Two states, Idaho and Utah, still authorize the firing squad. The prisoner is strapped into a chair and hooded. A target is pinned to the chest. Five marksmen, one with blanks, take aim and fire. Throughout the twentieth century, electrocution has been the most widely used form of execution in this country, and is still utilized in eleven states, although lethal injection is the primary method of execution. The condemned prisoner is led – or dragged – into the death chamber, strapped into the chair, and electrodes are fastened to head and legs. When the switch is thrown the body strains, jolting as the voltage is raised and lowered. Often smoke rises from the head. There is the awful odor of burning flesh. 
No one knows how long electrocuted individuals retain consciousness. In 1983, the electrocution of John Evans in Alabama was described by an eyewitness as follows: "At 8:30 p.m. the first jolt of 1900 volts of electricity passed through Mr. Evans' body. It lasted thirty seconds. Sparks and flames erupted … from the electrode tied to Mr. Evans' left leg. His body slammed against the straps holding him in the electric chair and his fist clenched permanently. The electrode apparently burst from the strap holding it in place. A large puff of grayish smoke and sparks poured out from under the hood that covered Mr. Evans' face. An overpowering stench of burnt flesh and clothing began pervading the witness room. Two doctors examined Mr. Evans and declared that he was not dead. "The electrode on the left leg was re-fastened. …Mr. Evans was administered a second thirty second jolt of electricity. The stench of burning flesh was nauseating. More smoke emanated from his leg and head. Again, the doctors examined Mr. Evans. [They] reported that his heart was still beating, and that he was still alive. At that time, I asked the prison commissioner, who was communicating on an open telephone line to Governor George Wallace, to grant clemency on the grounds that Mr. Evans was being subjected to cruel and unusual punishment. The request …was denied. "At 8:40 p.m., a third charge of electricity, thirty seconds in duration, was passed through Mr. Evans' body. At 8:44, the doctors pronounced him dead. The execution of John Evans took fourteen minutes." Afterwards, officials were embarrassed by what one observer called the "barbaric ritual." The prison spokesman remarked, "This was supposed to be a very clean manner of administering death." The introduction of the gas chamber was an attempt to improve on electrocution. In this method of execution the prisoner is strapped into a chair with a container of sulfuric acid underneath. 
The chamber is sealed, and cyanide is dropped into the acid to form a lethal gas. Execution by suffocation in the lethal gas chamber has not been abolished but lethal injection serves as the primary method in states which still authorize it. In 1996 a panel of judges on the 9th Circuit Court of Appeals in California (where the gas chamber has been used since 1933) ruled that this method is a "cruel and unusual punishment." Here is an account of the 1992 execution in Arizona of Don Harding, as reported in the dissent by U.S. Supreme Court Justice John Paul Stevens: "When the fumes enveloped Don's head he took a quick breath. A few seconds later he again looked in my direction. His face was red and contorted as if he were attempting to fight through tremendous pain. His mouth was pursed shut and his jaw was clenched tight. Don then took several more quick gulps of the fumes. "At this point Don's body started convulsing violently.... His face and body turned a deep red and the veins in his temple and neck began to bulge until I thought they might explode. After about a minute Don's face leaned partially forward, but he was still conscious. Every few seconds he continued to gulp in. He was shuddering uncontrollably and his body was racked with spasms. His head continued to snap back. His hands were clenched. "After several more minutes, the most violent of the convulsions subsided. At this time the muscles along Don's left arm and back began twitching in a wavelike motion under his skin. Spittle drooled from his mouth. "Don did not stop moving for approximately eight minutes, and after that he continued to twitch and jerk for another minute. Approximately two minutes later, we were told by a prison official that the execution was complete. “Don Harding took ten minutes and thirty one seconds to die." (Gomez v. U.S. District Court, 112 S.Ct. 
1652) The latest mode of inflicting the death penalty, enacted into law by more than 30 states, is lethal injection, first used in 1982 in Texas. It is easy to overstate the humaneness and efficacy of this method; one cannot know whether lethal injection is really painless and there is evidence that it is not. As the U.S. Court of Appeals observed, there is "substantial and uncontroverted evidence… that execution by lethal injection poses a serious risk of cruel, protracted death…. Even a slight error in dosage or administration can leave a prisoner conscious but paralyzed while dying, a sentient witness of his or her own asphyxiation." (Chaney v. Heckler, 718 F.2d 1174, 1983). Its veneer of decency and subtle analogy with life-saving medical practice no doubt makes killing by lethal injection more acceptable to the public. Journalist Susan Blaustein, reacting to having witnessed an execution in Texas, comments: "The lethal injection method … has turned dying into a still life, thereby enabling the state to kill without anyone involved feeling anything…. Any remaining glimmers of doubt – about whether the man received due process, about his guilt, about our right to take life – cause us to rationalize these deaths with such catchwords as ‘heinous,’ ‘deserved,’ ‘deterrent,’ ‘justice,’ and ‘painless.’ We have perfected the art of institutional killing to the degree that it has deadened our natural, quintessentially human response to death." Botched Lethal Injections Nor does execution by lethal injection always proceed smoothly as planned. In 1985 "the authorities repeatedly jabbed needles into … Stephen Morin, when they had trouble finding a usable vein because he had been a drug abuser." In 1988, during the execution of Raymond Landry, "a tube attached to a needle inside the inmate's right arm began leaking, sending the lethal mixture shooting across the death chamber toward witnesses." Although the U.S. 
Supreme Court has held that the current method of lethal injection used is constitutional, several people have suffered because of this form of execution. In Ohio, Rommel Broom was subjected to 18 attempts at finding a vein so that he could be killed by lethal injection. The process to try to execute him took over two hours. Finally, the governor had to stop the execution and grant the inmate a one week reprieve. Mr. Broom has not been executed because he is challenging the state’s right to hold a second execution attempt. Nor was he the only Ohio inmate so maltreated. During his 2006 execution Joseph Clark screamed, “it don’t work” and requested to take something by mouth so the torture would end when his executioners took thirty minutes to find a vein. Christopher Newton’s execution took over two hours – so long that he had to be given a bathroom break. Lethal Injection Protocol Issues Most lethal injections in the United States use a “cocktail” consisting of three drugs that sequentially render an inmate unconscious, cause paralysis and cease breathing, and stop an inmate’s heart.[6] But in 2011, the sole American manufacturer of sodium thiopental, a vital part of the three-drug cocktail, decided to discontinue production, forcing states to adapt their lethal injection methodology.[7] Some states have replaced the three-drug cocktail with a single substance,[8] while others have replaced thiopental in the three-drug sequence with another anesthetic.[9] Both three-drug and single-drug executions raise vital concerns: the three-drug cocktail’s paralyzing sedative may mask the inmate’s pain and suffering, while the single-drug method takes about 25 minutes to end a life (if there are no complications), compared with the ten-minute three-drug process.[10] Although the Supreme Court held in 2008 that Kentucky’s three-drug lethal injection procedure did not violate the Constitution’s ban on cruel and unusual punishment,[11] it is unclear whether states’ adapted 
procedures pass muster. Indeed, in February 2012, a three-judge panel of the Ninth Circuit Court of Appeals admonished the Arizona Department of Corrections, stating that its approach to execution “cannot continue” and questioning the “regularity and reliability” of protocols that give complete discretion to the corrections director to determine which and how many drugs will be used for each execution.[12] In Georgia, the state Supreme Court stayed the execution of Warren Hill hours before he was scheduled to die in July 2012 in order to review the Department of Corrections’ new single-drug lethal injection procedure.[13] The Missouri Supreme Court imposed a temporary moratorium on executions in August 2012, declaring that it would be “premature” to set execution dates for death row inmates given a pending lawsuit about whether the state’s lethal injection procedures are humane. The state had amended its injection protocol to use a single drug, propofol, which advocates say causes severe pain upon injection.[14] Although similar suits are pending in other states,[15] not all protocol-based challenges have succeeded; in Texas and Oklahoma, executions have continued despite questions about the potential cruelty of lethal injection and the type or number of chemicals used.[16] Regardless of whether states use one or three drugs for an execution, all of the major lethal injection drugs are in short supply due to manufacturers’ efforts to prevent the use of their products for executions[17] and European Union restrictions on the exportation of drugs that may be used to kill.[18] As a result, some state executioners have pursued questionable means of obtaining the deadly chemicals from other states and foreign companies, including a pharmaceutical wholesaler operating out of the back of a London driving school.[19] These backroom deals—which, astoundingly, have been approved by the U.S. 
Food and Drug Administration (FDA)—are now the subject of federal litigation that could impact the legitimacy of the American death penalty system. In March 2012, six death row inmates argued that the FDA had shirked its duty to regulate lethal substances and raised concerns about the “very real risk that unapproved thiopental will not actually render a condemned prisoner unconscious.”[20] A federal district judge agreed and ordered the FDA to confiscate the imported thiopental, but the agency has appealed.[21] Witnessing the Execution Most people who have observed an execution are horrified and disgusted. "I was ashamed," writes sociologist Richard Moran, who witnessed an execution in Texas in 1985. "I was an intruder, the only member of the public who had trespassed on [the condemned man's] private moment of anguish. In my face he could see the horror of his own death." Revulsion at the duty to supervise and witness executions is one reason why so many prison wardens – however unsentimental they are about crime and criminals – are opponents of capital punishment. Don Cabana, who supervised several executions in Missouri and Mississippi reflects on his mood just prior to witnessing an execution in the gas chamber: "If [the condemned prisoner] was some awful monster deemed worthy of extermination, why did I feel so bad about it, I wondered. It has been said that men on death row are inhuman, cold-blooded killers. But as I stood and watched a grieving mother leave her son for the last time, I questioned how the sordid business of executions was supposed to be the great equalizer…. The 'last mile' seemed an eternity, every step a painful reminder of what waited at the end of the walk. Where was the cold-blooded murderer, I wondered, as we approached the door to the last-night cell. I had looked for that man before… and I still had not found him – I saw, in my grasp, only a frightened child. 
[Minutes after the execution and before] heading for the conference room and a waiting press corps, I… shook my head. 'No more. I don't want to do this anymore.'" (Death at Midnight 1996)
Beccaria's words still ring true – even if the death penalty were a "useful" deterrent, it would still be an "example of barbarity." No society can safely entrust the enforcement of its laws to torture, brutality, or killing. Such methods are inherently cruel and will always mock the attempt to cloak them in justice. As Supreme Court Justice Arthur J. Goldberg wrote, "The deliberate institutionalized taking of human life by the state is the greatest conceivable degradation to the dignity of the human personality."(Boston Globe, August 16, 1976) Death Row Syndrome Capital appeals are not only costly; they are also time-consuming. The average death row inmate waits 12 years between sentencing and execution, and some sit in anticipation of their executions on death row for up to 30 years.[22] For these prisoners, most of whom are housed in solitary confinement, this wait period may cause “Death Row Phenomenon” or “Death Row Syndrome.” Although the terms are often used interchangeably, “Death Row Phenomenon” refers to the destructive consequences of long-term solitary confinement[23] and the inevitable anxiety that results from awaiting one’s own death, while “Death Row Syndrome” refers to the severe psychological illness that often results from Death Row Phenomenon.[24] In solitary confinement, inmates are often isolated for 23 hours each day without access to training or educational programs, recreational activities, or regular visits. Such conditions have been demonstrated to provoke agitation, psychosis, delusions, paranoia, and self-destructive behavior.[25] To inflict this type of mental harm is inhumane, but it also may prove detrimental to public safety. When death row inmates successfully appeal their sentences, they are transferred into the general inmate population, and when death row inmates are exonerated, they are promptly released into the community.[26] Death Row Syndrome needlessly risks making these individuals dangerous to those around them. 
Neither Death Row Syndrome nor Death Row Phenomenon has received formal recognition from the American Psychiatric Association or the American Psychological Association.[27] In 1995, however, Justices Stevens and Breyer, in a memorandum regarding the Supreme Court’s denial of certiorari to death row inmate Clarence Lackey, highlighted the “importance and novelty” of the question “whether executing a prisoner who has already spent some 17 years on death row violates the Eighth Amendment’s prohibition against cruel and unusual punishment.”[28] Further, as some scholars and advocates have noted, the mental deterioration symptomatic of Death Row Syndrome may render an inmate incompetent to participate in their own post-conviction proceedings.[29] Death Row Syndrome gained international recognition during the 1989 extradition proceedings of Jens Soering, a German citizen arrested in England and charged with committing murder on American soil.[30] Soering argued, and the European Court of Human Rights agreed, that extraditing him to the United States would violate Article 3 of the European Convention for the Protection of Human Rights and Fundamental Freedoms.[31] The Court explained that, in the United States, “the condemned prisoner has to endure for many years the conditions on death row and the anguish and mounting tension of living in the ever-present shadow of death” such that extraditing Soering would violate protections against “inhuman or degrading treatment or punishment.”[32] Similar conclusions have been reached by the United Kingdom’s Judicial Committee of the Privy Council, the United Nations Human Rights Committee, and the Canadian Supreme Court.[33] CAPITAL PUNISHMENT IS UNJUSTIFIED RETRIBUTION Justice, it is often insisted, requires the death penalty as the only suitable retribution for heinous crimes. This claim does not bear scrutiny, however. By its nature, all punishment is retributive. 
Therefore, whatever legitimacy is to be found in punishment as just retribution can, in principle, be satisfied without recourse to executions. Moreover, the death penalty could be defended on narrowly retributive grounds only for the crime of murder, and not for any of the many other crimes that have frequently been made subject to this mode of punishment (rape, kidnapping, espionage, treason, drug trafficking). Few defenders of the death penalty are willing to confine themselves consistently to the narrow scope afforded by retribution. In any case, execution is more than a punishment exacted in retribution for the taking of a life. As Nobel Laureate Albert Camus wrote, "For there to be equivalence, the death penalty would have to punish a criminal who had warned his victim of the date at which he would inflict a horrible death on him and who, from that moment onward, had confined him at his mercy for months. Such a monster is not encountered in private life." (Reflections on the Guillotine, in Resistance, Rebellion, and Death 1960) It is also often argued that death is what murderers deserve, and that those who oppose the death penalty violate the fundamental principle that criminals should be punished according to their just desserts – "making the punishment fit the crime." If this rule means punishments are unjust unless they are like the crime itself, then the principle is unacceptable: It would require us to rape rapists, torture torturers, and inflict other horrible and degrading punishments on offenders. It would require us to betray traitors and kill multiple murderers again and again – punishments that are, of course, impossible to inflict. Since we cannot reasonably aim to punish all crimes according to this principle, it is arbitrary to invoke it as a requirement of justice in the punishment of murder. 
If, however, the principle of just deserts means the severity of punishments must be proportional to the gravity of the crime – and since murder is the gravest crime, it deserves the severest punishment – then the principle is no doubt sound. Nevertheless, this premise does not compel support for the death penalty; what it does require is that other crimes be punished with terms of imprisonment or other deprivations less severe than those used in the punishment of murder. Criminals no doubt deserve to be punished, and the severity of the punishment should be appropriate to their culpability and the harm they have caused the innocent. But severity of punishment has its limits – imposed by both justice and our common human dignity. Governments that respect these limits do not use premeditated, violent homicide as an instrument of social policy. Murder Victims Families Oppose the Death Penalty Some people who have lost a loved one to murder believe that they cannot rest until the murderer is executed. But this sentiment is by no means universal. Coretta Scott King has observed, "As one whose husband and mother-in-law have died the victims of murder and assassination, I stand firmly and unequivocally opposed to the death penalty for those convicted of capital offenses. An evil deed is not redeemed by an evil deed of retaliation. Justice is never advanced in the taking of a human life. Morality is never upheld by a legalized murder." (Speech to National Coalition to Abolish the Death Penalty, Washington, D.C., September 26, 1981) Kerry Kennedy Cuomo, daughter of the slain Senator Robert Kennedy, has written: "I was eight years old when my father was murdered. It is almost impossible to describe the pain of losing a parent to a senseless murder.…But even as a child one thing was clear to me: I didn't want the killer, in turn, to be killed. I remember lying in bed and praying, 'Please, God. Please don't take his life too.' 
I saw nothing that could be accomplished in the loss of one life being answered with the loss of another. And I knew, far too vividly, the anguish that would spread through another family – another set of parents, children, brothers, and sisters thrown into grief."(Foreword to Gray and Stanley, A Punishment in Search of A Crime 1989) Across the nation, many who have survived the murder of a loved one have joined Murder Victims' Families for Reconciliation or Murder Victims Families for Human Rights, in the effort to replace anger and hate toward the criminal with a restorative approach to both the offender and the bereaved survivors. Groups of murder victims family members have supported campaigns for abolition of the death penalty in Illinois, Connecticut, Montana and Maryland most recently. Barbara Anderson Young, the sister of James Anderson, who was allegedly run over by a white teenager in Mississippi in 2011, who reportedly wanted to hurt him because he was Black, wrote a letter to the local prosecutor on behalf of their family indicating the family’s opposition to the death penalty, which is “deeply rooted in our religious faith, a faith that was central in James’ life as well.” The letter also eloquently asks that the defendant be spared execution because the death penalty “historically has been used in Mississippi and the South primarily against people of color for killing whites.” It continues, “[e]xecuting James' killers will not help balance the scales. But sparing them may help to spark a dialogue that one day will lead to the elimination of capital punishment." Lawrence Brewer, convicted of the notorious dragging death of James Byrd in Texas, was executed in 2011. Members of Mr. Byrd’s family opposed the death penalty, despite the racist and vicious nature of the killing. 
Of Brewer’s remorselessness – he said he had no regrets the day he was executed – Byrd’s sister, Betty Boatner, said, “If I could say something to him, I would let him know that I forgive him and then if he still has no remorse, I just feel sorry for him.” Byrd’s daughter shared that she didn’t want Brewer to die because “it’s easy . . .(a)ll he’s going to do is go to sleep” rather than live every day with what he did
In Maryland, a comparison of capital trial costs with and without the death penalty for the years studied concluded that a death penalty case costs "approximately 42 percent more than a case resulting in a non-death sentence." In 1988 and 1989 the Kansas legislature voted against reinstating the death penalty after it was informed that reintroduction would involve a first-year cost of more than $11 million. Florida, with one of the nation's most populous death rows, has estimated that "the true cost of each execution is approximately $3.2 million, or approximately six times the cost of a life-imprisonment sentence."
It was not my intent nor do I believe that of the voters who overwhelmingly enacted the death penalty law in 1978. We did not consider that horrific possibility." Heller emphasized that he is not "soft on crime," but that "life without parole protects public safety better than a death sentence." Additionally, he said the money spent on the death penalty could be better used elsewhere, as California cuts funding for police officers and prosecutors. "Paradoxically, the cost of capital punishment takes away funds that could be used to enhance public safety."[34] From one end of the country to the other public officials decry the additional cost of capital cases even when they support the death penalty system. "Wherever the death penalty is in place, it siphons off resources which could be going to the front line in the war against crime…. Politicians could address this crisis, but, for the most part they either endorse executions or remain silent." The only way to make the death penalty more "cost effective" than imprisonment is to weaken due process and curtail appellate review, which are the defendant's (and society's) only protection against the most aberrant miscarriages of justice. Any savings in dollars would, of course, be at the cost of justice: In nearly half of the death-penalty cases given review under federal habeas corpus provisions, the murder conviction or death sentence was overturned. In 1996, in response to public clamor for accelerating executions, Congress imposed severe restrictions on access to federal habeas corpus and also ended all funding of the regional death penalty "resource centers" charged with providing counsel on appeal in the federal courts. (Carol Castenada, "Death Penalty Centers Losing Support Funds," USA Today, Oct. 24, 1995) These restrictions virtually guarantee that the number and variety of wrongful murder convictions and death sentences will increase. The savings in time and money will prove to be illusory. 
CAPITAL PUNISHMENT IS LESS POPULAR THAN THE ALTERNATIVES It is commonly reported that the American public overwhelmingly approves of the death penalty. More careful analysis of public attitudes, however, reveals that most Americans prefer an alternative; they would oppose the death penalty if convicted murderers were sentenced to life without parole and were required to make some form of financial restitution. In 2010, when California voters were asked which sentence they preferred for a first-degree murderer, 42% of registered voters said they preferred life without parole and 41% said they preferred the death penalty. In 2000, when voters were asked the same question, 37% chose life without parole while 44% chose the death penalty. A 1993 nationwide survey revealed that although 77% of the public approves of the death penalty, support drops to 56% if the alternative is punishment with no parole eligibility until 25 years in prison. Support drops even further, to 49%, if the alternative is no parole under any conditions. And if the alternative is no parole plus restitution, it drops still further, to 41%. Only a minority of the American public would favor the death penalty if offered such alternatives. INTERNATIONALLY, CAPITAL PUNISHMENT IS WIDELY VIEWED AS INHUMANE AND ANACHRONISTIC An international perspective on the death penalty helps us understand the peculiarity of its use in the United States. As long ago as 1962, it was reported to the Council of Europe that "the facts clearly show that the death penalty is regarded in Europe as something of an anachronism…." 1962) Today, either by law or in practice, all of Western Europe has abolished the death penalty. In Great Britain, it was abolished (except for cases of treason) in 1971; France abolished it in 1981. Canada abolished it in 1976. 
The United Nations General Assembly affirmed in a formal resolution that throughout the world, it is desirable to "progressively restrict the number of offenses for which the death penalty might be imposed, with a view to the desirability of abolishing this punishment." By mid-1995, eighteen countries had ratified the Sixth Protocol to the European Convention on Human Rights, outlawing the death penalty in peacetime. Underscoring worldwide support for abolition was the action of the South African constitutional court in 1995, barring the death penalty as an "inhumane" punishment. Between 1989 and 1995, two dozen other countries abolished the death penalty for all crimes. Since 1995, 43 more abolished it. All told, 71% of the world’s nation’s have abolished the death penalty in law or practice; only 58 of 197 retain it. International Law A look at international trends and agreements sheds light on the peculiarity of the United States’ continued imposition of capital punishment. Today, over 140 nations have abolished the death penalty either by law or in practice and, of the 58 countries that have retained the death penalty, only 21 carried out known executions in 2011.[35] Furthermore, capital punishment has compelled the United States to abstain from signing or ratifying several major international treaties and perhaps to violate international agreements to which it is a party: In 1989, the General Assembly adopted the Second Optional Protocol to the International Covenant on Civil and Political Rights (ICCPR), one of the UN’s primary human rights treaties.[36] Parties to the Protocol must take all necessary measures to abolish the death penalty and protect their citizens’ right not to be executed, although signatories may reserve the right to apply the death penalty for serious military criminals during wartime.[37] The United States has yet to join the 35 signatories or 75 parties to the Protocol, trailing behind the world’s leading democracies in the protection 
of human rights. Although the Second Protocol to the ICCPR is the only worldwide instrument calling for death penalty abolition, there are three such instruments with regional emphases. Adopted by the Council of Europe in 1982 and ratified by eighteen nations by mid-1995, the Sixth Protocol of the European Convention on Human Rights (ECHR) provides for the abolition of capital punishment during peacetime. In 2002, the Council adopted the Thirteenth Protocol to the ECHR, which provides for the abolition of the death penalty in all circumstances, including times of war or imminent threat of war. In 1990, the Organization of American States adopted the Protocol to the American Convention on Human Rights to Abolish the Death Penalty, which provides for total abolition but allows states to reserve the right to apply the death penalty during wartime.[38] The United States has ratified the Vienna Convention on Consular Relations (VCCR), an international treaty setting forth a framework for consular relations among independent countries. Under Article 36 of the VCCR, local authorities are obligated to inform all detained foreigners “without delay” of their right to request consular notification of their detention and their right to demand and access opportunities to communicate with their consular representatives.[39] Local authorities have repeatedly disregarded this obligation, resulting in the International Court of Justice holding in 2004 that states had violated the VCCR by failing to inform 51 named Mexican nationals of their rights. All 51 were sentenced to death. When the State of Texas refused to honor this judgment and provide relief for the 15 death-row inmates whose VCCR rights it had violated, President George W. Bush sought to intervene on the prisoners’ behalf, taking the case to the United States Supreme Court. The Court denied the President’s appeal, and Texas has gone on to execute inmates whose VCCR rights it had failed to honor. 
In 1994, the United States signed the United Nations (UN) Convention Against Torture and Other Cruel, Inhuman or Degrading Treatment or Punishment (CAT).[40] The treaty, which has now been ratified or signed by 176 nations, outlaws the imposition of physical or psychological abuse on people in detention. While it does not explicitly prohibit capital punishment, the treaty does forbid the intentional infliction of pain. Since 1976, however, more than 20 executions in the United States have involved prolonged, painful, or shocking errors, such as an inmate’s head catching fire or a lengthy and torturous search for a vein suitable for lethal injection. Additionally, accidents aside, our methods of execution—lethal injection, electrocution, firing squad, gas chamber, and hanging—may be inherently painful. The CAT also forbids the infliction of pain and suffering “based on discrimination of any kind,” [41] yet racial inequality is endemic to our death rows. Also in 1994, the United States ratified the International Convention on the Elimination of all forms of Racial Discrimination (ICERD), a treaty intended to protect against racial discrimination, whether intentional or resulting from seemingly neutral state policies. To meet its obligations as a party to ICERD, the United States must take steps to review and amend policies and procedures that create or perpetuate racial discrimination, including capital punishment.[42] Once in use everywhere and for a wide variety of crimes, the death penalty today is generally forbidden by law and widely abandoned in practice, in most countries outside the United States. Indeed, the unmistakable worldwide trend is toward the complete abolition of capital punishment. In the United States, opposition to the death penalty is widespread and diverse. Catholic, Jewish, and Protestant religious groups are among the more than 50 national organizations that constitute the National Coalition to Abolish the Death Penalty. 
The Case Against the Death Penalty was first published by the ACLU as a pamphlet in 1973. The original text was written by Hugo Adam Bedau, Ph.D., who also contributed to several subsequent editions of the pamphlet. This version was most recently revised by the ACLU in 2012. [1] Glenn L. Pierce & Michael L. Radelet, Death Sentencing in East Baton Rouge Parish, 1990-2008, 71 La. L. Rev. 647, 671 (2011), available at http://www.deathpenaltyinfo.org/documents/PierceRadeletStudy.pdf. [2] Liebman et. al, Los Tocayos Carlos, 43 Colum. Hum. Rts. L. Rev. 711, 1104 (2012). [3] See Andrew Cohen, Yes, America, We Have Executed an Innocent Man, Atlantic, May 14, 2012, http://www.theatlantic.com/national/archive/2012/05/yes-america-we-have-executed-an-innocent-man/257106/. [4] See id. [5] See id.; Carlos DeLuna Case: The Fight to Prove an Innocent Man Was Executed, PBS Newshour, May 24, 2012, http://www.pbs.org/newshour/bb/law/jan-june12/deathpenalty_05-24.html. [6] A Three-Drug Cocktail, WashingtonPost.com, Sep. 26, 2007, http://www.washingtonpost.com/wp-dyn/content/graphic/2007/09/26/GR2007092600116.html; see also Victoria Gill, The Search for a Humane Way to Kill, BBC News, Aug. 7, 2012, http://www.bbc.co.uk/news/magazine-19060961. [7] See Carol J. Williams, Maker of Anesthetic Used in Executions is Discontinuing Drug, L.A. Times, Jan. 22, 2011, http://articles.latimes.com/2011/jan/22/local/la-me-execution-drug-20110122; John Schwartz, Death Penalty Drug Raises Legal Questions, N.Y. Times, Apr. 13, 2011, http://www.nytimes.com/2011/04/14/us/14lethal.html?pagewanted=all. [8] See Brandi Grissom, Texas Will Change its Lethal Injection Protocol, Tex. Tribune, July 10, 2012, www.texastribune.org/texas-dept-criminal-justice/death-penalty/texas-changing-its-lethal-injection-protocol/; Rob Stein, Ohio Executes Inmate Using New, Single-Drug Method for Death Penalty, Wash. Post, Mar. 
11, 2011, http://www.washingtonpost.com/wp-dyn/content/article/2011/03/10/AR2011031006250.html; David Beasley, Georgia Delays Execution Amid Drug Protocol Change, Reuters, July, 17, 2012, http://www.reuters.com/article/2012/07/17/us-usa-execution-georgia-idUSBRE86G14L20120717; Rhonda Cook & Bill Rankin, State Changes Lethal Injection Protocol, Reschedules Execution, Atlanta Journal-Constitution, July 17, 2012, http://www.ajc.com/news/atlanta/state-changes-lethal-injection-1479424.html; Steve Eder, A Texas First: Single-Drug Used to Execute Inmate, WSJ Law Blog, http://blogs.wsj.com/law/2012/07/19/a-texas-first-single-drug-used-to-execute-inmate/; Idaho Switches Execution Protocol to Single-Drug Lethal Injection, Spokesman.com, May 18, 2012, http://www.spokesman.com/blogs/boise/2012/may/18/idaho-switches-execution-protocol-single-drug-lethal-injection/. [9] See Carol J. Williams, California’s New Lethal Injection Protocol Tossed By Judge, L.A. Times, Dec. 17, 2011, http://articles.latimes.com/2011/dec/17/local/la-me-executions-20111217; Kathy Lohr, New Lethal Injection Drug Raises Concerns, NPR, Jan. 29, 2011, http://www.npr.org/2011/01/29/133302950/new-lethal-injection-drug-raises-concerns; Steve Eder, Virginia Adds New Drug for Lethal Injections, WSJ Law Blog, July 27, 2012, http://blogs.wsj.com/law/2012/07/27/virginia-adds-new-drug-for-lethal-injections/. [10] Laura Vozzella, Virginia opts for One-Drug Lethal Injection Protocol, Wash. Post, July 27, 2012, http://www.washingtonpost.com/local/dc-politics/virginia-opts-for-one-drug-lethal-injection-protocol/2012/07/27/gJQA8jxiEX_story.html. [11] See Linda Greenhouse, Supreme Court Allows Lethal Injection for Execution, N.Y. Times, Apr. 17, 2008, http://www.nytimes.com/2008/04/17/us/16cnd-scotus.html?pagewanted=all. [12] See Michael Kiefer, State is Sued Again Over Its Lethal-Injection Procedure, USA Today, Feb. 
7, 2012, http://www.usatoday.com/USCP/PNI/Valley%20&%20State/2012-02-07-PNI0207met--executionsART_ST_U.htm; Court Gives Arizona Warning About Execution Protocol, Associated Press, Feb. 28, 2012, available at http://www.azcentral.com/community/pinal/articles/2012/02/28/20120228arizona-moorman-execution-death-row-inmate-lawyers-seek-stays.html. Notably, however, the panel did not halt Arizona’s scheduled executions. Id. [13] David Beasley, Georgia Inmate Gets Stay Hours Before Scheduled Execution, Reuters, July 23, 2012, http://www.reuters.com/article/2012/07/23/us-usa-execution-georgia-idUSBRE86M1F720120723. [14] Steve Eder, Missouri Executions on Hold Amid Concerns About New Drug, Aug. 15, 2012, WSJ Law Blog, http://blogs.wsj.com/law/2012/08/15/missouri-executions-on-hold-amid-concerns-about-new-drug/. [15] Melissa Anderson, ACLU Challenges Montana’s Lethal Injection Protocol, KXLH.com, Aug. 1, 2012, http://www.kxlh.com/news/aclu-challenges-montana-s-lethal-injection-protocol/. [16] See Eder, supra note 3; Steve Olfason, Oklahoma to Execute Man Who Killed Ex-Girlfriend and Her Two Kids, Chicago Tribune, Aug. 14, 2012, http://articles.chicagotribune.com/2012-08-14/news/sns-rt-us-usa-execution-oklahomabre87d0s8-20120814_1_jerry-massie-method-of-lethal-injection-three-drug-protocol; Steve Eder, Oklahoma Execution Set After Lethal Injection Challenge Fails, Aug. 13, 2012, WSJ Law Blog, http://blogs.wsj.com/law/2012/08/13/oklahoman-set-for-executution-after-lethal-injection-challenge-fails/. [17] See Grissom, supra note 3; Ed Pilkington, Texas Executions Threatened As Stocks of Death Penalty Drug Run Low, Guardian, Feb. 14, 2012, http://www.guardian.co.uk/world/2012/feb/14/texas-executions-threatened-stocks-run-low; John Schwartz, Seeking Execution Drug, States Cut Legal Corners, N.Y. Times, Apr. 13, 2011, http://www.nytimes.com/2011/04/14/us/14lethal.html?pagewanted=all; Kiefer, supra note 7. [18] EU Imposes Strict Controls on ‘Execution Drug’ Exports, BBC News, Dec. 
20, 2011, http://www.bbc.co.uk/news/world-europe-16281016; Matt McGrath, FDA Goes to Court to Secure Drugs for Lethal Injections, BBC World, June 1, 2012, http://www.bbc.co.uk/news/science-environment-18253578. [19] See Jeremy Pelofsky, U.S. Wants Lawsuit Over Execution Drug Dismissed, Reuters, Apr. 20, 2011, http://www.reuters.com/article/2011/04/20/us-usa-execution-lawsuit-idUSTRE73J7MH20110420; Michael Kiefer, Execution Drugs: Arizona Inmate Lawsuit Seeks FDA Policing, Ariz. Republic, Feb. 3, 2011, http://www.azcentral.com/news/articles/2011/02/02/20110202arizona-execution-drug-fda-lawsuit-brk02-ON.html; Kevin Johnson, Lawsuit Seeks to Block Imports of Key Execution Drug, USA Today, Feb. 2, 2011, http://content.usatoday.com/communities/ondeadline/post/2011/02/lawsuit-seeks-to-block-imports-of-key-execution-drug/1#.UA2pmKBCzGc; Ryan Gabrielson, Lethal Injection Drug Tied to London Wholesaler, California Watch, Jan. 7, 2011, http://californiawatch.org/dailyreport/lethal-injection-drug-tied-london-wholesaler-7888; Ryan Gabrielson, California Lethal Injection: Prison Officials Refuse to Hand Over Lethal Injection Drug, California Watch, May 30, 2012, available at http://www.huffingtonpost.com/2012/05/30/california-lethal-injection_n_1556155.html. [20] Pelofsky, supra note 14. [21] See Raymond Bonner, FDA’s Immoral Stance on Lethal Injection Drugs, Bloomberg, July 29, 2012, http://www.bloomberg.com/news/2012-07-29/fda-s-immoral-stance-on-lethal-injection-drugs.html. [22] See Elizabeth Rapaport, A Modest Proposal: The Aged of Death Row Should be Deemed Too Old to Execute, 77 Brook. L. Rev. 1089 (Spring 2012); Michael J. Carter, Wanting to Die: The Cruel Phenomenon of “Death Row Syndrome”, Alternet, Nov. 7, 2008, http://www.alternet.org/rights/106300/waiting_to_die%3A_the_cruel_phenomenon_of_%22death_row_syndrome%22/ ; Dr. Karen Harrison and Anouska Tamony, Death Row Phenomenon, Death Row Syndrome, and Their Affect [sic.] 
on Capital Cases in the U.S., Internet Journal of Criminology 2010, available at http://www.internetjournalofcriminology.com/Harrison_Tamony_%20Death_Row_Syndrome%20_IJC_Nov_2010.pdf. [23] See Stop Solitary – The Dangerous Overuse of Solitary Confinement in the United States, ACLU.org, https://www.aclu.org/stop-solitary-dangerous-overuse-solitary-confinement-united-states-0. [24] See Harrison and Tamony, supra note 25. [25] See Carter, supra note 25; Death Penalty Information Center, Time on Death Row (2006), at http://www.deathpenaltyinfo.org/time-death-row. [26] See id. [27] Amy Smith, Not “Waiving” But Drowning: The Anatomy of Death Row Syndrome and Volunteering for Execution, 17 B.U. Pub. Int. L.J. 237, 243, available at http://www.bu.edu/law/central/jd/organizations/journals/pilj/vol17no2/documents/17-2SmithArticle.pdf. [28] Lackey v. Texas, 115 S. Ct. 1421, 1421 (1995) (Stevens, J., concurring in the denial of certiorari). [29] Stephen Blank, Killing Time: The Process of Waiving Appeal – The Michael Ross Death Penalty Cases, 14 J.L. & Pol’y 735, 738-39 (2006). [30] Soering v. UK, App. No. 14038/88, 11 Eur. H.R. Rep. 439 (1989), available at http://eji.org/eji/files/Soering%20v.%20United%20Kingdom.pdf. [31] See David Wallace-Wells, What is Death Row Syndrome?, Slate, Feb. 1, 2005, http://www.slate.com/articles/news_and_politics/explainer/2005/02/what_is_death_row_syndrome.html; Smith supra note 30. [32] Smith supra note 30. (quoting Soering, 11 Eur. H. R. Rep. at 475-76). [33] Id. at 239. [34] Carol J. Williams, Death Penalty: Exhaustive Study Finds Death Penalty Costs California $184 Million a Year, L.A. Times, June 20, 2011, http://articles.latimes.com/2011/jun/20/local/la-me-adv-death-penalty-costs-20110620. [35] Figures on the Death Penalty, Amnesty International, http://www.amnesty.org/en/death-penalty/numbers. 
[36] UN General Assembly, Second Optional Protocol to the International Covenant on Civil and Political Rights, Aiming at the Abolition of the Death Penalty, Dec. 15, 1989, A/RES/44/128, available at: http://www.unhcr.org/refworld/docid/3ae6b3a70.html [accessed 15 August 2012] [hereinafter Second Optional Protocol]. [37] See Pierre Desert, Second Optional Protocol: Frequently Asked Questions, World Coalition Against the Death Penalty, June 27, 2008, http://www.worldcoalition.org/Second-Optional-Protocol-Frequently-Asked-Questions.html; Pierre Desert, Second Optional Protocol: The Only Global Treaty Aiming at the Abolition of the Death Penalty, World Coalition Against the Death Penalty, June 24, 2008, www.worldcoalition.org/UN-Protocol-the-only-global-treaty-aiming-at-the-abolition-of-the-death-penalty.html; Second Optional Protocol, supra note 21. [38] Desert, Second Optional Protocol: Frequently Asked Questions, supra note 22. [39] Vienna Convention on Consular Relations, Mar. 19, 1967, 596 U.N.T.S. 261, available at http://untreaty.un.org/ilc/texts/instruments/english/conventions/9_2_1963.pdf. [40] United Nations Convention Against Torture and Other Cruel, Inhuman or Degrading Treatment or Punishment, Dec. 10, 1984, 1465 U.N.T.S. 85, available at http://treaties.un.org/doc/publication/UNTS/Volume%201465/v1465.pdf. [41] Richard C. Dieter, Introduction: Ford Foundation Symposium, Nov. 12, 1998, available at http://www.deathpenaltyinfo.org/us-death-penalty-and-international-law-us-compliance-torture-and-race-conventions. [42] International Convention on the Elimination of All Forms of Racial Discrimination, Mar. 7, 1966, 660 U.N.T.S. 195, available at http://treaties.un.org/doc/publication/UNTS/Volume%20660/v660.pdf.
/** * Created by melanieh on 1/19/17. */ public class MovieProvider extends ContentProvider { public static final String favoritesTable = FavoriteEntry.TABLE_NAME; // This cursor will hold the result of the query Cursor cursor; /** {@link MovieProvider} */ /** Tag for the log messages */ public final String LOG_TAG = MovieProvider.class.getSimpleName(); /** URI matcher code for the content URI for the favorites table */ private static final int FAVORITES = 100; /** URI matcher code for a single favorite's content URI */ private static final int FAVORITE_ID = 101; /** * UriMatcher object to match a content URI to a corresponding code. * The input passed into the constructor represents the code to return for the root URI. * It's common to use NO_MATCH as the input for this case. */ private static final UriMatcher sUriMatcher = new UriMatcher(UriMatcher.NO_MATCH); // Static initializer. This is run the first time anything is called from this class. static { // The calls to addURI() go here, for all of the content URI patterns that the provider // should recognize. All paths added to the UriMatcher have a corresponding code to return // when a match is found. 
sUriMatcher.addURI(MovieContract.CONTENT_AUTHORITY, MovieContract.PATH_MOVIES, FAVORITES); sUriMatcher.addURI(MovieContract.CONTENT_AUTHORITY, MovieContract.PATH_MOVIES + "/#", FAVORITE_ID); } SQLiteDatabase db; /** Database helper object */ private MovieDBHelper mDbHelper; @Override public boolean onCreate() { mDbHelper = new MovieDBHelper(getContext()); return true; } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { // Get readable database db = mDbHelper.getReadableDatabase(); // Figure out if the URI matcher can match the URI to a specific code int match = sUriMatcher.match(uri); switch (match) { case FAVORITES: cursor = db.query(FavoriteEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder); break; case FAVORITE_ID: selection = FavoriteEntry._ID + "=?"; selectionArgs = new String[] { String.valueOf(ContentUris.parseId(uri)) }; // Cursor containing that row of the table. cursor = db.query(FavoriteEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder); break; default: throw new IllegalArgumentException("Cannot query unknown URI " + uri); } cursor.setNotificationUri(getContext().getContentResolver(), uri); return cursor; } @Override public Uri insert(Uri uri, ContentValues contentValues) { final int match = sUriMatcher.match(uri); switch (match) { case FAVORITES: // updating for the favorites table is only supported for the entire table so the selection // and selectionArgs values are set to null here return saveFavorite(uri, contentValues, null, null); default: throw new IllegalArgumentException("Insertion is not supported for " + uri); } } /*** helper method for updating the favorites */ private Uri saveFavorite(Uri uri, ContentValues contentValues, String selection, String selectionArgs) { db = mDbHelper.getWritableDatabase(); long newRowId = db.insert(FavoriteEntry.TABLE_NAME, null, contentValues); // if (newRowId == -1) { // //// 
Toast.makeText(getContext(), getContext().getString(R.string.error_inserting_favorite), //// Toast.LENGTH_SHORT).show(); //// } else { //// Toast.makeText(getContext(), getContext().getString(R.string.insert_favorite_successful), //// Toast.LENGTH_SHORT).show(); //// } // getContext().getContentResolver().notifyChange(uri, null); // } // Return the new URI with the ID (of the newly inserted row) appended at the end return ContentUris.withAppendedId(FavoriteEntry.CONTENT_URI, newRowId); } @Override public int update(Uri uri, ContentValues contentValues, String selection, String[] selectionArgs) { // prevents calls to this method for individual favorites; // favorites table supports only updates to the entire table, i.e. by either insertion of a new // favorite (handled by insert method here) or deletion of existing favorite(s) (handled by // delete method here) return 0; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { // Get writeable database db = mDbHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); switch (match) { case FAVORITES: // Delete all rows that match the selection and selection args deleteFavorites(); case FAVORITE_ID: // Delete a single row given by the ID in the URI selection = FavoriteEntry._ID + "=?"; selectionArgs = new String[] { String.valueOf(ContentUris.parseId(uri)) }; return db.delete(FavoriteEntry.TABLE_NAME, selection, selectionArgs); default: throw new IllegalArgumentException("Deletion is not supported for " + uri); } } private void deleteFavorites() { db = mDbHelper.getWritableDatabase(); int numRowsDeleted = db.delete(favoritesTable, null, null); if (numRowsDeleted == 0) { Toast.makeText(getContext(), getContext().getString(R.string.error_deleting_favorite), Toast.LENGTH_SHORT).show(); } else { Toast.makeText(getContext(), getContext().getString(R.string.favorite_deletion_successful), Toast.LENGTH_SHORT).show(); } } @Override public String getType(Uri uri) { final int match = 
sUriMatcher.match(uri); switch (match) { case FAVORITES: return FavoriteEntry.CONTENT_LIST_TYPE; case FAVORITE_ID: return FavoriteEntry.CONTENT_ITEM_TYPE; default: throw new IllegalStateException("Unknown URI " + uri + " with match " + match); } } }
// Programa 8
#include <stdio.h>

/*
 * Demonstrates the "dangling else" rule: each `else` binds to the nearest
 * unmatched `if`, so the first `else` below belongs to the inner `if`
 * (c == d) and the final `else` to the outer `if` (a != b).
 *
 * Fix vs. the original: `void main()` is nonstandard; hosted C requires
 * `int main` returning a status.
 */
int main(void)
{
    int x, yy, a, b, c, d;

    x = 1;
    yy = 1;
    a = 3;
    b = 4;
    c = -1;
    d = 1;

    /* a != b is true (3 != 4), so the inner if runs; c == d is false
     * (-1 != 1), so yy = 0 and x keeps its initial 1. */
    if (a != b)
        if (c == d)
            yy = 1;
        else
            yy = 0;
    else
        x = 0;

    return 0;
}

/* Behavior: x ends up holding true (1) and yy false (0). */
/*
 * linux/arch/arm/mach-integrator/integrator_cp.c
 *
 * Copyright (C) 2003 Deep Blue Solutions Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License.
 */
/* Board-support file for the ARM Integrator/CP: static I/O mappings, the
 * three on-board interrupt controllers (CIC/PIC/SIC), clocks, flash, MMC,
 * AACI, ethernet and CLCD devices, and the machine descriptor. */
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/list.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/sysdev.h>
#include <linux/amba/bus.h>
#include <linux/amba/kmi.h>
#include <linux/amba/clcd.h>
#include <asm/hardware.h>
#include <asm/io.h>
#include <asm/irq.h>
#include <asm/setup.h>
#include <asm/mach-types.h>
#include <asm/hardware/icst525.h>
#include <asm/arch/cm.h>
#include <asm/arch/lm.h>
#include <asm/mach/arch.h>
#include <asm/mach/flash.h>
#include <asm/mach/irq.h>
#include <asm/mach/mmc.h>
#include <asm/mach/map.h>
#include <asm/mach/time.h>
#include "common.h"
#include "clock.h"

/* Physical (PA) and pre-mapped virtual (VA) base addresses of the
 * Integrator/CP peripherals used below. */
#define INTCP_PA_MMC_BASE 0x1c000000
#define INTCP_PA_AACI_BASE 0x1d000000
#define INTCP_PA_FLASH_BASE 0x24000000
#define INTCP_FLASH_SIZE SZ_32M
#define INTCP_PA_CLCD_BASE 0xc0000000
#define INTCP_VA_CIC_BASE 0xf1000040
#define INTCP_VA_PIC_BASE 0xf1400000
#define INTCP_VA_SIC_BASE 0xfca00000
#define INTCP_PA_ETH_BASE 0xc8000000
#define INTCP_ETH_SIZE 0x10
#define INTCP_VA_CTRL_BASE 0xfcb00000
/* Flash programming control register offset and its enable bits. */
#define INTCP_FLASHPROG 0x04
#define CINTEGRATOR_FLASHPROG_FLVPPEN (1 << 0)
#define CINTEGRATOR_FLASHPROG_FLWREN (1 << 1)

/*
 * Logical Physical
 * f1000000 10000000 Core module registers
 * f1100000 11000000 System controller registers
 * f1200000 12000000 EBI registers
 * f1300000 13000000 Counter/Timer
 * f1400000 14000000 Interrupt controller
 * f1600000 16000000 UART 0
 * f1700000 17000000 UART 1
 * f1a00000 1a000000 Debug LEDs
 * f1b00000 1b000000 GPIO
 */
static struct map_desc intcp_io_desc[] __initdata = {
    {
        .virtual = IO_ADDRESS(INTEGRATOR_HDR_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_HDR_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_SC_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_SC_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_EBI_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_EBI_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_CT_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_CT_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_IC_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_IC_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_UART0_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_UART0_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_UART1_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_UART1_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_DBG_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_DBG_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        .virtual = IO_ADDRESS(INTEGRATOR_GPIO_BASE),
        .pfn = __phys_to_pfn(INTEGRATOR_GPIO_BASE),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        /* SIC registers (also read in mmc_status() below). */
        .virtual = 0xfca00000,
        .pfn = __phys_to_pfn(0xca000000),
        .length = SZ_4K,
        .type = MT_DEVICE
    }, {
        /* CP control registers (flash programming, MMC latch clear). */
        .virtual = 0xfcb00000,
        .pfn = __phys_to_pfn(0xcb000000),
        .length = SZ_4K,
        .type = MT_DEVICE
    }
};

/* Install the static I/O mappings declared above. */
static void __init intcp_map_io(void)
{
    iotable_init(intcp_io_desc, ARRAY_SIZE(intcp_io_desc));
}

#define cic_writel __raw_writel
#define cic_readl __raw_readl
#define pic_writel __raw_writel
#define pic_readl __raw_readl
#define sic_writel __raw_writel
#define sic_readl __raw_readl

/* CIC (core-module interrupt controller): masking writes the IRQ's bit to
 * the enable-clear register, unmasking to the enable-set register. */
static void cic_mask_irq(unsigned int irq)
{
    irq -= IRQ_CIC_START;
    cic_writel(1 << irq, INTCP_VA_CIC_BASE + IRQ_ENABLE_CLEAR);
}

static void cic_unmask_irq(unsigned int irq)
{
    irq -= IRQ_CIC_START;
    cic_writel(1 << irq, INTCP_VA_CIC_BASE + IRQ_ENABLE_SET);
}

static struct irq_chip cic_chip = {
    .name = "CIC",
    .ack = cic_mask_irq, /* ack == mask: no separate acknowledge register */
    .mask = cic_mask_irq,
    .unmask = cic_unmask_irq,
};

/* PIC (primary interrupt controller): same enable-set/clear scheme. */
static void pic_mask_irq(unsigned int irq)
{
    irq -= IRQ_PIC_START;
    pic_writel(1 << irq, INTCP_VA_PIC_BASE + IRQ_ENABLE_CLEAR);
}

static void pic_unmask_irq(unsigned int irq)
{
    irq -= IRQ_PIC_START;
    pic_writel(1 << irq, INTCP_VA_PIC_BASE + IRQ_ENABLE_SET);
}

static struct irq_chip pic_chip = {
    .name = "PIC",
    .ack = pic_mask_irq,
    .mask = pic_mask_irq,
    .unmask = pic_unmask_irq,
};

/* SIC (secondary interrupt controller), cascaded off the PIC. */
static void sic_mask_irq(unsigned int irq)
{
    irq -= IRQ_SIC_START;
    sic_writel(1 << irq, INTCP_VA_SIC_BASE + IRQ_ENABLE_CLEAR);
}

static void sic_unmask_irq(unsigned int irq)
{
    irq -= IRQ_SIC_START;
    sic_writel(1 << irq, INTCP_VA_SIC_BASE + IRQ_ENABLE_SET);
}

static struct irq_chip sic_chip = {
    .name = "SIC",
    .ack = sic_mask_irq,
    .mask = sic_mask_irq,
    .unmask = sic_unmask_irq,
};

/* Chained handler for the SIC cascade: read the SIC status register and
 * dispatch every pending bit, lowest first, until none remain. */
static void sic_handle_irq(unsigned int irq, struct irq_desc *desc)
{
    unsigned long status = sic_readl(INTCP_VA_SIC_BASE + IRQ_STATUS);

    if (status == 0) {
        /* Spurious cascade interrupt: nothing pending. */
        do_bad_IRQ(irq, desc);
        return;
    }

    do {
        irq = ffs(status) - 1;
        status &= ~(1 << irq);

        irq += IRQ_SIC_START;

        desc = irq_desc + irq;
        desc_handle_irq(irq, desc);
    } while (status);
}

static void __init intcp_init_irq(void)
{
    unsigned int i;

    /*
     * Disable all interrupt sources
     */
    pic_writel(0xffffffff, INTCP_VA_PIC_BASE + IRQ_ENABLE_CLEAR);
    pic_writel(0xffffffff, INTCP_VA_PIC_BASE + FIQ_ENABLE_CLEAR);

    for (i = IRQ_PIC_START; i <= IRQ_PIC_END; i++) {
        /* NOTE(review): IRQs 11-21 are skipped and registration stops
         * before 29 -- presumably unpopulated PIC sources on this board;
         * confirm against the Integrator/CP TRM. */
        if (i == 11)
            i = 22;
        if (i == 29)
            break;
        set_irq_chip(i, &pic_chip);
        set_irq_handler(i, handle_level_irq);
        set_irq_flags(i, IRQF_VALID | IRQF_PROBE);
    }

    cic_writel(0xffffffff, INTCP_VA_CIC_BASE + IRQ_ENABLE_CLEAR);
    cic_writel(0xffffffff, INTCP_VA_CIC_BASE + FIQ_ENABLE_CLEAR);

    for (i = IRQ_CIC_START; i <= IRQ_CIC_END; i++) {
        set_irq_chip(i, &cic_chip);
        set_irq_handler(i, handle_level_irq);
        set_irq_flags(i, IRQF_VALID);
    }

    sic_writel(0x00000fff, INTCP_VA_SIC_BASE + IRQ_ENABLE_CLEAR);
    sic_writel(0x00000fff, INTCP_VA_SIC_BASE + FIQ_ENABLE_CLEAR);

    for (i = IRQ_SIC_START; i <= IRQ_SIC_END; i++) {
        set_irq_chip(i, &sic_chip);
        set_irq_handler(i, handle_level_irq);
        set_irq_flags(i, IRQF_VALID | IRQF_PROBE);
    }

    /* Route the SIC cascade through its PIC input. */
    set_irq_chained_handler(IRQ_CP_CPPLDINT, sic_handle_irq);
}

/*
 * Clock handling
 */
#define CM_LOCK (IO_ADDRESS(INTEGRATOR_HDR_BASE)+INTEGRATOR_HDR_LOCK_OFFSET)
#define CM_AUXOSC (IO_ADDRESS(INTEGRATOR_HDR_BASE)+0x1c)

static const struct icst525_params cp_auxvco_params = {
    .ref = 24000,
    .vco_max = 320000,
    .vd_min = 8,
    .vd_max = 263,
    .rd_min = 3,
    .rd_max = 65,
};

/* Program the auxiliary VCO: unlock the core-module registers (0xa05f),
 * write the new divider fields, then re-lock with 0. */
static void cp_auxvco_set(struct clk *clk, struct icst525_vco vco)
{
    u32 val;

    val = readl(CM_AUXOSC) & ~0x7ffff;
    val |= vco.v | (vco.r << 9) | (vco.s << 16);

    writel(0xa05f, CM_LOCK);
    writel(val, CM_AUXOSC);
    writel(0, CM_LOCK);
}

static struct clk cp_clcd_clk = {
    .name = "CLCDCLK",
    .params = &cp_auxvco_params,
    .setvco = cp_auxvco_set,
};

static struct clk cp_mmci_clk = {
    .name = "MCLK",
    .rate = 14745600,
};

/*
 * Flash handling.
 */
/* Enable flash write access before an MTD operation. */
static int intcp_flash_init(void)
{
    u32 val;

    val = readl(INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);
    val |= CINTEGRATOR_FLASHPROG_FLWREN;
    writel(val, INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);

    return 0;
}

/* Drop both VPP and write enable when the MTD layer is done. */
static void intcp_flash_exit(void)
{
    u32 val;

    val = readl(INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);
    val &= ~(CINTEGRATOR_FLASHPROG_FLVPPEN|CINTEGRATOR_FLASHPROG_FLWREN);
    writel(val, INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);
}

/* Toggle the flash programming voltage (VPP) on request. */
static void intcp_flash_set_vpp(int on)
{
    u32 val;

    val = readl(INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);
    if (on)
        val |= CINTEGRATOR_FLASHPROG_FLVPPEN;
    else
        val &= ~CINTEGRATOR_FLASHPROG_FLVPPEN;
    writel(val, INTCP_VA_CTRL_BASE + INTCP_FLASHPROG);
}

static struct flash_platform_data intcp_flash_data = {
    .map_name = "cfi_probe",
    .width = 4,
    .init = intcp_flash_init,
    .exit = intcp_flash_exit,
    .set_vpp = intcp_flash_set_vpp,
};

static struct resource intcp_flash_resource = {
    .start = INTCP_PA_FLASH_BASE,
    .end = INTCP_PA_FLASH_BASE + INTCP_FLASH_SIZE - 1,
    .flags = IORESOURCE_MEM,
};

static struct platform_device intcp_flash_device = {
    .name = "armflash",
    .id = 0,
    .dev = {
        .platform_data = &intcp_flash_data,
    },
    .num_resources = 1,
    .resource = &intcp_flash_resource,
};

/* SMC91x on-board ethernet: register window plus its interrupt. */
static struct resource smc91x_resources[] = {
    [0] = {
        .start = INTCP_PA_ETH_BASE,
        .end = INTCP_PA_ETH_BASE + INTCP_ETH_SIZE - 1,
        .flags = IORESOURCE_MEM,
    },
    [1] = {
        .start = IRQ_CP_ETHINT,
        .end = IRQ_CP_ETHINT,
        .flags = IORESOURCE_IRQ,
    },
};

static struct platform_device smc91x_device = {
    .name = "smc91x",
    .id = 0,
    .num_resources = ARRAY_SIZE(smc91x_resources),
    .resource = smc91x_resources,
};

static struct platform_device *intcp_devs[] __initdata = {
    &intcp_flash_device,
    &smc91x_device,
};

/*
 * It seems that the card insertion interrupt remains active after
 * we've acknowledged it. We therefore ignore the interrupt, and
 * rely on reading it from the SIC. This also means that we must
 * clear the latched interrupt.
 */
static unsigned int mmc_status(struct device *dev)
{
    unsigned int status = readl(0xfca00004); /* SIC raw status */
    writel(8, 0xfcb00008);                   /* clear the latched bit */

    return status & 8;
}

static struct mmc_platform_data mmc_data = {
    .ocr_mask = MMC_VDD_32_33|MMC_VDD_33_34,
    .status = mmc_status,
};

static struct amba_device mmc_device = {
    .dev = {
        .bus_id = "mb:1c",
        .platform_data = &mmc_data,
    },
    .res = {
        .start = INTCP_PA_MMC_BASE,
        .end = INTCP_PA_MMC_BASE + SZ_4K - 1,
        .flags = IORESOURCE_MEM,
    },
    .irq = { IRQ_CP_MMCIINT0, IRQ_CP_MMCIINT1 },
    .periphid = 0,
};

static struct amba_device aaci_device = {
    .dev = {
        .bus_id = "mb:1d",
    },
    .res = {
        .start = INTCP_PA_AACI_BASE,
        .end = INTCP_PA_AACI_BASE + SZ_4K - 1,
        .flags = IORESOURCE_MEM,
    },
    .irq = { IRQ_CP_AACIINT, NO_IRQ },
    .periphid = 0,
};

/*
 * CLCD support
 */
/* Fixed 640x480@60 VGA timings for the CLCD panel. */
static struct clcd_panel vga = {
    .mode = {
        .name = "VGA",
        .refresh = 60,
        .xres = 640,
        .yres = 480,
        .pixclock = 39721,
        .left_margin = 40,
        .right_margin = 24,
        .upper_margin = 32,
        .lower_margin = 11,
        .hsync_len = 96,
        .vsync_len = 2,
        .sync = 0,
        .vmode = FB_VMODE_NONINTERLACED,
    },
    .width = -1,
    .height = -1,
    .tim2 = TIM2_BCD | TIM2_IPC,
    .cntl = CNTL_LCDTFT | CNTL_LCDVCOMP(1),
    .bpp = 16,
    .grayscale = 0,
};

/*
 * Ensure VGA is selected.
 */
static void cp_clcd_enable(struct clcd_fb *fb)
{
    u32 val;

    /* Pick the LCD mux setting for the requested depth. */
    if (fb->fb.var.bits_per_pixel <= 8)
        val = CM_CTRL_LCDMUXSEL_VGA_8421BPP;
    else if (fb->fb.var.bits_per_pixel <= 16)
        val = CM_CTRL_LCDMUXSEL_VGA_16BPP
            | CM_CTRL_LCDEN0 | CM_CTRL_LCDEN1
            | CM_CTRL_STATIC1 | CM_CTRL_STATIC2;
    else
        val = 0; /* no idea for this, don't trust the docs */

    cm_control(CM_CTRL_LCDMUXSEL_MASK|
            CM_CTRL_LCDEN0|
            CM_CTRL_LCDEN1|
            CM_CTRL_STATIC1|
            CM_CTRL_STATIC2|
            CM_CTRL_STATIC|
            CM_CTRL_n24BITEN, val);
}

static unsigned long framesize = SZ_1M;

/* Allocate a DMA-coherent, write-combined framebuffer for the CLCD. */
static int cp_clcd_setup(struct clcd_fb *fb)
{
    dma_addr_t dma;

    fb->panel = &vga;

    fb->fb.screen_base = dma_alloc_writecombine(&fb->dev->dev, framesize,
            &dma, GFP_KERNEL);
    if (!fb->fb.screen_base) {
        printk(KERN_ERR "CLCD: unable to map framebuffer\n");
        return -ENOMEM;
    }

    fb->fb.fix.smem_start = dma;
    fb->fb.fix.smem_len = framesize;

    return 0;
}

static int cp_clcd_mmap(struct clcd_fb *fb, struct vm_area_struct *vma)
{
    return dma_mmap_writecombine(&fb->dev->dev, vma,
            fb->fb.screen_base,
            fb->fb.fix.smem_start,
            fb->fb.fix.smem_len);
}

static void cp_clcd_remove(struct clcd_fb *fb)
{
    dma_free_writecombine(&fb->dev->dev, fb->fb.fix.smem_len,
            fb->fb.screen_base, fb->fb.fix.smem_start);
}

static struct clcd_board clcd_data = {
    .name = "Integrator/CP",
    .check = clcdfb_check,
    .decode = clcdfb_decode,
    .enable = cp_clcd_enable,
    .setup = cp_clcd_setup,
    .mmap = cp_clcd_mmap,
    .remove = cp_clcd_remove,
};

static struct amba_device clcd_device = {
    .dev = {
        .bus_id = "mb:c0",
        .coherent_dma_mask = ~0,
        .platform_data = &clcd_data,
    },
    .res = {
        .start = INTCP_PA_CLCD_BASE,
        .end = INTCP_PA_CLCD_BASE + SZ_4K - 1,
        .flags = IORESOURCE_MEM,
    },
    .dma_mask = ~0,
    .irq = { IRQ_CP_CLCDCINT, NO_IRQ },
    .periphid = 0,
};

static struct amba_device *amba_devs[] __initdata = {
    &mmc_device,
    &aaci_device,
    &clcd_device,
};

/* Machine init: register clocks, then platform and AMBA devices. */
static void __init intcp_init(void)
{
    int i;

    clk_register(&cp_clcd_clk);
    clk_register(&cp_mmci_clk);

    platform_add_devices(intcp_devs, ARRAY_SIZE(intcp_devs));

    for (i = 0; i < ARRAY_SIZE(amba_devs); i++) {
        struct amba_device *d = amba_devs[i];
        amba_device_register(d, &iomem_resource);
    }
}

#define TIMER_CTRL_IE (1 << 5) /* Interrupt Enable */

/* Start the system tick timer with its interrupt enabled. */
static void __init intcp_timer_init(void)
{
    integrator_time_init(1000000 / HZ, TIMER_CTRL_IE);
}

static struct sys_timer cp_timer = {
    .init = intcp_timer_init,
    .offset = integrator_gettimeoffset,
};

MACHINE_START(CINTEGRATOR, "ARM-IntegratorCP")
    /* Maintainer: ARM Ltd/Deep Blue Solutions Ltd */
    .phys_io = 0x16000000,
    .io_pg_offst = ((0xf1600000) >> 18) & 0xfffc,
    .boot_params = 0x00000100,
    .map_io = intcp_map_io,
    .init_irq = intcp_init_irq,
    .timer = &cp_timer,
    .init_machine = intcp_init,
MACHINE_END
Close A new research reveals that women with post-traumatic stress disorder (PTSD) are more likely to develop food addiction. Scientists suggest PTSD is a psychiatric disorder, which can develop when a person witnesses a life-threatening event or trauma such as war, sexual abuse, a serious accident and more. Emergency workers and veterans of war are highly likely to develop PTSD. About 7 to 8 percent of Americans suffer from PTSD at some point in their life and around 5.2 million adults experience PTSD each year in the U.S. The latest study, conducted by researchers at the University of Minnesota, School of Public Health, reveals that women who show severe symptoms of PTSD are two times more likely to meet the criteria for food addiction. Susan Mason, an assistant professor in the School of Public Health, who is also the lead author of the study, revealed that they examined over 49,000 women for the study. The participants were asked to complete a questionnaire, which also asked about lifetime symptoms associated with PTSD and linked them with the existence of food addiction. The researchers suggest that women who reported more PTSD symptoms were more likely to have food addiction when measured via the Yale Food Addiction Scale, which assesses dependency on food. The researchers suggest that more than 50 percent of the participants were exposed to some type of trauma and about 66 percent of the women who experienced trauma reported having at least one lifetime PTSD symptom. Around 8 percent of these women also reported having between 6 and 7 PTSD symptoms, which was the maximum on the questionnaire they had filled out. About 8 percent of the total participants were believed to have food addiction. "This prevalence ranged from 6 percent among women with no trauma and no PTSD symptoms to nearly 18 percent among women with trauma and 6-7 PTSD symptoms," per the study. 
The research also highlighted that women who came across severe PTSD symptoms before the age of 10 years were expected to meet food addiction criteria 3.7 times more when compared to women who did not show any PTSD symptoms. The study authors also suggest that women with no PTSD symptoms did not meet the criteria for food addiction, even though they had some type of trauma at some point in their life. Mason suggests that food addiction can also lead to obesity, which is a major health issue faced by healthcare professionals in the U.S. However, more research is needed to establish a link between PTSD and obesity. The study has been published in the journal JAMA Psychiatry. ⓒ 2018 TECHTIMES.com All rights reserved. Do not reproduce without permission.
def result_type(*arrays_and_dtypes: Union[array, dtype]) -> dtype:
    """Return the dtype produced by applying type promotion to the arguments.

    NOTE(review): only the signature is visible here; the description follows
    the Python array API standard's ``result_type`` contract -- confirm
    against the actual implementation body.

    Parameters
    ----------
    arrays_and_dtypes
        An arbitrary number of input arrays and/or dtypes to promote.

    Returns
    -------
    dtype
        The dtype resulting from promoting all inputs together.
    """
Doing Fence Sitting A growing body of research indicates that the way health care professionals conceptualize mental health might have important clinical implications. We adopted a discursive psychology approach to explore clinical psychologists’ accounts of mental health and its effects. Semistructured interviews were conducted with 11 clinical psychologists in the East Midlands region of the United Kingdom. The participants constructed mental health through building up biological factors and psychosocial aspects as opposite ends of the same spectrum, and then positioned themselves as distant from these extremes to manage issues of stake and accountability. A discourse of moral concern for service users was used to negotiate the implications of having different views of mental health from service users, enabling clinicians to manage issues of accountability and demonstrate their ability to be helpful. This suggests that clinicians should be mindful of the effects of their use of language and make the contingent nature of their knowledge explicit.
/**
 * Builds a concrete, unique snapshot name from this policy's configured name.
 * Any date-math expression in the name is resolved first, and a random
 * lowercase identifier is appended so that two expressions resolving to the
 * same string still yield distinct snapshot names.
 */
public String generateSnapshotName(Context context) {
    final List<String> resolved = DATE_MATH_RESOLVER.resolve(context, Collections.singletonList(this.name));
    if (resolved.size() == 1) {
        // Append a random suffix so overlapping resolved names stay unique.
        final String suffix = UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT);
        return resolved.get(0) + "-" + suffix;
    }
    throw new IllegalStateException("resolving snapshot name " + this.name +
        " generated more than one candidate: " + resolved);
}
/**
 * Internal node of the BST. A node stores exactly one RestStop payload plus
 * its height and links to its two subtrees; ordering between nodes is
 * delegated entirely to the stored RestStop values.
 *
 * @author Joshua Forlenza
 */
private class BSTNode implements Comparable<BSTNode> {

    RestStop data;   // payload carried by this node
    int height;      // height of the subtree rooted here
    BSTNode left;    // left child (smaller values)
    BSTNode right;   // right child (larger values)

    /**
     * Builds a leaf-style node holding only the given RestStop.
     *
     * @param data RestStop object that the node will store
     */
    public BSTNode(RestStop data) {
        this.data = data;
    }

    /**
     * Builds a fully specified node.
     *
     * @param data   RestStop object that the node will store
     * @param height height of the node
     * @param left   left child of the node
     * @param right  right child of the node
     */
    public BSTNode(RestStop data, int height, BSTNode left, BSTNode right) {
        this(data);
        this.height = height;
        this.left = left;
        this.right = right;
    }

    /**
     * Orders nodes by their stored RestStop values.
     *
     * @param other the node to compare against
     * @return a negative integer, zero, or a positive integer as this node's
     *         data is less than, equal to, or greater than the other's
     */
    @Override
    public int compareTo(BSTNode other) {
        return data.compareTo(other.data);
    }
}
<gh_stars>1-10 import { Compass } from "./compass.ts"; import { assertEquals } from "https://deno.land/std/testing/asserts.ts" Deno.test("Test define and find",()=>{ let c = new Compass(); c.define("/"); c.define("/name_of"); c.define("/name_of/app/compass"); c.define("/name_of/:usrname/info"); c.define("/:docid/meta/:param_name"); c.define("/name_of/department/:dep_code/info"); //@ts-ignore assertEquals(c.path_tree,{ value: "/", parameterized:{ docid:{ plain:{ meta:{ parameterized:{ param_name: {value: "/:docid/meta/:param_name"} } } } } }, plain:{ name_of:{ value: "/name_of", parameterized:{ usrname:{ plain:{ info: {value: "/name_of/:usrname/info"} } } }, plain:{ app:{ plain:{ compass: {value: "/name_of/app/compass"} } }, department:{ parameterized:{ dep_code:{ plain:{ info:{value:"/name_of/department/:dep_code/info"} } } } } } } } }); console.log("testing for paths"); let r0 = c.find("/"); console.log(r0); assertEquals(r0?.matched_pattern,"/"); assertEquals(r0?.parent_matches,[]); let r1 = c.find("/name_of/anurag/info"); console.log(r1); assertEquals(r1!==undefined, true); assertEquals(r1?.matched_pattern,"/name_of/:usrname/info"); assertEquals(r1?.parent_matches,["/", "/name_of"]); let r2 = c.find("123/meta/size"); console.log(r2); assertEquals(r2!==undefined, true); assertEquals(r2?.matched_pattern,"/:docid/meta/:param_name"); assertEquals(r2?.parent_matches,["/"]); let r3 = c.find("/name_of/department/123/info"); console.log(r3); assertEquals(r3!==undefined,true); assertEquals(r3?.matched_pattern,"/name_of/department/:dep_code/info"); assertEquals(r3?.parent_matches,[ "/", "/name_of"]); let r4=c.find("/path/not/defined"); assertEquals(r4,undefined); })
import re
from typing import Union, Tuple, Iterator, Optional

from rdflib import URIRef, BNode, Literal, Graph

# This document assumes an understanding of the ShEx notation and terminology.
#
# ShapeExpression: a Boolean expression of ShEx shapes.
# focus node: a node, potentially in an RDF graph, to be inspected for conformance with a shape expression.
#
# ShExMap uses the following terms from RDF semantics [rdf11-mt]:
#
# Node: one of IRI, blank node, Literal.
# Graph: a set of Triples of (subject, predicate, object).

# We have no idea what is intended in the above definition -- for the moment we'll define it as a function
# ShapeExpression = Callable[[List[ShExJ.Shape], bool]]

Node = Union[URIRef, BNode, Literal]
FocusNode = Node

TripleSubject = Union[URIRef, BNode]
TriplePredicate = URIRef
TripleObject = Union[URIRef, Literal, BNode]
Triple = Tuple[TripleSubject, TriplePredicate, TripleObject]


class RDFTriple(tuple):
    """An immutable (subject, predicate, object) triple with named accessors."""

    def __init__(self, _: Triple) -> None:
        # The tuple contents are established by tuple.__new__; nothing to do here.
        super().__init__()

    @property
    def s(self) -> TripleSubject:
        """Subject of the triple."""
        return self[0]

    @property
    def p(self) -> TriplePredicate:
        """Predicate of the triple."""
        return self[1]

    @property
    def o(self) -> TripleObject:
        """Object of the triple."""
        return self[2]

    def __str__(self) -> str:
        return f"<{self.s}> <{self.p}> {self.o} ."


class RDFGraph(set):
    """A set of RDFTriples; str() renders the triples as Turtle (sans prefixes)."""

    def __init__(self, ts: Optional[Union[Iterator[RDFTriple], Iterator[Triple]]]=None) -> None:
        super().__init__([t if isinstance(t, RDFTriple) else RDFTriple(t) for t in ts]
                         if ts is not None else [])

    def __str__(self) -> str:
        g = Graph()
        for e in self:
            g.add((e.s, e.p, e.o))
        serialized = g.serialize(format="turtle")
        # rdflib < 6 returns bytes from serialize(); rdflib >= 6 returns str.
        # Handle both so this works across library versions.
        if isinstance(serialized, bytes):
            serialized = serialized.decode()
        # Strip the @prefix preamble -- only the triples themselves are of interest.
        return re.sub(r'^@prefix.*', '', serialized, flags=re.MULTILINE).strip()

    def add_triples(self, triples: Iterator[Triple]) -> None:
        """Add every triple in ``triples``, coercing each to an RDFTriple."""
        super().update(RDFTriple(t) for t in triples)
The Sun in Leo forms a royal and powerful “finger of God” aspect. We have a powerful yod, or “finger of God”, bringing us into the weekend, between Pluto, Neptune and the Sun in Leo, with the Sun in Leo at the apex. The Yod is amping up the positive effects of the recent New Moon in Leo on the 2nd of August, which are in full force now, and we will feel the effects for the next two weeks. It gives us a rare opportunity to co-create and materialise our plans, projects and dreams, granting us extra power, strength and determination (Pluto) together with vision, hope and compassion (Neptune) for creating a better life in the now as well as in the future. We may well find that our hopes, plans and projects are crowned with success. We have exceptionally good energy this weekend. The moon is in Virgo and makes, first of all, an opposition to Neptune in Pisces. Neptune is also in aspect (inconjunct) to the Sun in Leo. Neptune in sensitive, empathetic, visionary and mystical Pisces, together with the big-hearted, magnanimous Sun in Leo, is giving us the chance to open our hearts and let the love flow, as well as to have more understanding and empathy for others. The moon is also conjunct Mercury in Virgo, making it easier to communicate our emotional needs with empathy. It’s a great time to heal any rifts or misunderstandings and to have forgiveness for self and others for being less than perfect (Virgo). There is a certain soft, dreamy, romantic and mystical energy in the air which is great for any creative or spiritual pursuits, or just for some healthy self-care, relaxation and escapism with loved ones. The moon later meets up and conjoins with expansive, optimistic Jupiter, bringing in the weekend with fun-loving, good-mood vibes. There is a high enjoyment factor to this aspect, while all undertakings are bestowed with an extra portion of good luck and a higher than usual chance of success — especially for matters of the heart.
Mercury in Virgo is in opposition to Neptune until the end of next week, giving us the power of visualisation to envision and believe in a bigger, brighter and better future. And since Neptune dissolves boundaries, we can feel a need to get close to and merge emotionally with others. We need to be extra vigilant, however, as we are more prone than usual to the fog of delusion, deception and addiction which is the shadow side of Neptune, as well as needing to keep a check on maintaining healthy boundaries. Mars in Sagittarius makes us more idealistic, hopeful and courageous, while giving us more drive and optimism in following our goals and visions. Neptune in Pisces is also conjunct the south node of fate; collectively we are being pushed to step up our humanitarian values and empathy for one and all. “Nothing is more important than empathy for another human being’s suffering. Nothing. Not a career, not wealth, not intelligence, certainly not status. We have to feel for one another if we’re going to survive with dignity” – Audrey Hepburn. Audrey Hepburn had her natal Moon in Pisces, in the first house, opposite Neptune in Virgo. She was also an Ambassador for UNICEF: https://youtu.be/1SvddNIlRaw Vivienne Micallef-Browne
// DuplicateListenerName is checks whether the same thing is set in each listener's name func (l *Listeners) DuplicateListenerName() bool { m := map[string]bool{} for _, v := range *l { if !m[v.Name] { m[v.Name] = true } else { return true } } return false }
High efficiency oxide confined vertical cavity surface emitting lasers Structures based on aluminum-oxide layers have led to dramatic improvements in VCSELs such as power conversion efficiencies in excess of 50% and threshold currents below 10 /spl mu/A. The low index, insulating aluminum-oxide, formed by selective wet thermal oxidation of AlGaAs, serves as an effective index guide as well as a current injection aperture. A substantial amount of design freedom exists with respect to the number and placement of apertures with published structures varying from single apertures to one per mirror period. In this paper we present data on devices with either two aligned apertures above and below the active region or with a single effective aperture above the active region leading to slope efficiencies of up to 1 W/A.
/** prints the header with source file location for an error message using the static message handler */
void SCIPmessagePrintErrorHeader(
   const char*           sourcefile,         /**< name of the source file that produced the error */
   int                   sourceline          /**< line in the source file where the error occurred */
   )
{
   char msg[SCIP_MAXSTRLEN];

   /* compose the "[file:line] ERROR: " prefix; snprintf may truncate, so the
    * buffer is explicitly NUL-terminated afterwards to be safe */
   (void) snprintf(msg, SCIP_MAXSTRLEN, "[%s:%d] ERROR: ", sourcefile, sourceline);
   msg[SCIP_MAXSTRLEN-1] = '\0';
   /* NULL message handler selects the static (global) handler */
   messagePrintError(NULL, msg);
}
Assessing the risk of airborne spread of foot‐and‐mouth disease: A case study Foot-and-mouth disease (FMD) is a highly infectious viral disease of cloven-hoofed animals, both domestic and wild. In 2001 it had a dramatic effect on the UK farming community and tourism, when the country experienced one of its worst epidemics (Royal Society 2002). Gloster et al. (2004a, b) described the disease, how it spreads and reviewed the part that meteorology played in the epidemic. This paper extends the work recorded in these papers by considering in detail a specific case study and investigates the hypothesis that the airborne virus could have been responsible for the introduction of FMD virus to the south-west of Scotland from the Cumbria area. It also seeks to establish a formal method by which an assessment of risk from airborne introduction can be made in the future. The Department for Environment, Food and Rural Affairs (Defra) is responsible for the control and eradication of incursions of FMD. For this task the mechanisms for disease spread must be clearly understood and the specific circumstances at the time of the incursion established. During the 2001 UK epidemic, Defra field staff investigated in detail the origin and spread of all cases of FMD. In many instances several routes by which virus could have entered a farm were identified. However, in a number of outbreaks it was difficult or impossible to establish the route of entry. Post epidemic studies have helped clarify the situation but still there are some outbreaks for which the route of infection is uncertain. For example, Thrusfield et al. (2005) carried out a study of the characteristics of the 177 outbreaks in Dumfries and Galloway, in which they ascribed probable and possible sources for outbreaks in the various clusters. 
In some clusters they were unable to identify sources including outbreaks in areas north of the Solway Firth, Kirkconnell (Defra designated cluster – Dumfries South), Rockcliffe (Defra designated cluster – Dalbeattie South), Kirkbean and Whithorn/Sorbie. The location of these towns/clusters is given in Fig. 1 and a detailed map of the widespread outbreaks in the Cumbria and Dumfries and Galloway areas may be found on the Defra website at http://footandmouth.csl.gov.uk/ secure/images/northcumbria.jpg.
/* Copy a component time, optionally into a different calendar.
   This function is provided so that tocomp() can be used for both time types.
   cnew = c.tocomp([calendar]) */
static PyObject *
PyCdComptime_Tocomp(PyCdComptimeObject *self, PyObject *args)
{
	cdCalenType calentype;

	/* default to the module-wide calendar; overridden by the optional
	   integer argument when one is supplied */
	calentype = GET_CALENDAR;
	if (!PyArg_ParseTuple(args, "|i", &calentype))
		return NULL;

	return comptime_tocomp(self, calentype);
}
<commit_msg>Add ui test of mutable slice return from reference arguments <commit_before>mod ffi { extern "Rust" { type Mut<'a>; } unsafe extern "C++" { type Thing; fn f(t: &Thing) -> Pin<&mut CxxString>; unsafe fn g(t: &Thing) -> Pin<&mut CxxString>; fn h(t: Box<Mut>) -> Pin<&mut CxxString>; fn i<'a>(t: Box<Mut<'a>>) -> Pin<&'a mut CxxString>; } } fn main() {} <commit_after>mod ffi { extern "Rust" { type Mut<'a>; } unsafe extern "C++" { type Thing; fn f(t: &Thing) -> Pin<&mut CxxString>; unsafe fn g(t: &Thing) -> Pin<&mut CxxString>; fn h(t: Box<Mut>) -> Pin<&mut CxxString>; fn i<'a>(t: Box<Mut<'a>>) -> Pin<&'a mut CxxString>; fn j(t: &Thing) -> &mut [u8]; } } fn main() {}
//! The "lobby" actor here maintains a roster of connected clients, and supports a simple messaging //! system for clients to relay messages to each other. use std::collections::HashMap; use actix::prelude::*; use names; use client::{ClientMessage,Roster,RosterClient}; const MAX_CLIENTS: usize = 10; /// A single connected client being tracked in the roster. pub struct ConnectedClient { /// The randomly generated name for the client. pub name: String, /// The address of the client's websocket actor. pub addr: Recipient<ClientMessage>, /// The IP address and port number of the client. pub peer: Option<String>, /// The client's user agent pub user_agent: Option<String>, } /// The lobby actor only needs to maintain a roster of connected clients. pub struct Lobby { pub clients: HashMap<String, ConnectedClient>, } impl Lobby { pub fn new() -> Lobby { Lobby { clients: HashMap::new(), } } /// Build a serializable version of the roster to send to clients. fn roster(&self, name: &str) -> Roster { let mut clients = vec![]; for (_, client) in &self.clients { clients.push(RosterClient { name: client.name.clone(), peer: client.peer.clone(), user_agent: client.user_agent.clone(), }); } clients.sort_by(|a,b| a.name.cmp(&b.name)); Roster { name: name.to_string(), clients, } } /// Send an updated roster to every connected client. This happens when a client connects or /// disconnects. fn broadcast_roster(&self, except: Option<&str>) { for client in self.clients.values() { if let Some(except) = except { if except == client.name { continue; } } match client.addr.do_send(ClientMessage::Roster(self.roster(&client.name))) { Ok(_) => {}, Err(e) => error!("Lobby: Unable to send roster to client {:?}: {}", client.name, e), } } } } impl Actor for Lobby { type Context = Context<Self>; } //////////////////////////////////////////////////////////////////////// /// When a new client connects, this message is sent to the lobby actor. 
#[derive(Message)] #[rtype(ConnectResponse)] pub struct Connect { pub addr: Recipient<ClientMessage>, pub peer: Option<String>, pub user_agent: Option<String>, } #[derive(Debug)] pub struct ConnectResponse { pub roster: Option<Roster>, } impl<A,M> actix::dev::MessageResponse<A,M> for ConnectResponse where A: Actor, M: Message<Result = ConnectResponse>, { fn handle<R: actix::dev::ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) { if let Some(tx) = tx { tx.send(self); } } } impl Handler<Connect> for Lobby { type Result = ConnectResponse; /// Handle a new client connection. fn handle(&mut self, msg: Connect, _: &mut Context<Self>) -> Self::Result { // Restrict the lobby to a maximum number of clients. if self.clients.len() >= MAX_CLIENTS { return ConnectResponse { roster: None }; } // Create a new random name for this connection. let mut name = names::generate(); let mut count = 0usize; while self.clients.contains_key(&name) { if count > MAX_CLIENTS { // We somehow can't come up with an original name, and don't want to loop forever. return ConnectResponse { roster: None }; } name = names::generate(); count += 1; } // Add the new client to the lobby. self.clients.insert(name.clone(), ConnectedClient { name: name.clone(), addr: msg.addr, peer: msg.peer, user_agent: msg.user_agent, }); // Broadcast the updated roster to all clients self.broadcast_roster(Some(&name)); // Return the current roster to the newly connected client. ConnectResponse { roster: Some(self.roster(&name)) } } } //////////////////////////////////////////////////////////////////////// /// When a client disconnects, this message is sent to the lobby actor. #[derive(Message)] pub struct Disconnect { pub name: String, } impl Handler<Disconnect> for Lobby { type Result = (); /// Handle a client disconnection. 
fn handle(&mut self, msg: Disconnect, _: &mut Context<Self>) -> Self::Result { info!("Lobby: Disconnecting {:?}.", msg.name); self.clients.remove(&msg.name); // Broadcast the updated roster to all clients self.broadcast_roster(None); } } //////////////////////////////////////////////////////////////////////// /// This message is an envelope for an incoming client message. #[derive(Debug,Message)] pub struct RecvClientMessage { pub name: String, pub message: ClientMessage, } impl Handler<RecvClientMessage> for Lobby { type Result = (); /// Handle messages received from clients. Currently the only supported message type is /// "relay". fn handle(&mut self, msg: RecvClientMessage, _: &mut Context<Self>) -> Self::Result { info!("Lobby recv message: {:?}", msg); let RecvClientMessage { name: mut sender, message } = msg; match message { ClientMessage::Relay(mut relay) => { // Handle relay messages by changing the name to the source, and forwarding the // message onward. // Swap sender and receiver names ::std::mem::swap(&mut relay.name, &mut sender); let receiver = sender; // Send to destination match self.clients.get(&receiver) { Some(client) => { match client.addr.do_send(ClientMessage::Relay(relay)) { Ok(_) => {}, Err(e) => error!("Lobby: Unable to send roster to client {:?}: {}", client.name, e), } }, None => warn!("Cannot relay to unknown client \"{}\".", receiver), }; }, _ => { warn!("Discarding unexpected message from \"{}\": {:?}", sender, message); } } } }
<filename>go/vt/vtctl/vtctl.go // Copyright 2012, Google Inc. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package vtctl import ( "errors" "flag" "fmt" "io/ioutil" "net" "os" "sort" "strings" "time" log "github.com/golang/glog" "github.com/youtube/vitess/go/flagutil" "github.com/youtube/vitess/go/jscfg" "github.com/youtube/vitess/go/vt/client2" hk "github.com/youtube/vitess/go/vt/hook" "github.com/youtube/vitess/go/vt/key" myproto "github.com/youtube/vitess/go/vt/mysqlctl/proto" "github.com/youtube/vitess/go/vt/tabletmanager/actionnode" "github.com/youtube/vitess/go/vt/topo" "github.com/youtube/vitess/go/vt/topotools" "github.com/youtube/vitess/go/vt/wrangler" ) var ( // Error returned for an unknown command ErrUnknownCommand = errors.New("unknown command") ) type command struct { name string method func(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) params string help string // if help is empty, won't list the command } type commandGroup struct { name string commands []command } var commands = []commandGroup{ commandGroup{ "Tablets", []command{ command{"InitTablet", commandInitTablet, "[-force] [-parent] [-update] [-db-name-override=<db name>] [-hostname=<hostname>] [-mysql_port=<port>] [-port=<port>] [-vts_port=<port>] [-keyspace=<keyspace>] [-shard=<shard>] [-parent_alias=<parent alias>] <tablet alias> <tablet type>]", "Initializes a tablet in the topology.\n" + "Valid <tablet type>:\n" + " " + strings.Join(topo.MakeStringTypeList(topo.AllTabletTypes), " ")}, command{"GetTablet", commandGetTablet, "<tablet alias|zk tablet path>", "Outputs the json version of Tablet to stdout."}, command{"UpdateTabletAddrs", commandUpdateTabletAddrs, "[-hostname <hostname>] [-ip-addr <ip addr>] [-mysql-port <mysql port>] [-vt-port <vt port>] [-vts-port <vts port>] <tablet alias|zk tablet path> ", "Updates the addresses of a tablet."}, command{"ScrapTablet", 
commandScrapTablet, "[-force] [-skip-rebuild] <tablet alias|zk tablet path>", "Scraps a tablet."}, command{"DeleteTablet", commandDeleteTablet, "<tablet alias|zk tablet path> ...", "Deletes scrapped tablet(s) from the topology."}, command{"SetReadOnly", commandSetReadOnly, "[<tablet alias|zk tablet path>]", "Sets the tablet as ReadOnly."}, command{"SetReadWrite", commandSetReadWrite, "[<tablet alias|zk tablet path>]", "Sets the tablet as ReadWrite."}, command{"SetBlacklistedTables", commandSetBlacklistedTables, "[<tablet alias|zk tablet path>] [table1,table2,...]", "Sets the list of blacklisted tables for a tablet. Use no tables to clear the list."}, command{"ChangeSlaveType", commandChangeSlaveType, "[-force] [-dry-run] <tablet alias|zk tablet path> <tablet type>", "Change the db type for this tablet if possible. This is mostly for arranging replicas - it will not convert a master.\n" + "NOTE: This will automatically update the serving graph.\n" + "Valid <tablet type>:\n" + " " + strings.Join(topo.MakeStringTypeList(topo.SlaveTabletTypes), " ")}, command{"Ping", commandPing, "<tablet alias|zk tablet path>", "Check that the agent is awake and responding - can be blocked by other in-flight operations."}, command{"RpcPing", commandRpcPing, "<tablet alias|zk tablet path>", "Check that the agent is awake and responding to RPCs."}, command{"Query", commandQuery, "<cell> <keyspace> <query>", "Send a SQL query to a tablet."}, command{"Sleep", commandSleep, "<tablet alias|zk tablet path> <duration>", "Block the action queue for the specified duration (mostly for testing)."}, command{"Snapshot", commandSnapshot, "[-force] [-server-mode] [-concurrency=4] <tablet alias|zk tablet path>", "Stop mysqld and copy compressed data aside."}, command{"SnapshotSourceEnd", commandSnapshotSourceEnd, "[-slave-start] [-read-write] <tablet alias|zk tablet path> <original tablet type>", "Restart Mysql and restore original server type." 
+ "Valid <tablet type>:\n" + " " + strings.Join(topo.MakeStringTypeList(topo.AllTabletTypes), " ")}, command{"Restore", commandRestore, "[-fetch-concurrency=3] [-fetch-retry-count=3] [-dont-wait-for-slave-start] <src tablet alias|zk src tablet path> <src manifest file> <dst tablet alias|zk dst tablet path> [<zk new master path>]", "Copy the given snaphot from the source tablet and restart replication to the new master path (or uses the <src tablet path> if not specified). If <src manifest file> is 'default', uses the default value.\n" + "NOTE: This does not wait for replication to catch up. The destination tablet must be 'idle' to begin with. It will transition to 'spare' once the restore is complete."}, command{"Clone", commandClone, "[-force] [-concurrency=4] [-fetch-concurrency=3] [-fetch-retry-count=3] [-server-mode] <src tablet alias|zk src tablet path> <dst tablet alias|zk dst tablet path> ...", "This performs Snapshot and then Restore on all the targets in parallel. The advantage of having separate actions is that one snapshot can be used for many restores, and it's then easier to spread them over time."}, command{"MultiSnapshot", commandMultiSnapshot, "[-force] [-concurrency=8] [-skip-slave-restart] [-maximum-file-size=134217728] -spec='-' [-tables=''] [-exclude_tables=''] <tablet alias|zk tablet path>", "Locks mysqld and copy compressed data aside."}, command{"MultiRestore", commandMultiRestore, "[-force] [-concurrency=4] [-fetch-concurrency=4] [-insert-table-concurrency=4] [-fetch-retry-count=3] [-strategy=] <dst tablet alias|destination zk path> <source zk path>...", "Restores a snapshot from multiple hosts."}, command{"ExecuteHook", commandExecuteHook, "<tablet alias|zk tablet path> <hook name> [<param1=value1> <param2=value2> ...]", "This runs the specified hook on the given tablet."}, command{"ReadTabletAction", commandReadTabletAction, "<action path>)", "Displays the action node as json."}, command{"ExecuteFetch", commandExecuteFetch, 
"[--max_rows=10000] [--want_fields] [--disable_binlogs] <tablet alias|zk tablet path> <sql command>", "Runs the given sql command as a DBA on the remote tablet"}, }, }, commandGroup{ "Shards", []command{ command{"CreateShard", commandCreateShard, "[-force] [-parent] <keyspace/shard|zk shard path>", "Creates the given shard"}, command{"GetShard", commandGetShard, "<keyspace/shard|zk shard path>", "Outputs the json version of Shard to stdout."}, command{"RebuildShardGraph", commandRebuildShardGraph, "[-cells=a,b] <zk shard path> ... (/zk/global/vt/keyspaces/<keyspace>/shards/<shard>)", "Rebuild the replication graph and shard serving data in zk. This may trigger an update to all connected clients."}, command{"ShardExternallyReparented", commandShardExternallyReparented, "<keyspace/shard|zk shard path> <tablet alias|zk tablet path>", "Changes metadata to acknowledge a shard master change performed by an external tool."}, command{"ValidateShard", commandValidateShard, "[-ping-tablets] <keyspace/shard|zk shard path>", "Validate all nodes reachable from this shard are consistent."}, command{"ShardReplicationPositions", commandShardReplicationPositions, "<keyspace/shard|zk shard path>", "Show slave status on all machines in the shard graph."}, command{"ListShardTablets", commandListShardTablets, "<keyspace/shard|zk shard path>)", "List all tablets in a given shard."}, command{"SetShardServedTypes", commandSetShardServedTypes, "<keyspace/shard|zk shard path> [<served type1>,<served type2>,...]", "Sets a given shard's served types. Does not rebuild any serving graph."}, command{"ShardMultiRestore", commandShardMultiRestore, "[-force] [-concurrency=4] [-fetch-concurrency=4] [-insert-table-concurrency=4] [-fetch-retry-count=3] [-strategy=] [-tables=<table1>,<table2>,...] 
<keyspace/shard|zk shard path> <source zk path>...", "Restore multi-snapshots on all the tablets of a shard."}, command{"ShardReplicationAdd", commandShardReplicationAdd, "<keyspace/shard|zk shard path> <tablet alias|zk tablet path> <parent tablet alias|zk parent tablet path>", "HIDDEN Adds an entry to the replication graph in the given cell"}, command{"ShardReplicationRemove", commandShardReplicationRemove, "<keyspace/shard|zk shard path> <tablet alias|zk tablet path>", "HIDDEN Removes an entry to the replication graph in the given cell"}, command{"ShardReplicationFix", commandShardReplicationFix, "<cell> <keyspace/shard|zk shard path>", "Walks through a ShardReplication object and fixes the first error it encrounters"}, command{"RemoveShardCell", commandRemoveShardCell, "[-force] <keyspace/shard|zk shard path> <cell>", "Removes the cell in the shard's Cells list."}, command{"DeleteShard", commandDeleteShard, "<keyspace/shard|zk shard path> ...", "Deletes the given shard(s)"}, }, }, commandGroup{ "Keyspaces", []command{ command{"CreateKeyspace", commandCreateKeyspace, "[-sharding_column_name=name] [-sharding_column_type=type] [-served-from=tablettype1:ks1,tablettype2,ks2,...] [-force] <keyspace name|zk keyspace path>", "Creates the given keyspace"}, command{"GetKeyspace", commandGetKeyspace, "<keyspace|zk keyspace path>", "Outputs the json version of Keyspace to stdout."}, command{"SetKeyspaceShardingInfo", commandSetKeyspaceShardingInfo, "[-force] <keyspace name|zk keyspace path> [<column name>] [<column type>]", "Updates the sharding info for a keyspace"}, command{"RebuildKeyspaceGraph", commandRebuildKeyspaceGraph, "[-cells=a,b] <zk keyspace path> ... (/zk/global/vt/keyspaces/<keyspace>)", "Rebuild the serving data for all shards in this keyspace. 
This may trigger an update to all connected clients."}, command{"ValidateKeyspace", commandValidateKeyspace, "[-ping-tablets] <keyspace name|zk keyspace path>", "Validate all nodes reachable from this keyspace are consistent."}, command{"MigrateServedTypes", commandMigrateServedTypes, "[-reverse] [-skip-rebuild] <source keyspace/shard|zk source shard path> <served type>", "Migrates a serving type from the source shard to the shards it replicates to. Will also rebuild the serving graph."}, command{"MigrateServedFrom", commandMigrateServedFrom, "[-reverse] [-skip-rebuild] <destination keyspace/shard|zk destination shard path> <served type>", "Makes the destination keyspace/shard serve the given type. Will also rebuild the serving graph."}, }, }, commandGroup{ "Generic", []command{ command{"WaitForAction", commandWaitForAction, "<action path>", "Watch an action node, printing updates, until the action is complete."}, command{"Resolve", commandResolve, "<keyspace>.<shard>.<db type>:<port name>", "Read a list of addresses that can answer this query. The port name is usually _mysql or _vtocc."}, command{"Validate", commandValidate, "[-ping-tablets]", "Validate all nodes reachable from global replication graph and all tablets in all discoverable cells are consistent."}, command{"RebuildReplicationGraph", commandRebuildReplicationGraph, "<cell1|zk local vt path1>,<cell2|zk local vt path2>... <keyspace1>,<keyspace2>,...", "HIDDEN This takes the Thor's hammer approach of recovery and should only be used in emergencies. cell1,cell2,... are the canonical source of data for the system. 
This function uses that canonical data to recover the replication graph, at which point further auditing with Validate can reveal any remaining issues."}, command{"ListAllTablets", commandListAllTablets, "<cell name|zk local vt path>", "List all tablets in an awk-friendly way."}, command{"ListTablets", commandListTablets, "<tablet alias|zk tablet path> ...", "List specified tablets in an awk-friendly way."}, }, }, commandGroup{ "Schema, Version, Permissions", []command{ command{"GetSchema", commandGetSchema, "[-tables=<table1>,<table2>,...] [-exclude_tables=<table1>,<table2>,...] [-include-views] <tablet alias|zk tablet path>", "Display the full schema for a tablet, or just the schema for the provided tables."}, command{"ReloadSchema", commandReloadSchema, "<tablet alias|zk tablet path>", "Asks a remote tablet to reload its schema."}, command{"ValidateSchemaShard", commandValidateSchemaShard, "[-exclude_tables=''] [-include-views] <keyspace/shard|zk shard path>", "Validate the master schema matches all the slaves."}, command{"ValidateSchemaKeyspace", commandValidateSchemaKeyspace, "[-exclude_tables=''] [-include-views] <keyspace name|zk keyspace path>", "Validate the master schema from shard 0 matches all the other tablets in the keyspace."}, command{"PreflightSchema", commandPreflightSchema, "{-sql=<sql> || -sql-file=<filename>} <tablet alias|zk tablet path>", "Apply the schema change to a temporary database to gather before and after schema and validate the change. The sql can be inlined or read from a file."}, command{"ApplySchema", commandApplySchema, "[-force] {-sql=<sql> || -sql-file=<filename>} [-skip-preflight] [-stop-replication] <tablet alias|zk tablet path>", "Apply the schema change to the specified tablet (allowing replication by default). The sql can be inlined or read from a file. 
Note this doesn't change any tablet state (doesn't go into 'schema' type)."}, command{"ApplySchemaShard", commandApplySchemaShard, "[-force] {-sql=<sql> || -sql-file=<filename>} [-simple] [-new-parent=<zk tablet path>] <keyspace/shard|zk shard path>", "Apply the schema change to the specified shard. If simple is specified, we just apply on the live master. Otherwise we will need to do the shell game. So we will apply the schema change to every single slave. if new_parent is set, we will also reparent (otherwise the master won't be touched at all). Using the force flag will cause a bunch of checks to be ignored, use with care."}, command{"ApplySchemaKeyspace", commandApplySchemaKeyspace, "[-force] {-sql=<sql> || -sql-file=<filename>} [-simple] <keyspace|zk keyspace path>", "Apply the schema change to the specified keyspace. If simple is specified, we just apply on the live masters. Otherwise we will need to do the shell game on each shard. So we will apply the schema change to every single slave (running in parallel on all shards, but on one host at a time in a given shard). We will not reparent at the end, so the masters won't be touched at all. 
Using the force flag will cause a bunch of checks to be ignored, use with care."}, command{"ValidateVersionShard", commandValidateVersionShard, "<keyspace/shard|zk shard path>", "Validate the master version matches all the slaves."}, command{"ValidateVersionKeyspace", commandValidateVersionKeyspace, "<keyspace name|zk keyspace path>", "Validate the master version from shard 0 matches all the other tablets in the keyspace."}, command{"GetPermissions", commandGetPermissions, "<tablet alias|zk tablet path>", "Display the permissions for a tablet."}, command{"ValidatePermissionsShard", commandValidatePermissionsShard, "<keyspace/shard|zk shard path>", "Validate the master permissions match all the slaves."}, command{"ValidatePermissionsKeyspace", commandValidatePermissionsKeyspace, "<keyspace name|zk keyspace path>", "Validate the master permissions from shard 0 match all the other tablets in the keyspace."}, }, }, commandGroup{ "Serving Graph", []command{ command{"GetSrvKeyspace", commandGetSrvKeyspace, "<cell> <keyspace>", "Outputs the json version of SrvKeyspace to stdout."}, command{"GetSrvShard", commandGetSrvShard, "<cell> <keyspace/shard|zk shard path>", "Outputs the json version of SrvShard to stdout."}, command{"GetEndPoints", commandGetEndPoints, "<cell> <keyspace/shard|zk shard path> <tablet type>", "Outputs the json version of EndPoints to stdout."}, }, }, commandGroup{ "Replication Graph", []command{ command{"GetShardReplication", commandGetShardReplication, "<cell> <keyspace/shard|zk shard path>", "Outputs the json version of ShardReplication to stdout."}, }, }, } func addCommand(groupName string, c command) { for i, group := range commands { if group.name == groupName { commands[i].commands = append(commands[i].commands, c) return } } panic(fmt.Errorf("Trying to add to missing group %v", groupName)) } var resolveWildcards = func(wr *wrangler.Wrangler, args []string) ([]string, error) { return args, nil } func fmtMapAwkable(m map[string]string) string { 
pairs := make([]string, len(m)) i := 0 for k, v := range m { pairs[i] = fmt.Sprintf("%v: %q", k, v) i++ } sort.Strings(pairs) return "[" + strings.Join(pairs, " ") + "]" } func fmtTabletAwkable(ti *topo.TabletInfo) string { keyspace := ti.Keyspace shard := ti.Shard if keyspace == "" { keyspace = "<null>" } if shard == "" { shard = "<null>" } return fmt.Sprintf("%v %v %v %v %v %v %v", ti.Alias, keyspace, shard, ti.Type, ti.Addr(), ti.MysqlAddr(), fmtMapAwkable(ti.Tags)) } func fmtAction(action *actionnode.ActionNode) string { state := string(action.State) // FIXME(msolomon) The default state should really just have the value "queued". if action.State == actionnode.ACTION_STATE_QUEUED { state = "queued" } return fmt.Sprintf("%v %v %v %v %v", action.Path, action.Action, state, action.ActionGuid, action.Error) } func listTabletsByShard(wr *wrangler.Wrangler, keyspace, shard string) error { tabletAliases, err := topo.FindAllTabletAliasesInShard(wr.TopoServer(), keyspace, shard) if err != nil { return err } return dumpTablets(wr, tabletAliases) } func dumpAllTablets(wr *wrangler.Wrangler, zkVtPath string) error { tablets, err := topotools.GetAllTablets(wr.TopoServer(), zkVtPath) if err != nil { return err } for _, ti := range tablets { wr.Logger().Printf("%v\n", fmtTabletAwkable(ti)) } return nil } func dumpTablets(wr *wrangler.Wrangler, tabletAliases []topo.TabletAlias) error { tabletMap, err := topo.GetTabletMap(wr.TopoServer(), tabletAliases) if err != nil { return err } for _, tabletAlias := range tabletAliases { ti, ok := tabletMap[tabletAlias] if !ok { log.Warningf("failed to load tablet %v", tabletAlias) } else { wr.Logger().Printf("%v\n", fmtTabletAwkable(ti)) } } return nil } func kquery(wr *wrangler.Wrangler, cell, keyspace, query string) error { sconn, err := client2.Dial(wr.TopoServer(), cell, keyspace, "master", false, 5*time.Second) if err != nil { return err } rows, err := sconn.Exec(query, nil) if err != nil { return err } cols := rows.Columns() 
wr.Logger().Printf("%v\n", strings.Join(cols, "\t")) rowStrs := make([]string, len(cols)+1) for row := rows.Next(); row != nil; row = rows.Next() { for i, value := range row { switch value.(type) { case []byte: rowStrs[i] = fmt.Sprintf("%q", value) default: rowStrs[i] = fmt.Sprintf("%v", value) } } wr.Logger().Printf("%v\n", strings.Join(rowStrs, "\t")) } return nil } // getFileParam returns a string containing either flag is not "", // or the content of the file named flagFile func getFileParam(flag, flagFile, name string) (string, error) { if flag != "" { if flagFile != "" { return "", fmt.Errorf("action requires only one of %v or %v-file", name, name) } return flag, nil } if flagFile == "" { return "", fmt.Errorf("action requires one of %v or %v-file", name, name) } data, err := ioutil.ReadFile(flagFile) if err != nil { return "", fmt.Errorf("Cannot read file %v: %v", flagFile, err) } return string(data), nil } func keyspaceParamToKeyspace(param string) (string, error) { if param[0] == '/' { // old zookeeper path, convert to new-style string keyspace zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 6 || zkPathParts[0] != "" || zkPathParts[1] != "zk" || zkPathParts[2] != "global" || zkPathParts[3] != "vt" || zkPathParts[4] != "keyspaces" { return "", fmt.Errorf("Invalid keyspace path: %v", param) } return zkPathParts[5], nil } return param, nil } // keyspaceParamsToKeyspaces builds a list of keyspaces. // It supports topology-based wildcards, and plain wildcards. 
// For instance: // /zk/global/vt/keyspaces/one // using plugin_zktopo // /zk/global/vt/keyspaces/* // using plugin_zktopo // us* // using plain matching // * // using plain matching func keyspaceParamsToKeyspaces(wr *wrangler.Wrangler, params []string) ([]string, error) { result := make([]string, 0, len(params)) for _, param := range params { if param[0] == '/' { // this is a topology-specific path zkPaths, err := resolveWildcards(wr, params) if err != nil { return nil, fmt.Errorf("Failed to resolve wildcard: %v", err) } for _, zkPath := range zkPaths { subResult, err := keyspaceParamToKeyspace(zkPath) if err != nil { return nil, err } result = append(result, subResult) } } else { // this is not a path, so assume a keyspace name, // possibly with wildcards keyspaces, err := topo.ResolveKeyspaceWildcard(wr.TopoServer(), param) if err != nil { return nil, fmt.Errorf("Failed to resolve keyspace wildcard %v: %v", param, err) } result = append(result, keyspaces...) } } return result, nil } func shardParamToKeyspaceShard(param string) (string, string, error) { if param[0] == '/' { // old zookeeper path, convert to new-style zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 8 || zkPathParts[0] != "" || zkPathParts[1] != "zk" || zkPathParts[2] != "global" || zkPathParts[3] != "vt" || zkPathParts[4] != "keyspaces" || zkPathParts[6] != "shards" { return "", "", fmt.Errorf("Invalid shard path: %v", param) } return zkPathParts[5], zkPathParts[7], nil } zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 2 { return "", "", fmt.Errorf("Invalid shard path: %v", param) } return zkPathParts[0], zkPathParts[1], nil } // shardParamsToKeyspaceShards builds a list of keyspace/shard pairs. // It supports topology-based wildcards, and plain wildcards. 
// For instance: // /zk/global/vt/keyspaces/*/shards/* // using plugin_zktopo // user/* // using plain matching // */0 // using plain matching func shardParamsToKeyspaceShards(wr *wrangler.Wrangler, params []string) ([]topo.KeyspaceShard, error) { result := make([]topo.KeyspaceShard, 0, len(params)) for _, param := range params { if param[0] == '/' { // this is a topology-specific path zkPaths, err := resolveWildcards(wr, params) if err != nil { return nil, fmt.Errorf("Failed to resolve wildcard: %v", err) } for _, zkPath := range zkPaths { keyspace, shard, err := shardParamToKeyspaceShard(zkPath) if err != nil { return nil, err } result = append(result, topo.KeyspaceShard{Keyspace: keyspace, Shard: shard}) } } else { // this is not a path, so assume a keyspace // name / shard name, each possibly with wildcards keyspaceShards, err := topo.ResolveShardWildcard(wr.TopoServer(), param) if err != nil { return nil, fmt.Errorf("Failed to resolve keyspace/shard wildcard %v: %v", param, err) } result = append(result, keyspaceShards...) } } return result, nil } // tabletParamToTabletAlias takes either an old style ZK tablet path or a // new style tablet alias as a string, and returns a TabletAlias. func tabletParamToTabletAlias(param string) (topo.TabletAlias, error) { if param[0] == '/' { // old zookeeper path, convert to new-style string tablet alias zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 6 || zkPathParts[0] != "" || zkPathParts[1] != "zk" || zkPathParts[3] != "vt" || zkPathParts[4] != "tablets" { return topo.TabletAlias{}, fmt.Errorf("Invalid tablet path: %v", param) } param = zkPathParts[2] + "-" + zkPathParts[5] } result, err := topo.ParseTabletAliasString(param) if err != nil { return topo.TabletAlias{}, fmt.Errorf("Invalid tablet alias %v: %v", param, err) } return result, nil } // tabletParamsToTabletAliases takes multiple params and converts them // to tablet aliases. 
func tabletParamsToTabletAliases(params []string) ([]topo.TabletAlias, error) { result := make([]topo.TabletAlias, len(params)) var err error for i, param := range params { result[i], err = tabletParamToTabletAlias(param) if err != nil { return nil, err } } return result, nil } // tabletRepParamToTabletAlias takes either an old style ZK tablet replication // path or a new style tablet alias as a string, and returns a // TabletAlias. func tabletRepParamToTabletAlias(param string) (topo.TabletAlias, error) { if param[0] == '/' { // old zookeeper replication path, e.g. // /zk/global/vt/keyspaces/ruser/shards/10-20/nyc-0000200278 // convert to new-style string tablet alias zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 9 || zkPathParts[0] != "" || zkPathParts[1] != "zk" || zkPathParts[2] != "global" || zkPathParts[3] != "vt" || zkPathParts[4] != "keyspaces" || zkPathParts[6] != "shards" { return topo.TabletAlias{}, fmt.Errorf("Invalid tablet replication path: %v", param) } param = zkPathParts[8] } result, err := topo.ParseTabletAliasString(param) if err != nil { return topo.TabletAlias{}, fmt.Errorf("Invalid tablet alias %v: %v", param, err) } return result, nil } // vtPathToCell takes either an old style ZK vt path /zk/<cell>/vt or // a new style cell and returns the cell name func vtPathToCell(param string) (string, error) { if param[0] == '/' { // old zookeeper replication path like /zk/<cell>/vt zkPathParts := strings.Split(param, "/") if len(zkPathParts) != 4 || zkPathParts[0] != "" || zkPathParts[1] != "zk" || zkPathParts[3] != "vt" { return "", fmt.Errorf("Invalid vt path: %v", param) } return zkPathParts[2], nil } return param, nil } // parseTabletType parses the string tablet type and verifies // it is an accepted one func parseTabletType(param string, types []topo.TabletType) (topo.TabletType, error) { tabletType := topo.TabletType(param) if !topo.IsTypeInList(tabletType, types) { return "", fmt.Errorf("Type %v is not one of: %v", tabletType, 
strings.Join(topo.MakeStringTypeList(types), " ")) } return tabletType, nil } func commandInitTablet(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { var ( dbNameOverride = subFlags.String("db-name-override", "", "override the name of the db used by vttablet") force = subFlags.Bool("force", false, "will overwrite the node if it already exists") parent = subFlags.Bool("parent", false, "will create the parent shard and keyspace if they don't exist yet") update = subFlags.Bool("update", false, "perform update if a tablet with provided alias exists") hostname = subFlags.String("hostname", "", "server the tablet is running on") mysqlPort = subFlags.Int("mysql_port", 0, "mysql port for the mysql daemon") port = subFlags.Int("port", 0, "main port for the vttablet process") vtsPort = subFlags.Int("vts_port", 0, "encrypted port for the vttablet process") keyspace = subFlags.String("keyspace", "", "keyspace this tablet belongs to") shard = subFlags.String("shard", "", "shard this tablet belongs to") parentAlias = subFlags.String("parent_alias", "", "alias of the mysql parent tablet for this tablet") tags flagutil.StringMapValue ) subFlags.Var(&tags, "tags", "comma separated list of key:value pairs used to tag the tablet") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action InitTablet requires <tablet alias> <tablet type>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } tabletType, err := parseTabletType(subFlags.Arg(1), topo.AllTabletTypes) if err != nil { return "", err } // create tablet record tablet := &topo.Tablet{ Alias: tabletAlias, Hostname: *hostname, Portmap: make(map[string]int), Keyspace: *keyspace, Shard: *shard, Type: tabletType, DbNameOverride: *dbNameOverride, Tags: tags, } if *port != 0 { tablet.Portmap["vt"] = *port } if *mysqlPort != 0 { tablet.Portmap["mysql"] = *mysqlPort } if *vtsPort != 0 { 
tablet.Portmap["vts"] = *vtsPort } if *parentAlias != "" { tablet.Parent, err = tabletRepParamToTabletAlias(*parentAlias) if err != nil { return "", err } } return "", wr.InitTablet(tablet, *force, *parent, *update) } func commandGetTablet(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action GetTablet requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } tabletInfo, err := wr.TopoServer().GetTablet(tabletAlias) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(tabletInfo)) } return "", err } func commandUpdateTabletAddrs(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { hostname := subFlags.String("hostname", "", "fully qualified host name") ipAddr := subFlags.String("ip-addr", "", "IP address") mysqlPort := subFlags.Int("mysql-port", 0, "mysql port") vtPort := subFlags.Int("vt-port", 0, "vt port") vtsPort := subFlags.Int("vts-port", 0, "vts port") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action UpdateTabletAddrs requires <tablet alias|zk tablet path>") } if *ipAddr != "" && net.ParseIP(*ipAddr) == nil { return "", fmt.Errorf("malformed address: %v", *ipAddr) } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.TopoServer().UpdateTabletFields(tabletAlias, func(tablet *topo.Tablet) error { if *hostname != "" { tablet.Hostname = *hostname } if *ipAddr != "" { tablet.IPAddr = *ipAddr } if *vtPort != 0 || *vtsPort != 0 || *mysqlPort != 0 { if tablet.Portmap == nil { tablet.Portmap = make(map[string]int) } if *vtPort != 0 { tablet.Portmap["vt"] = *vtPort } if *vtsPort != 0 { tablet.Portmap["vts"] = *vtsPort } if *mysqlPort != 0 { tablet.Portmap["mysql"] = *mysqlPort } } return nil }) 
}

// commandScrapTablet marks a tablet as scrapped.
func commandScrapTablet(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	force := subFlags.Bool("force", false, "writes the scrap state in to zk, no questions asked, if a tablet is offline")
	skipRebuild := subFlags.Bool("skip-rebuild", false, "do not rebuild the shard and keyspace graph after scrapping")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action ScrapTablet requires <tablet alias|zk tablet path>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return wr.Scrap(tabletAlias, *force, *skipRebuild)
}

// commandDeleteTablet deletes one or more tablet records.
func commandDeleteTablet(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() == 0 {
		return "", fmt.Errorf("action DeleteTablet requires at least one <tablet alias|zk tablet path> ...")
	}
	tabletAliases, err := tabletParamsToTabletAliases(subFlags.Args())
	if err != nil {
		return "", err
	}
	for _, tabletAlias := range tabletAliases {
		if err := wr.DeleteTablet(tabletAlias); err != nil {
			return "", err
		}
	}
	return "", nil
}

// commandSetReadOnly makes the given tablet's mysqld read-only.
func commandSetReadOnly(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action SetReadOnly requires <tablet alias|zk tablet path>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return wr.ActionInitiator().SetReadOnly(tabletAlias)
}

// commandSetReadWrite makes the given tablet's mysqld read-write.
func commandSetReadWrite(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action SetReadWrite requires <tablet alias|zk tablet path>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return wr.ActionInitiator().SetReadWrite(tabletAlias)
}

// commandSetBlacklistedTables sets (or clears, when no list is given)
// the blacklisted tables on a tablet.
func commandSetBlacklistedTables(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 && subFlags.NArg() != 2 {
		return "", fmt.Errorf("action SetBlacklistedTables requires <tablet alias|zk tablet path> [table1,table2,...]")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	var tables []string
	if subFlags.NArg() == 2 {
		tables = strings.Split(subFlags.Arg(1), ",")
	}
	ti, err := wr.TopoServer().GetTablet(tabletAlias)
	if err != nil {
		return "", fmt.Errorf("failed reading tablet %v: %v", tabletAlias, err)
	}
	return "", wr.ActionInitiator().SetBlacklistedTables(ti, tables, wr.ActionTimeout())
}

// commandChangeSlaveType changes the type of a tablet; with -dry-run it
// only prints the proposed before/after records.
func commandChangeSlaveType(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	force := subFlags.Bool("force", false, "will change the type in zookeeper, and not run hooks")
	dryRun := subFlags.Bool("dry-run", false, "just list the proposed change")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		return "", fmt.Errorf("action ChangeSlaveType requires <zk tablet path> <db type>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	newType, err := parseTabletType(subFlags.Arg(1), topo.AllTabletTypes)
	if err != nil {
		return "", err
	}
	if *dryRun {
		ti, err := wr.TopoServer().GetTablet(tabletAlias)
		if err != nil {
			return "", fmt.Errorf("failed reading tablet %v: %v", tabletAlias, err)
		}
		if !topo.IsTrivialTypeChange(ti.Type, newType) || !topo.IsValidTypeChange(ti.Type, newType) {
			return "", fmt.Errorf("invalid type transition %v: %v -> %v", tabletAlias, ti.Type, newType)
		}
		wr.Logger().Printf("- %v\n", fmtTabletAwkable(ti))
		ti.Type = newType
		wr.Logger().Printf("+ %v\n", fmtTabletAwkable(ti))
		return "", nil
	}
	return "", wr.ChangeType(tabletAlias, newType, *force)
}

func
commandPing(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action Ping requires <tablet alias|zk tablet path>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return wr.ActionInitiator().Ping(tabletAlias)
}

// commandRpcPing pings a tablet over RPC.
func commandRpcPing(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		// Fix: the message used to say "action Ping" (copy/paste from commandPing).
		return "", fmt.Errorf("action RpcPing requires <tablet alias|zk tablet path>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return "", wr.ActionInitiator().RpcPing(tabletAlias, wr.ActionTimeout())
}

// commandQuery runs a query against the master of a cell/keyspace.
func commandQuery(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 3 {
		// Fix: spell out the three required arguments instead of "requires 3".
		return "", fmt.Errorf("action Query requires <cell> <keyspace> <query>")
	}
	return "", kquery(wr, subFlags.Arg(0), subFlags.Arg(1), subFlags.Arg(2))
}

// commandSleep makes a tablet sleep for the given duration (for testing).
func commandSleep(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		return "", fmt.Errorf("action Sleep requires <tablet alias|zk tablet path> <duration>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	duration, err := time.ParseDuration(subFlags.Arg(1))
	if err != nil {
		return "", err
	}
	return wr.ActionInitiator().Sleep(tabletAlias, duration)
}

// commandSnapshotSourceEnd restores a tablet after it served as a
// snapshot source.
func commandSnapshotSourceEnd(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	slaveStartRequired := subFlags.Bool("slave-start", false, "will restart replication")
	readWrite := subFlags.Bool("read-write", false, "will make the server read-write")
	if err := subFlags.Parse(args); err != nil {
		return "",
err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action SnapshotSourceEnd requires <tablet alias|zk tablet path> <original server type>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } tabletType, err := parseTabletType(subFlags.Arg(1), topo.AllTabletTypes) if err != nil { return "", err } return "", wr.SnapshotSourceEnd(tabletAlias, *slaveStartRequired, !(*readWrite), tabletType) } func commandSnapshot(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { force := subFlags.Bool("force", false, "will force the snapshot for a master, and turn it into a backup") serverMode := subFlags.Bool("server-mode", false, "will symlink the data files and leave mysqld stopped") concurrency := subFlags.Int("concurrency", 4, "how many compression/checksum jobs to run simultaneously") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action Snapshot requires <tablet alias|zk src tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } filename, parentAlias, slaveStartRequired, readOnly, originalType, err := wr.Snapshot(tabletAlias, *force, *concurrency, *serverMode) if err == nil { log.Infof("Manifest: %v", filename) log.Infof("ParentAlias: %v", parentAlias) if *serverMode { log.Infof("SlaveStartRequired: %v", slaveStartRequired) log.Infof("ReadOnly: %v", readOnly) log.Infof("OriginalType: %v", originalType) } } return "", err } func commandRestore(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { dontWaitForSlaveStart := subFlags.Bool("dont-wait-for-slave-start", false, "won't wait for replication to start (useful when restoring from snapshot source that is the replication master)") fetchConcurrency := subFlags.Int("fetch-concurrency", 3, "how many files to fetch simultaneously") fetchRetryCount := subFlags.Int("fetch-retry-count", 3, "how many times to 
retry a failed transfer") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 3 && subFlags.NArg() != 4 { return "", fmt.Errorf("action Restore requires <src tablet alias|zk src tablet path> <src manifest path> <dst tablet alias|zk dst tablet path> [<zk new master path>]") } srcTabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } dstTabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(2)) if err != nil { return "", err } parentAlias := srcTabletAlias if subFlags.NArg() == 4 { parentAlias, err = tabletParamToTabletAlias(subFlags.Arg(3)) if err != nil { return "", err } } return "", wr.Restore(srcTabletAlias, subFlags.Arg(1), dstTabletAlias, parentAlias, *fetchConcurrency, *fetchRetryCount, false, *dontWaitForSlaveStart) } func commandClone(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { force := subFlags.Bool("force", false, "will force the snapshot for a master, and turn it into a backup") concurrency := subFlags.Int("concurrency", 4, "how many compression/checksum jobs to run simultaneously") fetchConcurrency := subFlags.Int("fetch-concurrency", 3, "how many files to fetch simultaneously") fetchRetryCount := subFlags.Int("fetch-retry-count", 3, "how many times to retry a failed transfer") serverMode := subFlags.Bool("server-mode", false, "will keep the snapshot server offline to serve DB files directly") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() < 2 { return "", fmt.Errorf("action Clone requires <src tablet alias|zk src tablet path> <dst tablet alias|zk dst tablet path> ...") } srcTabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } dstTabletAliases := make([]topo.TabletAlias, subFlags.NArg()-1) for i := 1; i < subFlags.NArg(); i++ { dstTabletAliases[i-1], err = tabletParamToTabletAlias(subFlags.Arg(i)) if err != nil { return "", err } } return "", wr.Clone(srcTabletAlias, 
dstTabletAliases, *force, *concurrency, *fetchConcurrency, *fetchRetryCount, *serverMode)
}

// commandMultiRestore restores data from multiple source tablets onto a
// single destination tablet.
func commandMultiRestore(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	fetchRetryCount := subFlags.Int("fetch-retry-count", 3, "how many times to retry a failed transfer")
	concurrency := subFlags.Int("concurrency", 8, "how many concurrent jobs to run simultaneously")
	fetchConcurrency := subFlags.Int("fetch-concurrency", 4, "how many files to fetch simultaneously")
	insertTableConcurrency := subFlags.Int("insert-table-concurrency", 4, "how many tables to load into a single destination table simultaneously")
	strategy := subFlags.String("strategy", "", "which strategy to use for restore, use 'mysqlctl multirestore -help' for more info")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() < 2 {
		return "", fmt.Errorf("MultiRestore requires <dst tablet alias|destination zk path> <source tablet alias|source zk path>... %v", args)
	}
	destination, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	sources := make([]topo.TabletAlias, subFlags.NArg()-1)
	for i := 1; i < subFlags.NArg(); i++ {
		sources[i-1], err = tabletParamToTabletAlias(subFlags.Arg(i))
		if err != nil {
			return "", err
		}
	}
	err = wr.MultiRestore(destination, sources, *concurrency, *fetchConcurrency, *insertTableConcurrency, *fetchRetryCount, *strategy)
	return
}

// commandMultiSnapshot takes a sharded snapshot of a source tablet.
func commandMultiSnapshot(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	force := subFlags.Bool("force", false, "will force the snapshot for a master, and turn it into a backup")
	concurrency := subFlags.Int("concurrency", 8, "how many compression jobs to run simultaneously")
	spec := subFlags.String("spec", "-", "shard specification")
	tablesString := subFlags.String("tables", "", "dump only this comma separated list of table regexp")
	excludeTablesString := subFlags.String("exclude_tables", "", "comma separated list of regexps for tables to exclude")
	skipSlaveRestart := subFlags.Bool("skip-slave-restart", false, "after the snapshot is done, do not restart slave replication")
	maximumFilesize := subFlags.Uint64("maximum-file-size", 128*1024*1024, "the maximum size for an uncompressed data file")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action MultiSnapshot requires <src tablet alias|zk src tablet path>")
	}

	shards, err := key.ParseShardingSpec(*spec)
	if err != nil {
		return "", fmt.Errorf("multisnapshot failed: %v", err)
	}
	var tables []string
	if *tablesString != "" {
		tables = strings.Split(*tablesString, ",")
	}
	var excludeTables []string
	if *excludeTablesString != "" {
		excludeTables = strings.Split(*excludeTablesString, ",")
	}
	source, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}

	filenames, parentAlias, err := wr.MultiSnapshot(shards, source, *concurrency, tables, excludeTables, *force, *skipSlaveRestart, *maximumFilesize)
	if err == nil {
		log.Infof("manifest locations: %v", filenames)
		log.Infof("ParentAlias: %v", parentAlias)
	}
	return "", err
}

// commandReadTabletAction prints an action node as JSON.
func commandReadTabletAction(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action ReadTabletAction requires <action path>")
	}
	actionPath := subFlags.Arg(0)
	_, data, _, err := wr.TopoServer().ReadTabletActionPath(actionPath)
	if err == nil {
		actionNode, err := actionnode.ActionNodeFromJson(data, actionPath)
		if err == nil {
			wr.Logger().Printf("%v\n", jscfg.ToJson(actionNode))
		}
	}
	return "", err
}

// commandExecuteFetch runs a SQL statement on a tablet and prints the
// result as JSON.
func commandExecuteFetch(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	maxRows := subFlags.Int("max_rows", 10000, "maximum number of rows to allow in reset")
	wantFields := subFlags.Bool("want_fields", false, "also get the field names")
	disableBinlogs := subFlags.Bool("disable_binlogs",
false, "disable writing to binlogs during the query")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		// Fix: the message used to name ReadTabletAction (copy/paste error).
		return "", fmt.Errorf("action ExecuteFetch requires <tablet alias|zk tablet path> <sql command>")
	}
	alias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	query := subFlags.Arg(1)
	qr, err := wr.ExecuteFetch(alias, query, *maxRows, *wantFields, *disableBinlogs)
	if err == nil {
		wr.Logger().Printf("%v\n", jscfg.ToJson(qr))
	}
	return "", err
}

// commandExecuteHook runs a named hook on a tablet, passing any extra
// arguments as hook parameters.
func commandExecuteHook(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() < 2 {
		return "", fmt.Errorf("action ExecuteHook requires <tablet alias|zk tablet path> <hook name>")
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	hook := &hk.Hook{Name: subFlags.Arg(1), Parameters: subFlags.Args()[2:]}
	hr, err := wr.ExecuteHook(tabletAlias, hook)
	if err == nil {
		log.Infof(hr.String())
	}
	return "", err
}

// commandCreateShard creates a shard (and optionally its parent keyspace).
func commandCreateShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	force := subFlags.Bool("force", false, "will keep going even if the keyspace already exists")
	parent := subFlags.Bool("parent", false, "creates the parent keyspace if it doesn't exist")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action CreateShard requires <keyspace/shard|zk shard path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	if *parent {
		// create the keyspace first if asked; existing is fine
		if err := wr.TopoServer().CreateKeyspace(keyspace, &topo.Keyspace{}); err != nil && err != topo.ErrNodeExists {
			return "", err
		}
	}
	err = topo.CreateShard(wr.TopoServer(), keyspace, shard)
	if *force && err == topo.ErrNodeExists {
		log.Infof("shard %v/%v already exists (ignoring error with -force)", keyspace, shard)
		err = nil
	}
	return "", err
}

// commandGetShard prints a shard record as JSON.
func commandGetShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action GetShard requires <keyspace/shard|zk shard path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	shardInfo, err := wr.TopoServer().GetShard(keyspace, shard)
	if err == nil {
		wr.Logger().Printf("%v\n", jscfg.ToJson(shardInfo))
	}
	return "", err
}

// commandRebuildShardGraph rebuilds the serving graph for one or more shards.
func commandRebuildShardGraph(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	cells := subFlags.String("cells", "", "comma separated list of cells to update")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() == 0 {
		return "", fmt.Errorf("action RebuildShardGraph requires at least one <zk shard path>")
	}
	var cellArray []string
	if *cells != "" {
		cellArray = strings.Split(*cells, ",")
	}
	keyspaceShards, err := shardParamsToKeyspaceShards(wr, subFlags.Args())
	if err != nil {
		return "", err
	}
	for _, ks := range keyspaceShards {
		if err := wr.RebuildShardGraph(ks.Keyspace, ks.Shard, cellArray); err != nil {
			return "", err
		}
	}
	return "", nil
}

// commandShardExternallyReparented updates the topology after a
// reparent that happened outside of vitess.
func commandShardExternallyReparented(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		return "", fmt.Errorf("action ShardExternallyReparented requires <keyspace/shard|zk shard path> <tablet alias|zk tablet path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(1))
	if err != nil {
		return "", err
	}
	return "", wr.ShardExternallyReparented(keyspace, shard, tabletAlias)
}

// commandValidateShard checks the consistency of a shard.
func commandValidateShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	pingTablets := subFlags.Bool("ping-tablets", true, "ping all tablets during validate")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action ValidateShard requires <keyspace/shard|zk shard path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return "", wr.ValidateShard(keyspace, shard, *pingTablets)
}

// commandShardReplicationPositions prints the replication position of
// every tablet in a shard, one awk-friendly line per tablet.
func commandShardReplicationPositions(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action ShardReplicationPositions requires <keyspace/shard|zk shard path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	tablets, positions, err := wr.ShardReplicationPositions(keyspace, shard)
	if tablets == nil {
		return "", err
	}

	lines := make([]string, 0, 24)
	for _, rt := range sortReplicatingTablets(tablets, positions) {
		pos := rt.ReplicationPosition
		ti := rt.TabletInfo
		if pos == nil {
			// position could not be fetched for this tablet
			lines = append(lines, fmtTabletAwkable(ti)+" <err> <err> <err>")
		} else {
			lines = append(lines, fmtTabletAwkable(ti)+fmt.Sprintf(" %v:%010d %v:%010d %v", pos.MasterLogFile, pos.MasterLogPosition, pos.MasterLogFileIo, pos.MasterLogPositionIo, pos.SecondsBehindMaster))
		}
	}
	for _, l := range lines {
		wr.Logger().Printf("%v\n", l)
	}
	return "", nil
}

// commandListShardTablets prints all tablets of a shard.
func commandListShardTablets(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 1 {
		return "", fmt.Errorf("action ListShardTablets requires <keyspace/shard|zk shard path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return "", listTabletsByShard(wr, keyspace, shard)
}

// commandSetShardServedTypes sets (or clears) the served types of a shard.
func commandSetShardServedTypes(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
}
	if subFlags.NArg() != 1 && subFlags.NArg() != 2 {
		return "", fmt.Errorf("action SetShardServedTypes requires <keyspace/shard|zk shard path> [<served type1>,<served type2>,...]")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	var servedTypes []topo.TabletType
	if subFlags.NArg() == 2 {
		types := strings.Split(subFlags.Arg(1), ",")
		servedTypes = make([]topo.TabletType, 0, len(types))
		for _, t := range types {
			// only serving types are accepted here
			tt, err := parseTabletType(t, []topo.TabletType{topo.TYPE_MASTER, topo.TYPE_REPLICA, topo.TYPE_RDONLY})
			if err != nil {
				return "", err
			}
			servedTypes = append(servedTypes, tt)
		}
	}
	return "", wr.SetShardServedTypes(keyspace, shard, servedTypes)
}

// commandShardMultiRestore restores data from multiple sources into all
// tablets of a shard.
func commandShardMultiRestore(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	fetchRetryCount := subFlags.Int("fetch-retry-count", 3, "how many times to retry a failed transfer")
	concurrency := subFlags.Int("concurrency", 8, "how many concurrent jobs to run simultaneously")
	fetchConcurrency := subFlags.Int("fetch-concurrency", 4, "how many files to fetch simultaneously")
	insertTableConcurrency := subFlags.Int("insert-table-concurrency", 4, "how many tables to load into a single destination table simultaneously")
	strategy := subFlags.String("strategy", "", "which strategy to use for restore, use 'mysqlctl multirestore -help' for more info")
	tables := subFlags.String("tables", "", "comma separated list of tables to replicate (used for vertical split)")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() < 2 {
		return "", fmt.Errorf("ShardMultiRestore requires <keyspace/shard|zk shard path> <source tablet alias|source zk path>... %v", args)
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	sources := make([]topo.TabletAlias, subFlags.NArg()-1)
	for i := 1; i < subFlags.NArg(); i++ {
		sources[i-1], err = tabletParamToTabletAlias(subFlags.Arg(i))
		if err != nil {
			return "", err
		}
	}
	var tableArray []string
	if *tables != "" {
		tableArray = strings.Split(*tables, ",")
	}
	err = wr.ShardMultiRestore(keyspace, shard, sources, tableArray, *concurrency, *fetchConcurrency, *insertTableConcurrency, *fetchRetryCount, *strategy)
	return
}

// commandShardReplicationAdd adds a replication record for a tablet.
func commandShardReplicationAdd(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 3 {
		return "", fmt.Errorf("action ShardReplicationAdd requires <keyspace/shard|zk shard path> <tablet alias|zk tablet path> <parent tablet alias|zk parent tablet path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(1))
	if err != nil {
		return "", err
	}
	parentAlias, err := tabletParamToTabletAlias(subFlags.Arg(2))
	if err != nil {
		return "", err
	}
	return "", topo.AddShardReplicationRecord(wr.TopoServer(), keyspace, shard, tabletAlias, parentAlias)
}

// commandShardReplicationRemove removes a replication record for a tablet.
func commandShardReplicationRemove(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		return "", fmt.Errorf("action ShardReplicationRemove requires <keyspace/shard|zk shard path> <tablet alias|zk tablet path>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(1))
	if err != nil {
		return "", err
	}
	return "", topo.RemoveShardReplicationRecord(wr.TopoServer(), keyspace, shard, tabletAlias)
}

func commandShardReplicationFix(wr
*wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		// BUG FIX: this message previously named the wrong action
		// ("ShardReplicationRemove"), a copy-paste error from the
		// function above.
		return "", fmt.Errorf("action ShardReplicationFix requires <cell> <keyspace/shard|zk shard path>")
	}
	cell := subFlags.Arg(0)
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(1))
	if err != nil {
		return "", err
	}
	return "", topo.FixShardReplication(wr.TopoServer(), cell, keyspace, shard)
}

// commandRemoveShardCell removes a cell from a shard's record.
// With -force it keeps going even if the cell's topology server is
// unreachable for the tablet check.
func commandRemoveShardCell(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	force := subFlags.Bool("force", false, "will keep going even we can't reach the cell's topology server to check for tablets")
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() != 2 {
		return "", fmt.Errorf("action RemoveShardCell requires <keyspace/shard|zk shard path> <cell>")
	}
	keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0))
	if err != nil {
		return "", err
	}
	return "", wr.RemoveShardCell(keyspace, shard, subFlags.Arg(1), *force)
}

// commandDeleteShard deletes one or more shards. Shards that do not
// exist (topo.ErrNoNode) are logged and skipped; any other error aborts.
func commandDeleteShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (status string, err error) {
	if err := subFlags.Parse(args); err != nil {
		return "", err
	}
	if subFlags.NArg() == 0 {
		return "", fmt.Errorf("action DeleteShard requires <keyspace/shard|zk shard path> ...")
	}
	keyspaceShards, err := shardParamsToKeyspaceShards(wr, subFlags.Args())
	if err != nil {
		return "", err
	}
	for _, ks := range keyspaceShards {
		err := wr.DeleteShard(ks.Keyspace, ks.Shard)
		switch err {
		case nil:
			// keep going
		case topo.ErrNoNode:
			log.Infof("Shard %v/%v doesn't exist, skipping it", ks.Keyspace, ks.Shard)
		default:
			return "", err
		}
	}
	return "", nil
}

// commandCreateKeyspace creates a new keyspace record in the topology
// server (continues on the following source line).
func commandCreateKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) {
	shardingColumnName := subFlags.String("sharding_column_name", "", "column to use for sharding operations")
	shardingColumnType :=
subFlags.String("sharding_column_type", "", "type of the column to use for sharding operations") force := subFlags.Bool("force", false, "will keep going even if the keyspace already exists") var servedFrom flagutil.StringMapValue subFlags.Var(&servedFrom, "served-from", "comma separated list of dbtype:keyspace pairs used to serve traffic") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action CreateKeyspace requires <keyspace name|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } kit := key.KeyspaceIdType(*shardingColumnType) if !key.IsKeyspaceIdTypeInList(kit, key.AllKeyspaceIdTypes) { return "", fmt.Errorf("invalid sharding_column_type") } ki := &topo.Keyspace{ ShardingColumnName: *shardingColumnName, ShardingColumnType: kit, } if len(servedFrom) > 0 { ki.ServedFrom = make(map[topo.TabletType]string, len(servedFrom)) for name, value := range servedFrom { tt := topo.TabletType(name) if !topo.IsInServingGraph(tt) { return "", fmt.Errorf("Cannot use tablet type that is not in serving graph: %v", tt) } ki.ServedFrom[tt] = value } } err = wr.TopoServer().CreateKeyspace(keyspace, ki) if *force && err == topo.ErrNodeExists { log.Infof("keyspace %v already exists (ignoring error with -force)", keyspace) err = nil } return "", err } func commandGetKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action GetKeyspace requires <keyspace|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } keyspaceInfo, err := wr.TopoServer().GetKeyspace(keyspace) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(keyspaceInfo)) } return "", err } func commandSetKeyspaceShardingInfo(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, 
error) { force := subFlags.Bool("force", false, "will update the fields even if they're already set, use with care") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() > 3 || subFlags.NArg() < 1 { return "", fmt.Errorf("action SetKeyspaceShardingInfo requires <keyspace name|zk keyspace path> [<column name>] [<column type>]") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } columnName := "" if subFlags.NArg() >= 2 { columnName = subFlags.Arg(1) } kit := key.KIT_UNSET if subFlags.NArg() >= 3 { kit = key.KeyspaceIdType(subFlags.Arg(2)) if !key.IsKeyspaceIdTypeInList(kit, key.AllKeyspaceIdTypes) { return "", fmt.Errorf("invalid sharding_column_type") } } return "", wr.SetKeyspaceShardingInfo(keyspace, columnName, kit, *force) } func commandRebuildKeyspaceGraph(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { cells := subFlags.String("cells", "", "comma separated list of cells to update") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() == 0 { return "", fmt.Errorf("action RebuildKeyspaceGraph requires at least one <zk keyspace path>") } var cellArray []string if *cells != "" { cellArray = strings.Split(*cells, ",") } keyspaces, err := keyspaceParamsToKeyspaces(wr, subFlags.Args()) if err != nil { return "", err } for _, keyspace := range keyspaces { if err := wr.RebuildKeyspaceGraph(keyspace, cellArray, nil); err != nil { return "", err } } return "", nil } func commandValidateKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { pingTablets := subFlags.Bool("ping-tablets", false, "ping all tablets during validate") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidateKeyspace requires <keyspace name|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } return "", 
wr.ValidateKeyspace(keyspace, *pingTablets) } func commandMigrateServedTypes(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { reverse := subFlags.Bool("reverse", false, "move the served type back instead of forward, use in case of trouble") skipRebuild := subFlags.Bool("skip-rebuild", false, "do not rebuild the shard and keyspace graph after the migration (replica and rdonly only)") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action MigrateServedTypes requires <source keyspace/shard|zk source shard path> <served type>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } servedType, err := parseTabletType(subFlags.Arg(1), []topo.TabletType{topo.TYPE_MASTER, topo.TYPE_REPLICA, topo.TYPE_RDONLY}) if err != nil { return "", err } if servedType == topo.TYPE_MASTER && *skipRebuild { return "", fmt.Errorf("can only specify skip-rebuild for non-master migrations") } return "", wr.MigrateServedTypes(keyspace, shard, servedType, *reverse, *skipRebuild) } func commandMigrateServedFrom(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { reverse := subFlags.Bool("reverse", false, "move the served from back instead of forward, use in case of trouble") skipRebuild := subFlags.Bool("skip-rebuild", false, "do not rebuild the shard and keyspace graph after the migration (replica and rdonly only)") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action MigrateServedFrom requires <destination keyspace/shard|zk source shard path> <served type>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } servedType, err := parseTabletType(subFlags.Arg(1), []topo.TabletType{topo.TYPE_MASTER, topo.TYPE_REPLICA, topo.TYPE_RDONLY}) if err != nil { return "", err } if servedType == topo.TYPE_MASTER && *skipRebuild 
{ return "", fmt.Errorf("can only specify skip-rebuild for non-master migrations") } return "", wr.MigrateServedFrom(keyspace, shard, servedType, *reverse, *skipRebuild) } func commandWaitForAction(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action WaitForAction requires <action path>") } return subFlags.Arg(0), nil } func commandResolve(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action Resolve requires <keyspace>.<shard>.<db type>:<port name>") } parts := strings.Split(subFlags.Arg(0), ":") if len(parts) != 2 { return "", fmt.Errorf("action Resolve requires <keyspace>.<shard>.<db type>:<port name>") } namedPort := parts[1] parts = strings.Split(parts[0], ".") if len(parts) != 3 { return "", fmt.Errorf("action Resolve requires <keyspace>.<shard>.<db type>:<port name>") } tabletType, err := parseTabletType(parts[2], topo.AllTabletTypes) if err != nil { return "", err } addrs, err := topo.LookupVtName(wr.TopoServer(), "local", parts[0], parts[1], tabletType, namedPort) if err != nil { return "", err } for _, addr := range addrs { wr.Logger().Printf("%v:%v\n", addr.Target, addr.Port) } return "", nil } func commandValidate(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { pingTablets := subFlags.Bool("ping-tablets", false, "ping all tablets during validate") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 0 { log.Warningf("action Validate doesn't take any parameter any more") } return "", wr.Validate(*pingTablets) } func commandRebuildReplicationGraph(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { // This is sort of a nuclear option. 
if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() < 2 { return "", fmt.Errorf("action RebuildReplicationGraph requires <cell1>,<cell2>,... <keyspace1>,<keyspace2>...") } cellParams := strings.Split(subFlags.Arg(0), ",") resolvedCells, err := resolveWildcards(wr, cellParams) if err != nil { return "", err } cells := make([]string, 0, len(cellParams)) for _, cell := range resolvedCells { c, err := vtPathToCell(cell) if err != nil { return "", err } cells = append(cells, c) } keyspaceParams := strings.Split(subFlags.Arg(1), ",") keyspaces, err := keyspaceParamsToKeyspaces(wr, keyspaceParams) if err != nil { return "", err } return "", wr.RebuildReplicationGraph(cells, keyspaces) } func commandListAllTablets(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ListAllTablets requires <cell name|zk vt path>") } cell, err := vtPathToCell(subFlags.Arg(0)) if err != nil { return "", err } return "", dumpAllTablets(wr, cell) } func commandListTablets(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() == 0 { return "", fmt.Errorf("action ListTablets requires <tablet alias|zk tablet path> ...") } zkPaths, err := resolveWildcards(wr, subFlags.Args()) if err != nil { return "", err } aliases := make([]topo.TabletAlias, len(zkPaths)) for i, zkPath := range zkPaths { aliases[i], err = tabletParamToTabletAlias(zkPath) if err != nil { return "", err } } return "", dumpTablets(wr, aliases) } func commandGetSchema(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { tables := subFlags.String("tables", "", "comma separated list of regexps for tables to gather schema information for") excludeTables := subFlags.String("exclude_tables", "", "comma separated list of regexps for 
tables to exclude") includeViews := subFlags.Bool("include-views", false, "include views in the output") tableNamesOnly := subFlags.Bool("table_names_only", false, "only display the table names that match") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action GetSchema requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } var tableArray []string if *tables != "" { tableArray = strings.Split(*tables, ",") } var excludeTableArray []string if *excludeTables != "" { excludeTableArray = strings.Split(*excludeTables, ",") } sd, err := wr.GetSchema(tabletAlias, tableArray, excludeTableArray, *includeViews) if err == nil { if *tableNamesOnly { for _, td := range sd.TableDefinitions { wr.Logger().Printf("%v\n", td.Name) } } else { wr.Logger().Printf("%v\n", jscfg.ToJson(sd)) } } return "", err } func commandReloadSchema(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ReloadSchema requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.ReloadSchema(tabletAlias) } func commandValidateSchemaShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { excludeTables := subFlags.String("exclude_tables", "", "comma separated list of regexps for tables to exclude") includeViews := subFlags.Bool("include-views", false, "include views in the validation") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidateSchemaShard requires <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } var excludeTableArray []string if 
*excludeTables != "" { excludeTableArray = strings.Split(*excludeTables, ",") } return "", wr.ValidateSchemaShard(keyspace, shard, excludeTableArray, *includeViews) } func commandValidateSchemaKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { excludeTables := subFlags.String("exclude_tables", "", "comma separated list of regexps for tables to exclude") includeViews := subFlags.Bool("include-views", false, "include views in the validation") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidateSchemaKeyspace requires <keyspace name|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } var excludeTableArray []string if *excludeTables != "" { excludeTableArray = strings.Split(*excludeTables, ",") } return "", wr.ValidateSchemaKeyspace(keyspace, excludeTableArray, *includeViews) } func commandPreflightSchema(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { sql := subFlags.String("sql", "", "sql command") sqlFile := subFlags.String("sql-file", "", "file containing the sql commands") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action PreflightSchema requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } change, err := getFileParam(*sql, *sqlFile, "sql") if err != nil { return "", err } scr, err := wr.PreflightSchema(tabletAlias, change) if err == nil { log.Infof(scr.String()) } return "", err } func commandApplySchema(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { force := subFlags.Bool("force", false, "will apply the schema even if preflight schema doesn't match") sql := subFlags.String("sql", "", "sql command") sqlFile := subFlags.String("sql-file", "", "file containing the sql commands") 
skipPreflight := subFlags.Bool("skip-preflight", false, "do not preflight the schema (use with care)") stopReplication := subFlags.Bool("stop-replication", false, "stop replication before applying schema") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ApplySchema requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } change, err := getFileParam(*sql, *sqlFile, "sql") if err != nil { return "", err } sc := &myproto.SchemaChange{} sc.Sql = change sc.AllowReplication = !(*stopReplication) // do the preflight to get before and after schema if !(*skipPreflight) { scr, err := wr.PreflightSchema(tabletAlias, sc.Sql) if err != nil { return "", fmt.Errorf("preflight failed: %v", err) } log.Infof("Preflight: " + scr.String()) sc.BeforeSchema = scr.BeforeSchema sc.AfterSchema = scr.AfterSchema sc.Force = *force } scr, err := wr.ApplySchema(tabletAlias, sc) if err == nil { log.Infof(scr.String()) } return "", err } func commandApplySchemaShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { force := subFlags.Bool("force", false, "will apply the schema even if preflight schema doesn't match") sql := subFlags.String("sql", "", "sql command") sqlFile := subFlags.String("sql-file", "", "file containing the sql commands") simple := subFlags.Bool("simple", false, "just apply change on master and let replication do the rest") newParent := subFlags.String("new-parent", "", "will reparent to this tablet after the change") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ApplySchemaShard requires <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } change, err := getFileParam(*sql, *sqlFile, "sql") if err != nil { return "", err } var newParentAlias 
topo.TabletAlias if *newParent != "" { newParentAlias, err = tabletParamToTabletAlias(*newParent) if err != nil { return "", err } } if (*simple) && (*newParent != "") { return "", fmt.Errorf("new_parent for action ApplySchemaShard can only be specified for complex schema upgrades") } scr, err := wr.ApplySchemaShard(keyspace, shard, change, newParentAlias, *simple, *force) if err == nil { log.Infof(scr.String()) } return "", err } func commandApplySchemaKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { force := subFlags.Bool("force", false, "will apply the schema even if preflight schema doesn't match") sql := subFlags.String("sql", "", "sql command") sqlFile := subFlags.String("sql-file", "", "file containing the sql commands") simple := subFlags.Bool("simple", false, "just apply change on master and let replication do the rest") if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ApplySchemaKeyspace requires <keyspace|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } change, err := getFileParam(*sql, *sqlFile, "sql") if err != nil { return "", err } scr, err := wr.ApplySchemaKeyspace(keyspace, change, *simple, *force) if err == nil { log.Infof(scr.String()) } return "", err } func commandValidateVersionShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidateVersionShard requires <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.ValidateVersionShard(keyspace, shard) } func commandValidateVersionKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if 
subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidateVersionKeyspace requires <keyspace name|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.ValidateVersionKeyspace(keyspace) } func commandGetPermissions(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action GetPermissions requires <tablet alias|zk tablet path>") } tabletAlias, err := tabletParamToTabletAlias(subFlags.Arg(0)) if err != nil { return "", err } p, err := wr.GetPermissions(tabletAlias) if err == nil { log.Infof("%v", p.String()) // they can contain '%' } return "", err } func commandValidatePermissionsShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidatePermissionsShard requires <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.ValidatePermissionsShard(keyspace, shard) } func commandValidatePermissionsKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 1 { return "", fmt.Errorf("action ValidatePermissionsKeyspace requires <keyspace name|zk keyspace path>") } keyspace, err := keyspaceParamToKeyspace(subFlags.Arg(0)) if err != nil { return "", err } return "", wr.ValidatePermissionsKeyspace(keyspace) } func commandGetSrvKeyspace(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action GetSrvKeyspace requires <cell> <keyspace>") } srvKeyspace, err := 
wr.TopoServer().GetSrvKeyspace(subFlags.Arg(0), subFlags.Arg(1)) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(srvKeyspace)) } return "", err } func commandGetSrvShard(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action GetSrvShard requires <cell> <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(1)) if err != nil { return "", err } srvShard, err := wr.TopoServer().GetSrvShard(subFlags.Arg(0), keyspace, shard) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(srvShard)) } return "", err } func commandGetEndPoints(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 3 { return "", fmt.Errorf("action GetEndPoints requires <cell> <keyspace/shard|zk shard path> <tablet type>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(1)) if err != nil { return "", err } tabletType := topo.TabletType(subFlags.Arg(2)) endPoints, err := wr.TopoServer().GetEndPoints(subFlags.Arg(0), keyspace, shard, tabletType) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(endPoints)) } return "", err } func commandGetShardReplication(wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) (string, error) { if err := subFlags.Parse(args); err != nil { return "", err } if subFlags.NArg() != 2 { return "", fmt.Errorf("action GetShardReplication requires <cell> <keyspace/shard|zk shard path>") } keyspace, shard, err := shardParamToKeyspaceShard(subFlags.Arg(1)) if err != nil { return "", err } shardReplication, err := wr.TopoServer().GetShardReplication(subFlags.Arg(0), keyspace, shard) if err == nil { wr.Logger().Printf("%v\n", jscfg.ToJson(shardReplication)) } return "", err } type rTablet struct { *topo.TabletInfo *myproto.ReplicationPosition } type 
rTablets []*rTablet func (rts rTablets) Len() int { return len(rts) } func (rts rTablets) Swap(i, j int) { rts[i], rts[j] = rts[j], rts[i] } // Sort for tablet replication. // master first, then i/o position, then sql position func (rts rTablets) Less(i, j int) bool { // NOTE: Swap order of unpack to reverse sort l, r := rts[j], rts[i] // l or r ReplicationPosition would be nil if we failed to get // the position (put them at the beginning of the list) if l.ReplicationPosition == nil { return r.ReplicationPosition != nil } if r.ReplicationPosition == nil { return false } var lTypeMaster, rTypeMaster int if l.Type == topo.TYPE_MASTER { lTypeMaster = 1 } if r.Type == topo.TYPE_MASTER { rTypeMaster = 1 } if lTypeMaster < rTypeMaster { return true } if lTypeMaster == rTypeMaster { if l.MapKeyIo() < r.MapKeyIo() { return true } if l.MapKeyIo() == r.MapKeyIo() { if l.MapKey() < r.MapKey() { return true } } } return false } func sortReplicatingTablets(tablets []*topo.TabletInfo, positions []*myproto.ReplicationPosition) []*rTablet { rtablets := make([]*rTablet, len(tablets)) for i, pos := range positions { rtablets[i] = &rTablet{tablets[i], pos} } sort.Sort(rTablets(rtablets)) return rtablets } // RunCommand will execute the command using the provided wrangler. // It will return the actionPath to wait on for long remote actions if // applicable. 
func RunCommand(wr *wrangler.Wrangler, args []string) (string, error) { action := args[0] actionLowerCase := strings.ToLower(action) for _, group := range commands { for _, cmd := range group.commands { if strings.ToLower(cmd.name) == actionLowerCase { subFlags := flag.NewFlagSet(action, flag.ExitOnError) subFlags.Usage = func() { wr.Logger().Errorf("Usage: %s %s %s\n\n", action, cmd.name, cmd.params) wr.Logger().Errorf("%s\n\n", cmd.help) subFlags.PrintDefaults() } return cmd.method(wr, subFlags, args[1:]) } } } wr.Logger().Errorf("Unknown command: %#v", action) return "", ErrUnknownCommand } // PrintHelp will print the list of commands to stderr func PrintHelp() { for _, group := range commands { fmt.Fprintf(os.Stderr, "%s:\n", group.name) for _, cmd := range group.commands { if strings.HasPrefix(cmd.help, "HIDDEN") { continue } fmt.Fprintf(os.Stderr, " %s %s\n", cmd.name, cmd.params) } fmt.Fprintf(os.Stderr, "\n") } }
//================================================================ // Fast gates on uint32 //================================================================ func mpcZ2XorFast(x, y []uint32, c *Circuit) (z []uint32) { z = []uint32{0, 0, 0} z[0] = x[0] ^ y[0] z[1] = x[1] ^ y[1] z[2] = x[2] ^ y[2] return }
<reponame>etoile/EtoileUI
/*
	Copyright (C) 2013 <NAME>

	Author:  <NAME> <<EMAIL>>
	Date:  December 2013
	License:  Modified BSD  (see COPYING)
 */

#import <Foundation/Foundation.h>
#import <EtoileUI/ETGraphicsBackend.h>
#import <EtoileFoundation/EtoileFoundation.h>
#import <EtoileUI/ETLayoutItemFactory.h>

/** Category that extends ETLayoutItemFactory with prebuilt UI-pattern
item constructors. */
@interface ETLayoutItemFactory (ETUIPatternAdditions)

/** Returns a new layout item group intended to act as an object picker.
NOTE(review): behavior is defined in the implementation file, not visible
here — confirm the exact configuration there. */
@property (nonatomic, readonly) ETLayoutItemGroup *objectPicker;

@end
import React from "react"
import Icon from "../Icon"
// NOTE(review): Spacer is imported but not used in this component.
import { Spacer } from "../Layout"
import { Paragraph, ParagraphBase } from "../Typography"

// PuzzleInfoContent renders the explanatory panel for the puzzle view:
// two instruction paragraphs, a color-coded funding legend (white =
// restoration funded, black = not funded), and a close icon pinned to
// the top-right corner. The component is purely presentational; the
// close icon carries no click handler here — presumably the parent
// attaches one (TODO confirm).
function PuzzleInfoContent() {
  return (
    <div className="relative pb-8">
      <Paragraph.Base>
        Hover the image to discover each piece of work and to explore the
        puzzle.
      </Paragraph.Base>
      <Paragraph.Base>
        Each piece represents an artifact within the cathedral that needs
        restoration.
      </Paragraph.Base>
      {/* Legend: maps puzzle-piece fill color to funding state. */}
      <div className="border-t border-white border-b py-2 xl:flex justify-between">
        <ParagraphBase className="text-md font-normal m-0">Key</ParagraphBase>
        <div className="xl:flex">
          <span className="flex items-center">
            <span className="inline-block w-4 h-4 overflow-hidden bg-white border border-white border-opacity-60" />
            <ParagraphBase className="text-base font-light inline m-0 pl-2">
              Restoration Funded
            </ParagraphBase>
          </span>
          <span className="flex items-center xl:ml-4">
            <span className="inline-block w-4 h-4 overflow-hidden bg-black border border-white border-opacity-60" />
            <ParagraphBase className="text-base font-light inline m-0 pl-2">
              Not Funded
            </ParagraphBase>
          </span>
        </div>
      </div>
      {/* Close affordance, absolutely positioned in the top-right. */}
      <span className="absolute -top-4 -right-4 cursor-pointer">
        <Icon.Close />
      </span>
    </div>
  )
}

export default PuzzleInfoContent
import { HttpService } from '@nestjs/axios';
// NOTE(review): HttpException and HttpStatus are imported but unused here.
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { Octokit } from '@octokit/core';
require('dotenv').config();
const GITHUB_TOKEN = process.env.GITHUB_TOKEN;
// Module-level Octokit client authenticated via GITHUB_TOKEN.
// NOTE(review): octokit is constructed but not referenced in this file's
// visible code — confirm it is used elsewhere or remove.
const octokit = new Octokit({ auth: GITHUB_TOKEN });

/**
 * Service wrapping the CoinAPI sandbox REST endpoints for crypto asset
 * listing and exchange-rate lookups.
 */
@Injectable()
export class AppService {
  // CoinAPI credentials and fallback quote currency, read from env vars.
  private coinApiKey = process.env.COIN_API_KEY;
  private defaultQuoteRate = process.env.DEFAULT_RATE;

  constructor(private httpService: HttpService) {}

  /**
   * Fetches all assets from the CoinAPI sandbox and returns their names.
   * NOTE(review): assumes every asset entry has a `name` field — entries
   * without one yield undefined in the result; confirm against the API.
   */
  async getSupportedCryptoCurrency() {
    const assets = await this.httpService
      .get('https://rest-sandbox.coinapi.io/v1/assets', {
        headers: {
          'X-CoinAPI-Key': this.coinApiKey,
        },
      })
      .toPromise();
    return (assets.data as []).map((elem: any) => elem.name);
  }

  /**
   * Looks up the exchange rate from baseRate to quoteRate, falling back
   * to the DEFAULT_RATE env value when quoteRate is undefined.
   * The `src_side_quote` field is deliberately stripped from the
   * returned payload via rest-destructuring.
   */
  async getPriceInformation(baseRate: string, quoteRate: string) {
    const price = await this.httpService
      .get(
        `https://rest-sandbox.coinapi.io/v1/exchangerate/${baseRate}/${
          quoteRate !== undefined ? quoteRate : this.defaultQuoteRate
        }`,
        {
          headers: {
            'X-CoinAPI-Key': this.coinApiKey,
          },
        },
      )
      .toPromise();
    const { src_side_quote, ...data } = price.data;
    return data;
  }
}
// NewRichTextItalic creates a new RichTextItalic // // @param text Text func NewRichTextItalic(text RichText) *RichTextItalic { richTextItalicTemp := RichTextItalic{ tdCommon: tdCommon{Type: "richTextItalic"}, Text: text, } return &richTextItalicTemp }
import { GetServerSideProps, NextPage } from "next";
import React, { useEffect } from "react";
import { Layout } from "../components/UI/Layout";
import { LoadingSpinner } from "../components/UI/LoadingSpinner";
import { confirmAccount } from "../crud/users";

// NOTE(review): `useEffect` and `LoadingSpinner` are imported but unused (the
// client-side confirmation effect below is commented out) — confirm before
// removing.

/**
 * Fallback page shown when no confirmation key is present: tells the user to
 * check their inbox. Confirmation itself happens server-side in
 * getServerSideProps.
 */
const ConfirmEmailPage: NextPage = () => {
  // Dead code: an earlier client-side confirmation attempt, superseded by
  // the server-side flow below.
  // useEffect(() => {
  //   (async () => {
  //     const { value, error } = await confirmAccount({
  //       confirmationKey: "ASDASDAsD",
  //     });
  //   })().catch((err) => {
  //     console.error(err);
  //   });
  // }, []);

  return (
    <Layout pageTitle="Check your inbox">
      <p className="text-white">
        You'll be getting an email shortly with a confirmation link.
      </p>
    </Layout>
  );
};

export default ConfirmEmailPage;

/**
 * Server-side account confirmation.
 *
 * - No `confirmationKey` in the query → render the "check your inbox" page.
 * - Key present and confirmation fails → redirect to /confirm-email-error.
 * - Key present and confirmation succeeds → redirect to /confirm-email-success.
 */
export const getServerSideProps: GetServerSideProps = async (context) => {
  const { confirmationKey } = context.query as { confirmationKey: string };
  if (!confirmationKey) {
    return {
      props: {},
    };
  }
  // NOTE(review): `value` is destructured but never used — only `error`
  // drives the redirect decision.
  const { value, error } = await confirmAccount({
    confirmationKey,
  });
  if (error) {
    return {
      redirect: {
        destination: "/confirm-email-error",
        permanent: false,
      },
    };
  }
  return {
    redirect: {
      destination: "/confirm-email-success",
      permanent: false,
    },
  };
};
/**
 * Helper function for service add and check functions:
 * check if the given service can be decoded with the parameters in rd;
 * if yes, return TRUE and line start and count for both fields within
 * the range limits of rd.
 */
static vbi_bool
vbi_raw_decoder_check_service(const vbi_raw_decoder *rd, int srv_idx, int strict, int *row, int *count)
{
	double signal;   /* duration in seconds of CRI + FRC + payload for this service */
	int field;
	vbi_bool result = FALSE;

	/* Video standard (525/625) must match the decoder's. */
	if (vbi_services[srv_idx].scanning != rd->scanning)
		goto finished;

	/* Total signal duration: CRI bits at the CRI rate plus FRC + payload
	 * bits at the data bit rate. */
	signal = vbi_services[srv_idx].cri_bits / (double) vbi_services[srv_idx].cri_rate
		+ (vbi_services[srv_idx].frc_bits + vbi_services[srv_idx].payload) / (double) vbi_services[srv_idx].bit_rate;

	if (rd->offset > 0 && strict > 0) {
		/* Horizontal offset known: the sampling window [offset, samples_end]
		 * must cover the service's signal with ~0.5 microseconds of slack. */
		double offset = rd->offset / (double) rd->sampling_rate;
		double samples_end = (rd->offset + rd->bytes_per_line) / (double) rd->sampling_rate;

		if (offset > (vbi_services[srv_idx].offset / 1e9 - 0.5e-6)) {
			/* Sampling starts too late to catch the start of the signal. */
			debug4("skipping service 0x%08X: H-Off %d = %f > %f", vbi_services[srv_idx].id, rd->offset, offset, vbi_services[srv_idx].offset / 1e9 - 0.5e-6);
			goto finished;
		}
		if (samples_end < (vbi_services[srv_idx].offset / 1e9 + signal + 0.5e-6)) {
			/* Sampling ends before the signal is complete. */
			debug5("skipping service 0x%08X: sampling window too short: end %f < %f = offset %d *10^-9 + %f", vbi_services[srv_idx].id, samples_end, vbi_services[srv_idx].offset / 1e9 + signal + 0.5e-6, vbi_services[srv_idx].offset, signal);
			goto finished;
		}
	} else {
		/* No usable offset: just require the raw line to be long enough to
		 * hold the whole signal (1 microsecond slack). */
		double samples = rd->bytes_per_line / (double) rd->sampling_rate;

		if (samples < (signal + 1.0e-6)) {
			debug1("skipping service 0x%08X: not enough samples", vbi_services[srv_idx].id);
			goto finished;
		}
	}

	/* Check line coverage per field and compute row offsets / line counts. */
	for (field = 0; field < 2; field++) {
		int start = rd->start[field];
		int end = start + rd->count[field] - 1;

		if (!rd->synchronous) {
			debug1("skipping service 0x%08X: not sync'ed", vbi_services[srv_idx].id);
			goto finished;
		}
		if (!(vbi_services[srv_idx].first[field] && vbi_services[srv_idx].last[field])) {
			/* Service carries no data on this field. */
			count[field] = 0;
			continue;
		}
		if (rd->count[field] == 0) {
			/* Decoder captures no lines on this field. */
			count[field] = 0;
			continue;
		}
		if (rd->start[field] > 0 && strict > 0) {
			/* Line numbers known: in strict mode (or when the service is a
			 * single line) the captured range must fully cover the service. */
			if (strict > 1 || (vbi_services[srv_idx].first[field] == vbi_services[srv_idx].last[field]))
				if (start > vbi_services[srv_idx].first[field] || end < vbi_services[srv_idx].last[field]) {
					debug5("skipping service 0x%08X: lines not available have %d-%d, need %d-%d", vbi_services[srv_idx].id, start, end, vbi_services[srv_idx].first[field], vbi_services[srv_idx].last[field]);
					goto finished;
				}
			/* Clip the service's line range to the captured window. */
			row[field] = MAX(0, (int) vbi_services[srv_idx].first[field] - start);
			count[field] = MIN(end, vbi_services[srv_idx].last[field]) - (start + row[field]) + 1;
		} else {
			/* Unknown line positions: use the whole captured range. */
			row[field] = 0;
			count[field] = rd->count[field];
		}
	}

	/* Second-field rows are stored after all first-field rows in the buffer. */
	row[1] += rd->count[0];

	if (count[0] + count[1] == 0) {
		debug1("skipping service 0x%08X: zero line count", vbi_services[srv_idx].id);
		goto finished;
	}

	result = TRUE;

finished:
	return result;
}
import React, { useEffect, useState } from "react"
import Box from "../../../components/Box"
import Breadcrumb from "../../../components/Breadcrumb"
import LabelValue from "../../../components/LabelValue"
import Panel from "../../../components/Panel"
import Tag from "../../../components/Tag"
import AppLayout from "../../../layouts/AppLayout"
import useConstants from "../../../utils/hooks/useConstants"

/**
 * Dashboard page showing the up/down status of each backend API
 * (Security, Bot, Budget), one StatusPanel per service.
 */
const StatusPage: React.FC<{ location: any }> = ({ location }) => {
  const constants = useConstants()

  type TData = {
    title: string
    url: string
  }

  // Services to probe; base URLs come from the environment via useConstants.
  const dataList: TData[] = [
    { title: "Security", url: constants.getApiSecurity() },
    { title: "Bot", url: constants.getApiBot() },
    { title: "Budget", url: constants.getApiBudget() },
  ]

  return (
    <AppLayout title="Status" location={location} needAuth={true}>
      <section className="section">
        <article className="container">
          <div className="columns">
            <div className="column" />
            <div className="column is-10">
              <Breadcrumb paths={["Status"]} />
              <br />
              <div className="columns is-multiline is-centered">
                {dataList.map((data, i) => (
                  <div key={`data${i}`} className="column is-6">
                    <StatusPanel title={data.title} url={data.url} />
                  </div>
                ))}
              </div>
            </div>
            <div className="column" />
          </div>
        </article>
      </section>
    </AppLayout>
  )
}

/**
 * Pings `${url}/ping` once on mount and renders the outcome as a coloured
 * tag: "UP" → Online, "DOWN" (or fetch failure) → Offline, anything else
 * (including the initial empty state) → Checking.
 */
const StatusPanel: React.FC<{ title: string; url: string }> = ({
  title,
  url,
}) => {
  const [outcome, setOutcome] = useState<string>("")
  const [content, setContent] = useState<string>("")

  // NOTE(review): deps array omits `url`, so the probe runs only for the
  // mount-time URL; fine while URLs are static — confirm.
  useEffect(() => {
    ;(async () => {
      try {
        const response = await fetch(`${url}/ping`)
        const responseBody = await response.json()
        // Crude pretty-printing of the JSON array for the <pre> block below.
        setContent(
          JSON.stringify(responseBody).replace("[", "[\n\t").replace("]", "\n]")
        )
        setOutcome(responseBody.outcome)
      } catch (e) {
        console.log(e)
        setOutcome("DOWN")
      }
    })()
  }, [])

  return (
    <Panel title={title} color="is-link">
      <Box>
        <div className="columns">
          <div className="column is-8">
            <LabelValue label="URL">{url}</LabelValue>
          </div>
          <div className="column is-4">
            <LabelValue label="Status">
              {outcome === "UP" ? (
                <Tag value="Online" color="is-success" />
              ) : outcome === "DOWN" ? (
                <Tag value="Offline" color="is-danger" />
              ) : (
                <Tag value="Checking" color="is-warning" />
              )}
            </LabelValue>
          </div>
        </div>
        {/* NOTE(review): dangerouslySetInnerHTML renders the raw ping
            response; the APIs are trusted here, but plain {content} would be
            safer — confirm. */}
        {content && <pre dangerouslySetInnerHTML={{ __html: content }} />}
      </Box>
    </Panel>
  )
}

export default StatusPage
import * as core from '@actions/core'
import {GithubKit} from './github-kit'
import {IRelease} from '../interface/release'
import {IReleaseAsset} from '../interface/asset'
import {IsoDateString} from '../types/iso-date-string'
import {Podcast} from '../types/podcast'

/**
 * Concrete wrapper around a GitHub release payload that can turn the
 * release body into podcast episode metadata.
 */
export class Release implements IRelease {
  assets: IReleaseAsset[]
  body: string | null | undefined
  created_at: IsoDateString
  html_url: string
  published_at: IsoDateString | null
  url: string

  constructor(data: IRelease) {
    this.assets = data.assets
    this.body = data.body
    this.created_at = data.created_at
    this.html_url = data.html_url
    this.published_at = data.published_at
    this.url = data.url
  }

  /** Wraps each raw IRelease payload in a Release instance. */
  static cast(releases: IRelease[]): Release[] {
    return releases.map(release => new Release(release))
  }

  /**
   * Parses the release body into podcast info.
   *
   * Expected body layout, separated by `---` horizontal rules (CRLF):
   * section 0 = title, section 1 = optional markdown image, section 2 =
   * markdown content (rendered to HTML via the GitHub API).
   *
   * Returns null when there is no body.
   */
  async convertToPodcastInfo(kit: GithubKit): Promise<Podcast | null> {
    // noinspection RegExpRedundantEscape
    const markdownImageRegx = /!\[.*?\]\((.*?)\)/
    const defaultImage = 'https://cdn.jsdelivr.net/gh/bxb100/bxb100@master/png2.png'
    if (this.body) {
      const split = this.body.split(/\r\n---+\r\n/)
      core.debug(`convertToPodcastInfo: ${split}`)
      // NOTE(review): String.split always yields >= 1 element, so this guard
      // never fires; presumably `split.length < 3` was intended to reject
      // bodies missing the image/content sections — TODO confirm.
      if (split.length < 1) {
        return null
      }
      const title = split[0]
      // Optional: first markdown image in section 1 becomes the cover art.
      const regexes = split[1]?.match(markdownImageRegx)
      const image = regexes && regexes[1]
      // NOTE(review): split[2] may be undefined for short bodies; behaviour
      // then depends on kit.renderMarkdown — confirm.
      const content = await kit.renderMarkdown(split[2])
      return {
        title,
        image: image || defaultImage,
        content
      }
    }
    return null
  }
}
/*
 * Copyright 2013-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.kuujo.vertigo.test.unit;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import net.kuujo.vertigo.component.ModuleConfig;
import net.kuujo.vertigo.component.ModuleContext;
import net.kuujo.vertigo.component.VerticleConfig;
import net.kuujo.vertigo.component.VerticleContext;
import net.kuujo.vertigo.impl.ContextBuilder;
import net.kuujo.vertigo.network.NetworkConfig;
import net.kuujo.vertigo.network.NetworkContext;
import net.kuujo.vertigo.network.impl.DefaultNetworkConfig;

import org.junit.Test;
import org.vertx.java.core.json.JsonObject;

/**
 * Network/component/instance/input context tests.
 *
 * Each test builds a NetworkContext from a (possibly configured)
 * DefaultNetworkConfig via ContextBuilder and asserts the derived
 * addresses, defaults and configured values.
 *
 * @author <NAME>
 */
public class ContextTest {

  // Empty network: address is "<cluster>.<network>" and no components exist.
  @Test
  public void testDefaultNetworkContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    assertEquals(0, context.components().size());
  }

  // NOTE(review): identical to testDefaultNetworkContext — no configuration
  // is actually applied here; confirm whether setup is missing.
  @Test
  public void testConfiguredNetworkContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    assertEquals(0, context.components().size());
  }

  // Verticle added with defaults: 1 instance, empty config, not worker/MT.
  @Test
  public void testDefaultFeederVerticleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    network.addVerticle("feeder", "feeder.py");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    VerticleContext verticleContext = context.component("feeder");
    assertEquals("feeder", verticleContext.name());
    assertEquals("vertigo.test.feeder", verticleContext.address());
    assertEquals("feeder.py", verticleContext.main());
    assertTrue(verticleContext.isVerticle());
    assertFalse(verticleContext.isModule());
    assertEquals(new JsonObject(), verticleContext.config());
    assertEquals(1, verticleContext.numInstances());
    assertFalse(verticleContext.isWorker());
    assertFalse(verticleContext.isMultiThreaded());
    assertNotNull(verticleContext.network());
  }

  // Fully configured verticle: config, instances, group, worker, MT.
  @Test
  public void testConfiguredFeederVerticleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    VerticleConfig verticle = network.addVerticle("feeder", "feeder.py");
    verticle.setMain("feeder.py");
    verticle.setConfig(new JsonObject().putString("foo", "bar"));
    verticle.setInstances(2);
    verticle.setGroup("test");
    verticle.setWorker(true);
    verticle.setMultiThreaded(true);
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    VerticleContext verticleContext = context.component("feeder");
    assertEquals("feeder", verticleContext.name());
    assertEquals("vertigo.test.feeder", verticleContext.address());
    assertEquals("feeder.py", verticleContext.main());
    assertTrue(verticleContext.isVerticle());
    assertFalse(verticleContext.isModule());
    assertEquals("bar", verticleContext.config().getString("foo"));
    assertEquals(2, verticleContext.numInstances());
    assertEquals(2, verticleContext.instances().size());
    assertTrue(verticleContext.isWorker());
    assertTrue(verticleContext.isMultiThreaded());
    assertEquals("test", verticleContext.group());
    assertNotNull(verticleContext.network());
  }

  // Module added with defaults: 1 instance, empty config.
  @Test
  public void testDefaultFeederModuleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    network.addModule("feeder", "com.test~test-module~1.0");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    ModuleContext moduleContext = context.component("feeder");
    assertEquals("feeder", moduleContext.name());
    assertEquals("vertigo.test.feeder", moduleContext.address());
    assertEquals("com.test~test-module~1.0", moduleContext.module());
    assertFalse(moduleContext.isVerticle());
    assertTrue(moduleContext.isModule());
    assertEquals(new JsonObject(), moduleContext.config());
    assertEquals(1, moduleContext.numInstances());
    assertNotNull(moduleContext.network());
  }

  // Fully configured module: config, instances, group.
  @Test
  public void testConfiguredFeederModuleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    ModuleConfig verticle = network.addModule("feeder", "com.test~test-module~1.0");
    verticle.setModule("com.test~test-module~1.0");
    verticle.setConfig(new JsonObject().putString("foo", "bar"));
    verticle.setInstances(2);
    verticle.setGroup("test");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    ModuleContext moduleContext = context.component("feeder");
    assertEquals("feeder", moduleContext.name());
    assertEquals("vertigo.test.feeder", moduleContext.address());
    assertEquals("com.test~test-module~1.0", moduleContext.module());
    assertFalse(moduleContext.isVerticle());
    assertTrue(moduleContext.isModule());
    assertEquals("bar", moduleContext.config().getString("foo"));
    assertEquals(2, moduleContext.numInstances());
    assertEquals(2, moduleContext.instances().size());
    assertEquals("test", moduleContext.group());
    assertNotNull(moduleContext.network());
  }

  // Worker variants of the verticle/module tests above.
  @Test
  public void testDefaultWorkerVerticleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    network.addVerticle("worker", "worker.py");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    VerticleContext verticleContext = context.component("worker");
    assertEquals("worker", verticleContext.name());
    assertEquals("vertigo.test.worker", verticleContext.address());
    assertEquals("worker.py", verticleContext.main());
    assertTrue(verticleContext.isVerticle());
    assertFalse(verticleContext.isModule());
    assertEquals(new JsonObject(), verticleContext.config());
    assertEquals(1, verticleContext.numInstances());
    assertFalse(verticleContext.isWorker());
    assertFalse(verticleContext.isMultiThreaded());
    assertNotNull(verticleContext.network());
  }

  @Test
  public void testConfiguredWorkerVerticleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    VerticleConfig verticle = network.addVerticle("worker", "worker.py");
    verticle.setMain("worker.py");
    verticle.setConfig(new JsonObject().putString("foo", "bar"));
    verticle.setInstances(2);
    verticle.setGroup("test");
    verticle.setWorker(true);
    verticle.setMultiThreaded(true);
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    VerticleContext verticleContext = context.component("worker");
    assertEquals("worker", verticleContext.name());
    assertEquals("vertigo.test.worker", verticleContext.address());
    assertEquals("worker.py", verticleContext.main());
    assertTrue(verticleContext.isVerticle());
    assertFalse(verticleContext.isModule());
    assertEquals("bar", verticleContext.config().getString("foo"));
    assertEquals(2, verticleContext.numInstances());
    assertEquals(2, verticleContext.instances().size());
    assertTrue(verticleContext.isWorker());
    assertTrue(verticleContext.isMultiThreaded());
    assertEquals("test", verticleContext.group());
    assertNotNull(verticleContext.network());
  }

  @Test
  public void testDefaultWorkerModuleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    network.addModule("worker", "com.test~test-module~1.0");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    ModuleContext moduleContext = context.component("worker");
    assertEquals("worker", moduleContext.name());
    assertEquals("vertigo.test.worker", moduleContext.address());
    assertEquals("com.test~test-module~1.0", moduleContext.module());
    assertFalse(moduleContext.isVerticle());
    assertTrue(moduleContext.isModule());
    assertEquals(new JsonObject(), moduleContext.config());
    assertEquals(1, moduleContext.numInstances());
    assertNotNull(moduleContext.network());
  }

  @Test
  public void testConfiguredWorkerModuleContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    ModuleConfig module = network.addModule("worker", "com.test~test-module~1.0");
    module.setModule("com.test~test-module~1.0");
    module.setConfig(new JsonObject().putString("foo", "bar"));
    module.setInstances(2);
    module.setGroup("test");
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    ModuleContext moduleContext = context.component("worker");
    assertEquals("worker", moduleContext.name());
    assertEquals("vertigo.test.worker", moduleContext.address());
    assertEquals("com.test~test-module~1.0", moduleContext.module());
    assertFalse(moduleContext.isVerticle());
    assertTrue(moduleContext.isModule());
    assertEquals("bar", moduleContext.config().getString("foo"));
    assertEquals(2, moduleContext.numInstances());
    assertEquals(2, moduleContext.instances().size());
    assertEquals("test", moduleContext.group());
    assertNotNull(moduleContext.network());
  }

  // Instance addresses are the component address with a 1-based suffix.
  @Test
  public void testInstanceContext() {
    DefaultNetworkConfig network = new DefaultNetworkConfig("test");
    VerticleConfig verticle = network.addVerticle("feeder", "feeder.py");
    verticle.setInstances(2);
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    assertEquals("vertigo.test", context.address());
    VerticleContext verticleContext = context.component("feeder");
    assertEquals("feeder", verticleContext.name());
    assertEquals("vertigo.test.feeder", verticleContext.address());
    assertEquals(2, verticleContext.instances().size());
    assertEquals("vertigo.test.feeder-1", verticleContext.instances().get(0).address());
    assertEquals("vertigo.test.feeder-2", verticleContext.instances().get(1).address());
    assertNotNull(verticleContext.instances().get(0).component());
  }

  // notify() merges an updated context's components into the original.
  @Test
  public void testUpdateContext() {
    NetworkConfig network = new DefaultNetworkConfig("test");
    network.addVerticle("sender", "sender.py", 2);
    NetworkContext context = ContextBuilder.buildContext(network, "vertigo");
    NetworkConfig network2 = new DefaultNetworkConfig("test");
    network2.addVerticle("receiver", "receiver.py", 2);
    NetworkContext context2 = ContextBuilder.buildContext(network2, "vertigo");
    assertNotNull(context2.component("receiver"));
    context.notify(context2);
    assertNotNull(context.component("receiver"));
  }

}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
-- | Nested patterns over flat-encoded values, plus Template Haskell helpers
-- to build them from quoted Haskell patterns.
module Data.Pattern(
  Pattern(..),WildCard(..)
  ,patternQ,filterPatternQ,prefixPattern,onlyWildCards,HVar(..)
  ) where

import qualified Data.BitVector as V
import           Data.Foldable (toList)
import           Data.List (intercalate)
import           Data.Typed hiding (Con, Var)
-- import Language.Haskell.Meta.Parse haskell-src-meta,
import           Language.Haskell.TH
import           Language.Haskell.TH.Syntax

-- |A nested pattern
data Pattern v =
    -- |A constructor
    Con String      -- ^Name of the constructor (e.g. "True")
        [Pattern v] -- ^Patterns for the constructor's fields
  | Var v           -- A variable
  | Val [Bool]      -- A value, binary encoded (using 'flat')
  deriving (Functor,Foldable,Eq, Ord, Show, Generic)

instance Flat v => Flat (Pattern v)
instance Model v => Model (Pattern v)

-- |A Variable that can be either names (e.g. "a") or a wildcard "_"
data HVar = V String | W deriving (Eq, Ord, Show, Generic)
instance Flat HVar
instance Model HVar

-- |A wildcard "_", that matches any value
data WildCard = WildCard deriving (Eq, Ord, Show, Generic)
instance Flat WildCard
instance Model WildCard

-- | List pattern matching the given values as a prefix, with a wildcard tail.
prefixPattern :: (Foldable t, Flat a) => t a -> Pattern HVar
prefixPattern = listPatt (Var W)

-- | Fold values into a right-nested "Cons" pattern ending in the given tail.
listPatt :: (Foldable t, Flat a) => Pattern v -> t a -> Pattern v
listPatt = foldr (\a p -> Con "Cons" [valPattern a,p])

-- | Encode a value to its flat bit representation as a 'Val' pattern.
valPattern :: Flat a => a -> Pattern v
valPattern = Val . V.bits

-- x = filter [p|\Message _ (subject:_) _ |]
-- \subject -> Con ... v1
-- filterPatternQ :: Quasi m => Q Pat -> m Exp
-- | Convert a quoted pattern into a lambda binding its named variables,
-- producing the pattern expression with wildcards normalised via
-- 'onlyWildCards'.
filterPatternQ patq = do
  p <- convertPattern (Var . V) (Var W) patq
  -- print $ pmatch p
  let vars = map (\(V v) -> v) . filter isVar $ toList p
  -- TODO: when done, remove haskell-src-meta
  -- let Right c = parseExp $ concat["\\",unwords $ vars,"-> onlyWildCards <$> (",showPatt p,")"]
  -- let c = concat["\\",unwords $ toList p,"->",showPatt p]
  let c = LamE (map (VarP . mkName) vars) (UInfixE (VarE (mkName "onlyWildCards")) (VarE (mkName "<$>")) (ParensE (asExp p)))
  -- print c >> print c2 >> print (c == c2)
  return c

-- | True for named variables, False for wildcards.
isVar (V _) = True
isVar _     = False

-- | Convert a quoted pattern containing only wildcards; named variables are
-- rejected with an error.
patternQ :: Quasi m => Q Pat -> m (Pattern WildCard)
patternQ = convertPattern (\n -> error $ unwords ["Variables are not allowed in patterns, use wildcards (_) only, found:",n]) (Var WildCard)

-- convertPattern :: Quasi m => Q Pat -> m (Pattern String)
-- | Translate a TH 'Pat' into a 'Pattern', using the supplied handlers for
-- named variables and wildcards.
convertPattern :: Quasi m => (String -> Pattern v) -> Pattern v -> Q Pat -> m (Pattern v)
convertPattern onVar onWild p = runQ (p >>= convertM onVar onWild)
  where
    convertM onVar onWild pat = case pat of
      ConP n args -> Con (name n) <$> mapM (convertM onVar onWild) args
      VarP n      -> return $ onVar (name n)
      WildP       -> return onWild
      LitP l      -> return . convLit $ l
      ParensP p   -> convertM onVar onWild p
      -- InfixP p1 (Name (OccName ":" ) (NameG DataName (PkgName "ghc-prim") (ModName "GHC.Types"))) p2 -> error . unwords $ ["GOTIT"]
      p           -> error . unwords $ ["Unsupported pattern",pprint p]
    name (Name (OccName n) _) = n
    convLit l = case l of
      CharL c   -> valPattern c
      StringL s -> valPattern s
      -- IntegerL i -> -- Integer PROB: what type to map to
      -- RationalL r -> Rational

-- | Render a pattern as Haskell source text (legacy; superseded by 'asExp').
showPatt :: Pattern HVar -> String
showPatt (Con n ps) = unwords ["Data.Pattern.Con",show n,"[",intercalate "," . map showPatt $ ps,"]"]
showPatt (Var (V v)) = v -- concat ["val (",v,")"] -- showVar v
--showPatt (Var W) = "Var W" -- "WildCard" -- "WildCard" -- "_"
showPatt p = show p -- show bs -- concat [Data.BitVector,show bs

-- | Render a pattern as a TH expression; named variables become free
-- variables to be bound by the enclosing lambda in 'filterPatternQ'.
asExp (Con n ps) = AppE (AppE (c "Data.Pattern.Con") (LitE (StringL n))) (ListE (map asExp ps))
asExp (Var (V v)) = VarE (mkName v)
asExp (Var W) = AppE (c "Data.Pattern.Var") (c "W")

-- | Constructor expression from a (possibly qualified) name.
c = ConE . mkName

-- | Collapse any wildcard variable into the canonical 'WildCard' token.
onlyWildCards W = WildCard
// NewCreateTechRequestBody builds the HTTP request body from the payload of // the "createTech" endpoint of the "hy_tech" service. func NewCreateTechRequestBody(p *hytech.CreateTechPayload) *CreateTechRequestBody { body := &CreateTechRequestBody{ Name: p.Name, } return body }
/* * Test for getting form fields without node path. */ @Test public void testGetFormFieldsWithoutNodePath() throws ApiException, MessagingException, IOException { String remoteFileName = "TestGetFormFieldsWithoutNodePath.docx"; TestInitializer.UploadFile( PathUtil.get(TestInitializer.LocalTestFolder, fieldFolder + "/FormFilled.docx"), remoteDataFolder + "/" + remoteFileName ); GetFormFieldsRequest request = new GetFormFieldsRequest( remoteFileName, null, remoteDataFolder, null, null, null ); FormFieldsResponse result = TestInitializer.wordsApi.getFormFields(request); assertNotNull(result); assertNotNull(result.getFormFields()); assertNotNull(result.getFormFields().getList()); assertEquals(5, result.getFormFields().getList().size()); assertEquals("FullName", result.getFormFields().getList().get(0).getName()); }
package com.shiming.network.retrofit;

import android.content.Context;

import com.shiming.network.BuildConfig;
import com.shiming.network.cookie.PersistentCookieJar;
import com.shiming.network.cookie.cache.SetCookieCache;
import com.shiming.network.cookie.persistence.SharedPrefsCookiePersistor;

import java.io.File;
import java.util.concurrent.TimeUnit;

import okhttp3.Cache;
import okhttp3.OkHttpClient;
import okhttp3.logging.HttpLoggingInterceptor;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;

/**
 * Lazily-initialised singleton wrapping a shared OkHttp client and Retrofit
 * instance (Gson converter, RxJava2 adapter, disk cache, persistent cookies).
 *
 * Created by shiming on 2017/4/3.
 */
public class SMRetrofit {
    private static SMRetrofit mRetrofit;
    private Context mContext;
    // Whether the link is HTTPS.
    // NOTE(review): mIsHttps is never assigned, so the "rebuild when true"
    // branch in getInstance() is dead — confirm intent.
    private static boolean mIsHttps;
    private String mServerAddressFormal;
    // Connect/read/write timeout, in seconds.
    private static final int DEFAULT_TIMEOUT = 15;
    private OkHttpClient mClient;
    private Retrofit mBuild;

    private SMRetrofit(Context context){
        // Use the application context: an Activity context has a shorter
        // lifecycle and would leak if held by this static singleton.
        mContext = context.getApplicationContext();
        initRetrofit();
    }

    private void initRetrofit() {
        mServerAddressFormal = BuildConfig.SERVER_ADDRESS_PERSONAL;
        initRetrofit(mServerAddressFormal);
    }

    private void initRetrofit(String serverAddressFormal) {
        // TODO: 2018/11/27 what is this logging interceptor actually for?
        HttpLoggingInterceptor logging = new HttpLoggingInterceptor(new HttpLoggingInterceptor.Logger() {
            @Override
            public void log(String message) {
                HttpLoggingInterceptor.Logger.DEFAULT.log(message);
            }
        });
        // Keep credentials out of the logs.
        logging.redactHeader("Authorization");
        logging.redactHeader("Cookie");
        logging.setLevel(HttpLoggingInterceptor.Level.BODY);

        /* ------------------------ */
        // 10 MiB HTTP response cache under the app cache directory.
        File cache = new File(mContext.getCacheDir(), "cache");
        int cacheSize=10*1024*1024;
        Cache cache1 = new Cache(cache, cacheSize);

        mClient = new OkHttpClient.Builder().cache(cache1)
                .addInterceptor(new HeadersInterceptor(mContext))
                .connectTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS)
                .readTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS)
                .writeTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS)
                .addInterceptor(logging)
                // Cookies survive process restarts via SharedPreferences.
                .cookieJar(new PersistentCookieJar(new SetCookieCache(),
                        new SharedPrefsCookiePersistor(mContext)))
                .build();

        mBuild = new Retrofit.Builder()
                .baseUrl(serverAddressFormal)
                // Add Gson support to Retrofit via GsonConverterFactory.
                .addConverterFactory(GsonConverterFactory.create())
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .client(mClient)
                .build();
    }

    /** Double-checked locking accessor for the singleton. */
    public static SMRetrofit getInstance(Context context){
        if (mRetrofit==null||mIsHttps){
            synchronized (SMRetrofit.class){
                if (mRetrofit==null||mIsHttps){
                    mRetrofit=new SMRetrofit(context);
                }
            }
        }
        return mRetrofit;
    }

    public Retrofit getBuild(){
        return mBuild;
    }

    /** Convenience factory: creates a Retrofit service proxy for the given interface. */
    public static <T> T getService(Context context,Class<T> servcie){
        return SMRetrofit.getInstance(context).getBuild().create(servcie);
    }
}
import { Stack, StackProps, Construct, App, Duration } from "@aws-cdk/core";
import { StringParameter } from "@aws-cdk/aws-ssm";
import { CdkPasswordless } from "../lib/index";
import { Function, Runtime, Code } from "@aws-cdk/aws-lambda";

// NOTE(review): `Duration` is imported but never used — confirm before removing.

/**
 * Example stack exercising the CdkPasswordless construct: wires a
 * post-confirmation Lambda into a passwordless Cognito user pool and
 * publishes the resulting pool/client IDs to SSM Parameter Store.
 */
class myStack extends Stack {
  constructor(scope: Construct, id: string, props: StackProps = {}) {
    super(scope, id, props);

    // Lambda invoked by Cognito after a user confirms sign-up.
    const postConfirmation = new Function(this, "postConfirmation", {
      runtime: Runtime.NODEJS_10_X,
      code: Code.fromAsset("./functions"),
      handler: "postConfirm.handler"
    });

    // Passwordless auth setup; sign-ins restricted to the verified domains.
    const pless = new CdkPasswordless(this, "myPasswordLess", {
      mailSubject: "myStack - signIn",
      userPoolClientName: "myClientName",
      verifiedDomains: ["gmail.com"],
      postConfirmationLambda: postConfirmation
    });

    // Expose the generated IDs for other stacks/apps via SSM.
    new StringParameter(this, "userPoolIdParam", {
      parameterName: "/cognito/userPoolId",
      stringValue: pless.userPool.userPoolId
    });

    new StringParameter(this, "userPoolClientIdParam", {
      parameterName: "/cognito/userPoolClientId",
      stringValue: pless.userPoolClient.userPoolClientId
    });
  }
}

new myStack(new App(), "my-stack");
Smart City Security Issues: Depicting Information Security Issues in the Role of an Urban Environment For the first time in the history of humanity, more than half of the population is now living in big cities. This scenario has raised concerns related to the systems that provide basic services to citizens. Moreover, those systems now have the responsibility to empower citizens with information that may aid people in daily decisions, such as those related to education, transport, health and others. This environment creates a set of services that, interconnected, can develop a brand new range of solutions, often referred to by the term System of Systems. In this context, focusing on a smart city, new challenges related to information security arise; those concerns go beyond the concept of privacy, encompassing situations where the entire environment could be affected by issues other than a mere breach of data confidentiality. This paper discusses and proposes nine security issues that can be part of a smart city environment and that explore more than just violations of citizens' privacy.
// NOTE(review): Every test case in this file is currently commented out, so this
// suite performs no assertions; only the two imports below are live (and unused
// while the tests stay disabled). The commented code references a
// `TrailingStopTrigger` that is never imported — re-add its import before
// re-enabling these tests.
import * as data from "../data";
import { expect } from "chai";
// describe("Trailing Stop Trigger Tests", async function() {
// let trigger, trade;
// it("check & validate for trailing stop trigger", done => {
// trigger = {
// ...data.default.trigger,
// params: '{ "intialPrice": "0.9", "trail": "0.1" }' };
// trade = {
// ...data.default.trade,
// price: 0.01 };
// const trailingStopTrigger = new TrailingStopTrigger(trigger);
// trailingStopTrigger.on("close", data => {
// done();
// });
// trailingStopTrigger.onTrade(trade);
// })
// it("check & validate for trailing stop trigger ", done => {
// trigger = {
// ...data.default.trigger,
// params: '{ "intialPrice": "0.9", "trail": "0.1" }' };
// trade = {
// ...data.default.trade,
// price: 0.01 };
// const trailingStopTrigger = new TrailingStopTrigger(trigger);
// trailingStopTrigger.on("close", data => {
// expect(data).to.deep.equal(
// { advice: 'market-sell', price: 1, amount: 0.24 })
// done();
// });
// trailingStopTrigger.onTrade(trade);
// })
// })
Protectivity and safety following recombinant hepatitis B vaccine with different source of bulk compared to hepatitis B (Bio Farma) vaccine in Indonesia Purpose Indonesia, a high populous and the second-highest country in epidemicity of hepatitis B in South-East Asia require maintaining its capacity of monovalent hepatitis B production to keep up with both the national immunization program and global needs. To keep the sustainability of the vaccine, a new bulk is needed to be made available. This study aims to evaluate the immunogenicity and safety of Bio Farma newly formulated recombinant hepatitis B vaccines, which came from different sources of bulk, compared to the already registered hepatitis B vaccine. Materials and Methods An experimental, randomized, double-blind, cohort intervention phase II clinical trial was conducted on three recombinant hepatitis B vaccines from different bulk sources, with Bio Farma registered hepatitis B vaccine as the control group. A total of 536 participants around age 10 to 40 years old were thricely vaccinated with twice serological assessments. The subject’s safety was monitored for 28 days after each vaccination. Results Of 536 enrolled participants, 521 finished the vaccination and serology assessments. The investigational products were proven not to be inferior to the control. All vaccines were well tolerated. No differences in rates of local and systemic reactions were seen between the investigational products and control. No serious adverse event was found to be related to the investigational vaccines. Conclusion Investigational vaccines are shown to be equally immunogenic and safe as the control vaccine. Disease caused by HBV has a worldwide distribution. The endemicity of active HBV infection is reflected in the serologic prevalence of the hepatitis B surface antigen (HBsAg) in the general population of a defined geographical area. 
HBsAg prevalence of ≥8% defines highly endemic areas, prevalence of 5%-7% defines high intermediate, 2%-4% low intermediate, and <2% defines low endemic areas . High(est) endemicity of hepatitis B (currently defined as ≥8% of the population HBsAg-positive) is found in areas of sub-Saharan Africa, South-East Asia, the Eastern Mediterranean countries, South and Western Pacific islands, the interior of the Amazon Basin, and in certain parts of the Caribbean; in these areas, up to 20% of the population may be chronically infected. Intermediate endemicity (2%-8% of the population HBsAg-positive) is located in South-Central and South-West Asia, Eastern and Southern Europe, the Russian Federation, and most of Central and South America . Indonesia is a country with high endemicity of hepatitis B, the second largest in Southeast Asia country after Myanmar. Based on the results of study and blood screening of Palang Merah Indonesia (blood bank) donors, it is estimated that among 100 Indonesians, 10 of them have been infected with hepatitis B and C. So now there are an estimated 28 million Indonesians infected with hepatitis B . Although other primates have been infected in laboratory conditions, HBV infection affects only humans. No animal or insect hosts or vectors are known to exist . World Health Organization (WHO) has estimated that 84% of all infants worldwide received at least 3 doses of hepatitis B containing vaccine in 2015, and 39% of newborns received the birth dose. Vaccinating against hepatitis B has been associated with substantial reductions in acute and chronic hepatitis B infections and hepatocellular carcinoma mortality . Strategic Advisory Group of Experts on Immunization recommends that all infants receive the birth dose during the first contact with health facilities at any time up to the time of the first primary dose. The birth dose given after 24 hours should be reported as a late birth dose vaccination . 
The birth dose should be followed by 2 or 3 doses to complete the primary series . Previous study of recombinant hepatitis B vaccine Bio Farma Vaccine Institute had started producing the hepatitis B recombinant vaccine since the year 2000. The first study for hepatitis B vaccine recombinant in Indonesia was conducted by the Center for Disease Control, National Institute of Health Research and Development, and Ministry of Health. Two hundred and twenty infants were involved in this study and the recommendation from this study is to administer the hepatitis B vaccine in newborn infants . Another study was conducted in 2001 in infants with different immunization schedules from Hasan Sadikin General Hospital in Bandung, West Java. This study's result was 95.4% of infants were protected in 0, 1, 2 schedule and 98.9% were protected in 0, 1, 6 schedule. Vaccination schedule 0, 1, 6 months of age-induced higher antibody compare to schedule 0, 1, 2 months of age . A bridging study was conducted in 2005 to evaluate two different accelerated schedules 0, 7, 21 days (group I) and 0, 1, 2 months (group II), of hepatitis B vaccination in adults. There was no significant difference between those two groups in terms of seroprotection at day 365 (before booster vaccination) . A Post-marketing Surveillance Study was conducted in healthy late adolescents not previously received hepatitis B vaccine. Subjects were administered three doses hepatitis B vaccine recombinant (20 μg of HBsAg). Anti HBs antibodies were detected in 95.5% of 113 subjects with anti HBs <10 mIU/mL, all vaccinations were well tolerated . To meet the national immunization program's needs, it requires to maintain the capacity of monovalent hepatitis B production for national use and worldwide. Therefore, new hepatitis B bulk is needed to sustain the availability of this vaccine. 
This phase II (bridging) study was conducted to evaluate the immunogenicity and safety of the new hepatitis B bulk compared to the registered product. Study design and population This trial was an experimental, randomized, double-blind, prospective intervention study conducted by the Faculty of Medicine of Diponegoro University, Semarang, Indonesia. The study population comprised children, adolescents, and adults aged 10-40 years old. The subjects were grouped into three different batches of monovalent hepatitis B vaccine with the new bulk, and one group received the registered monovalent hepatitis B vaccine as a control. The subjects were randomized per treatment group and were allocated by a randomization list so that each subject received only one randomly assigned treatment group (A/B/C/D). Subjects were recruited in a primary school and a junior high school at Tembalang Semarang and at Nasional Diponegoro Hospital Semarang during September 2020 and finishing in June 2020. https://www.ecevr.org/ https://doi.org/10.7774/cevr.2022.11.1.43 Inclusion criteria were healthy individuals as determined by clinical judgment, including medical history and physical examination; subjects/parents/guardian(s) had been informed regarding the study and signed the informed consent form/informed assent form; subject/parents/guardian(s) would commit to comply with the investigator's instructions and the trial schedule.
Subject concomitantly enrolled or scheduled to be enrolled in another trial; known history of hepatitis B contained vaccination in the last 10 years; evolving severe illness and/or chronic disease and fever (axillary temperature >37.5°C) within the 48 hours preceding enrollment; known history of allergy to any component of the vaccines (based on anamnesis); HB-sAg positive; known history of immunodeficiency disorder (HIV infection, leukemia, lymphoma, or malignancy); with history of uncontrolled coagulopathy or blood disorders contraindicating intramuscular injection; subject who has received a treatment in the previous 4 weeks is likely to alter the immune response (intravenous immunoglobulins, blood-derived products, or corticosteroid therapy and other immunosuppressant) were excluded from the trial. Subjects with pregnancy and have been immunized with any vaccine within 4 weeks prior and expects to receive other vaccines within 4 weeks following immunization were also excluded. Study procedure Three days before the immunization visit, subjects were invited to receive information about the study procedure. After signing the informed consent form, and following the steps of evaluation, preimmunization blood was collected, and will come again for the immunization after receiving the result of HBsAg test. Each subject will receive 3 doses of vaccine with the interval of 1 month. The last visit will be 1 month after the last dose. Study intervention Each dose of recombinant hepatitis B vaccine (1 mL) is an inactivated HbsAg produced in yeast cells (Hansenula polymorpha) using recombinant DNA technology. It is a whitish liquid produced by culture genetically engineered yeast cell which carry the relevant gene of the HbsAg. The inactivated HbsAg (bulk) is imported from Serum Institute of India and then formulated and filled at Bio Farma batch 3660118S, 3660218S, and 3660318S. 
The control vaccine used in this trial was recombinant hepatitis B vaccine (1 mL), an inactivated HbsAg (bulk) imported from The Janssen Vaccine Corp. (Johnson & Johnson, New Brunswick, NJ, USA) and then formulated and filled at Bio Farma batch number 3660318. It had already been licensed. Both vaccines have similar packaging, so the trial could be done double-blind. Safety and immunogenicity evaluation The intensity, duration, and relation of each adverse event to the trial vaccines were evaluated at 30 minutes, 72 hours, and 28 days after injection by interviewing the subjects during the post-surveillance visits: V1, V2, V3, and V4. Any serious adverse events and the study process were reported and evaluated by the Data and Safety Monitoring Board (DSMB). Four mL of blood were collected at visit V0 and V4. The V0 blood sample was divided into two aliquots. The first was used for the HbsAg test before recruitment, and the second aliquot will be stored as the pre-immunization samples. The V4 blood sample was stored as the post-immunization samples. Chemiluminescent Microparticle Immunoassay wase conducted in Commercial Laboratory in Bandung using kit reagent from Abbott Laboratories (Abbott Park, IL, USA). The anti-HBS was performed using Architect ausab reagent kit on architect i 1000sr (Abbott Laboratories). HBsAg was tested using same method but special kit for HBsAg detection. Sample size and analysis Sample size is determined based on 95% confidence interval (CI) and power of the test 80%. Using sample size formula for comparing two a population proportion. To evaluate immunogenicity and safety in three consecutive batches of recombinant hepatitis B vaccine with different source bulk, sample size is determined based on formula for studies one proportion. 
With the assumption that not all of the subject could complete the study, the total number of subjects will be added at least 20%, and will be involved 134 subjects for each group, and totally 536 subjects for the whole study, with expectation the sample size was suitable for both formulas. The GMT and percentage of subjects with anti with anti HbsAg >10 IU/mL 28 days after last injection with their 95% CIs were described before and 1 month after the last dose. The randomization code was opened after the laboratory officially released the test results to the investigators. Vaccine safety was analyzed by computing the number and percentage of any adverse events experienced by subjects. All data were analyzed using IBM SPSS ver. 20.0 (IBM Corp., Armonk, NY, USA). Reporting of serious adverse events Every serious adverse event occurring throughout the trial reported to the DSMB, sponsor, and Ethics Committee, by the investigator as soon as he/she is alerted of it, i.e., within 24 hours, even if the investigator considers that the adverse event is not related to the treatment. Notification was made by phone message/email: then the investigator immediately sent the completed alert form to Ethics Committee. A copy of the alert form should be sent to sponsor. Causality assessment The causal relationship between the investigational product and each adverse event will be categorized by the investigator, sponsor, and DSMB with the WHO classification . Results All blinded samples at V0 and V4 have been tested from September 2019 to 1 March 2020. The randomization code was unblinded on March 4, 2020, which was attended by sponsor, investigator, and unblinded team. Vaccine code A was batch number 0218S; vaccine code B was batch number 0318 (control); vaccine code C was no batch 0118S; vaccine code D was No. no batch 0318S. As many as 536 subjects participated in this study. 
Fifteen participants were not completed the study because seven were terminated due to lost to follow-up, four terminated due to misallocation of vaccine code, and the other (n=4) withdrew because of moving away from the study area. Data on demographic and baseline characteristics are presented in Table 1. About the primary criteria regarding the number and percentage of subjects with anti-HBsAg ≥10 mIU/mL in children and adolescents (group I) are shown in Tables 2 and 3. Percentage of subjects with HbsAg ≥10 mIU/mL in children and adolescent (group I) 28 days after threes doses of vaccination was 100%, 98.51%, 100%, and 100% in batch number 0118S, 0218S, 0318S, and 0318 (control), respectively ( Table 2). Percentage of subjects with HbsAg ≥10 mIU/mL in adult group (group II) 28 days after threes doses of vaccination was 100%, 100%, 100%, and 98.46% in batch number 0118S, 0218S, 0318S, and control, respectively. After vaccination with hepatitis B vaccines recombinant, at least 98.51% children and adolescent, and 98.46% of adults reached protective level against hepatitis B. There was no significant different of seroprotection between group (Table 3). The following tables describe the immunogenicity of investigational products in all subjects (Table 4). One month after three doses of recombinant hepatitis B vaccination, the GMT of anti-HBsAg in batch 0118S, 0218S, and 0318S was 4,088.826 IU/mL, 8,214.546 IU/mL, and 5,154.18 IU/mL, respectively. Percentage of subjects with increasing antibody titer ≥4 times was 100%, 100%, and 100% in batch 0118S, 0218S, and 0318S, respectively. The percentage of subjects with the transition of seronegative to seropositive was 100%, 98.99%, and 100% in batch 0118S, 0218S, and 0318S, respectively. There was a significant difference in GMT 28 days following of investigational product between batch numbers of Bio Farma hepatitis B vaccine. 
There was no significant difference in seroprotection (p=1) and seroconversion (p>0.05) following of investigational product between batch number of Bio Farma Hepatitis B vaccinee (Table 6). Safety The following table shows the results of the evaluation regarding safety after dosing vaccination (Table 7). If the vaccine group was analysis between batch vaccines, most of adverse reactions were not significant difference (Table 8). During the study, seven subjects were hospitalized, four for vaccine group and three from control groups. All events were reported to the Ethics Committee and DSMB. The causality assessment is coincidental and not related to vaccine or vaccination. Discussion In Indonesia, the hepatitis B vaccination program for infants has been implemented nationally since 1997 . The plasma-derived vaccine was previously used until 1977, which was then replaced by a recombinant hepatitis B vaccine . Several studies have shown differences in the protection between plasma-derived and recombinant vaccines, where recombinant vaccines have a higher protective effect . The administration of the hepatitis B vaccine was carried out with the recommendation of one hepatitis B-0 (within the first 24 hours after birth) and three doses pentavalent vaccine (DPT /hepatitis B/Haemophilus influenzae type b). In 2013, the coverage of hepatitis B-0 and three doses of DPT-hepatitis B in children aged 12-23 months were 79.1% and 75.6%, respectively. These numbers indicate there are still children who have not received the hepatitis B vaccine yet . From our study, administering three doses of recombinant hepatitis B vaccine provided protection for 519 subjects (99.61%). This result is also supported by several other studies. Arjmand et al. reported that administration of three doses of hepatitis B vaccine in a pentavalent vaccine resulted in protection against 98.3% (393 children) of the total subjects (400 children). 
Vaccine administration routes could also affect immunogenicity. Research in Japan on subjects aged 20-35 years old reported that the intramuscular route was more immunogenic than subcutaneous . A study reported that the 4-dose hepatitis B vaccine produced higher protection than 3-doses https://www.ecevr.org/ https://doi.org/10.7774/cevr.2022.11.1.43 administration . Research in Singapore reported 3-dose hepatitis B vaccine, both monovalent and the vaccine combination showed promising results. There were no significant differences in reactivity between the four vaccine codes given in this study in both groups. After administering 3 doses of the vaccine, two subjects (0.38%) did not undergo seroconversion, one subject from group I who received the vaccine code A and one subject from group II who received the vaccine code B. This number is still lower compared to several studies in the United States that reported up to 5%-10% of vaccine recipients did not respond . A study in Iran, with a total of 538 subjects, reported there were 15.6% who failed to produce antibody ti-ters >10 mIU/mL. This can be caused by several factors such as the environment and genetics, human leukocyte antigen (HLA), and immune tolerance of each individual . A study in Iran, with a total of 538 subjects, reported there were 15.6% who failed to produce antibody titers >10 mIU/mL. This can be caused by several factors such as the environment and genetics, HLA, and immune tolerance of each individual . In our study, the most common solicited local reaction was local pain. The majority of local pain was reported to occur within 30 minutes after any dose, with the group I was 15.6%-35.8% and group II was 21.5%-38.8%. These results are consistent with previous research conducted in India that found injection site pain of 11.5% of the total subjects as the most https://www.ecevr.org/ https://doi.org/10.7774/cevr.2022.11.1.43 local reaction . 
Other study in Thailand also reported that the most common adverse event was pain at the injection site (42.4%) . The majority of local pain in our study was reported to occur within 30 minutes after any dose. This result is different from the study in 2014, which reported that local pain was found more in 1-day post-vaccination than 30 minutes after . There was no significant difference between solicited systemic reactions. Most of the systemic reactions were below 10% after any dose. On the contrary, other study reported that most of the adverse events following immunization from 1,013 reports were general systemic reactions (47%) followed by local reactions (26%) . Fatigue was the most common systemic reaction in both groups after the first and second doses. This finding is consistent with studies conducted in Malaysia and Thailand that reported fatigue as the most common systemic reaction following hepatitis B vaccination . On the other hand, a study reported that fever was the main adverse event (58.9%) . Induration and rash also noted as the main adverse events in Brazil . Almost all of the local and systemic reactions were mild. Most of the reactions, local and systemic, resolved spontaneously in within 72 hours after vaccination. This also supported by a study conducted in Indonesia . There were no significant differences of adverse events between the four vaccine codes. Several serious adverse events reported during the study were considered unrelated to the study vaccine and the procedure. The investigational recombinant hepatitis B vaccine has proven high immunogenicity and an acceptable safety profile. This study supports the conclusion that the Bio Farma recombinant hepatitis B with new sources of hepatitis B bulk is a suitable complement for the licensed equivalent vaccines based on similar safety profiles and antibody responses to the vaccine antigens after 3-dose primary vaccination series. 
In conclusion, all three investigational vaccines are shown to be equally immunogenic and safe as the control vaccine. There were no significant differences in immunogenicity results or adverse events between the investigational products and the control, nor between the individual batch numbers of the Bio Farma hepatitis B vaccine.
package test.vetjobs.oa;

import com.viaoa.object.*;
import com.viaoa.hub.*;
import com.viaoa.annotation.*;
import com.viaoa.util.OADate;

/**
 * OAObject domain class for a job posting (title, rates, location, counters,
 * and links to employer/categories/locations/folder/batch rows).
 * Generated-style boilerplate: every setter captures the old value and calls
 * firePropertyChange so OA hubs/listeners observe updates; link getters are
 * lazy and resolve through getHub()/getObject() on first access.
 * The @OATable annotation declares the DB indexes used for lookups by
 * State, RefreshDate, Reference, and EmployerId.
 */
@OAClass( shortName = "job", displayName = "Job" )
@OATable( indexes = {
    @OAIndex(name = "JobState", columns = {@OAIndexColumn(name = "State")}),
    @OAIndex(name = "JobRefreshDate", columns = {@OAIndexColumn(name = "RefreshDate")}),
    @OAIndex(name = "JobReference", columns = {@OAIndexColumn(name = "Reference")}),
    @OAIndex(name = "JobEmployer", columns = { @OAIndexColumn(name = "EmployerId") }),
} )
public class Job extends OAObject {
    private static final long serialVersionUID = 1L;

    // Property-name constants: passed to firePropertyChange/getHub/getObject
    // so property names are not repeated as string literals.
    public static final String P_Id = "Id";
    public static final String P_Reference = "Reference";
    public static final String P_CreateDate = "CreateDate";
    public static final String P_RefreshDate = "RefreshDate";
    public static final String P_RateFrom = "RateFrom";
    public static final String P_RateTo = "RateTo";
    public static final String P_Hourly = "Hourly";
    public static final String P_Contract = "Contract";
    public static final String P_Fulltime = "Fulltime";
    public static final String P_Title = "Title";
    public static final String P_Benefits = "Benefits";
    public static final String P_Description = "Description";
    public static final String P_City = "City";
    public static final String P_Region = "Region";
    public static final String P_State = "State";
    public static final String P_Country = "Country";
    public static final String P_Contact = "Contact";
    public static final String P_Email = "Email";
    public static final String P_AutoResponse = "AutoResponse";
    public static final String P_PositionsAvailable = "PositionsAvailable";
    public static final String P_ViewCount = "ViewCount";
    public static final String P_SearchCount = "SearchCount";
    public static final String P_ClickCount = "ClickCount";
    public static final String P_ViewCountMTD = "ViewCountMTD";
    public static final String P_SearchCountMTD = "SearchCountMTD";
    public static final String P_ClickCountMTD = "ClickCountMTD";
    public static final String P_ViewCountWTD = "ViewCountWTD";
    public static final String P_SearchCountWTD = "SearchCountWTD";
    public static final String P_ClickCountWTD = "ClickCountWTD";
    public static final String P_LastMTD = "LastMTD";
    public static final String P_LastWTD = "LastWTD";
    public static final String P_Categories = "Categories";
    public static final String P_Locations = "Locations";
    public static final String P_Employer = "Employer";
    public static final String P_Folder = "Folder";
    public static final String P_BatchRows = "BatchRows";

    // Persistent scalar properties (columns — see @OAColumn on the getters).
    protected int id;
    protected String reference;
    protected OADate createDate;
    protected OADate refreshDate;
    protected float rateFrom;
    protected float rateTo;
    protected boolean hourly;
    protected boolean contract;
    protected boolean fulltime;
    protected String title;
    protected String benefits;
    protected String description;
    protected String city;
    protected String region;
    protected String state;
    protected String country;
    protected String contact;
    protected String email;
    protected boolean autoResponse;
    protected int positionsAvailable;
    // Usage counters: totals plus month-to-date (MTD) and week-to-date (WTD)
    // buckets, with the dates of the last MTD/WTD rollover below.
    protected int viewCount;
    protected int searchCount;
    protected int clickCount;
    protected int viewCountMTD;
    protected int searchCountMTD;
    protected int clickCountMTD;
    protected int viewCountWTD;
    protected int searchCountWTD;
    protected int clickCountWTD;
    protected OADate lastMTD;
    protected OADate lastWTD;

    // Links to other objects.
    // transient: lazily resolved references/hubs, not serialized directly.
    protected transient Hub<Category> hubCategories;
    protected transient Hub<Location> hubLocations;
    protected transient Employer employer;
    protected transient Folder folder;
    protected transient Hub<BatchRow> hubBatchRows;

    /** No-arg constructor (required for OA/serialization). */
    public Job() {
    }

    /** Convenience constructor that assigns the primary key. */
    public Job(int id) {
        this();
        setId(id);
    }

    // ---- Scalar properties: each setter fires a property-change event. ----

    /** Primary key. */
    @OAProperty(displayLength = 5) @OAId() @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getId() { return id; }
    public void setId(int newValue) { int old = id; this.id = newValue; firePropertyChange(P_Id, old, this.id); }

    @OAProperty(maxLength = 35, displayLength = 9) @OAColumn(maxLength = 35)
    public String getReference() { return reference; }
    public void setReference(String newValue) { String old = reference; this.reference = newValue; firePropertyChange(P_Reference, old, this.reference); }

    @OAProperty(displayName = "Create Date", displayLength = 10) @OAColumn(sqlType = java.sql.Types.DATE)
    public OADate getCreateDate() { return createDate; }
    public void setCreateDate(OADate newValue) { OADate old = createDate; this.createDate = newValue; firePropertyChange(P_CreateDate, old, this.createDate); }

    @OAProperty(displayName = "Refresh Date", displayLength = 10) @OAColumn(sqlType = java.sql.Types.DATE)
    public OADate getRefreshDate() { return refreshDate; }
    public void setRefreshDate(OADate newValue) { OADate old = refreshDate; this.refreshDate = newValue; firePropertyChange(P_RefreshDate, old, this.refreshDate); }

    @OAProperty(displayName = "Rate From", decimalPlaces = 2, displayLength = 8) @OAColumn(sqlType = java.sql.Types.FLOAT)
    public float getRateFrom() { return rateFrom; }
    public void setRateFrom(float newValue) { float old = rateFrom; this.rateFrom = newValue; firePropertyChange(P_RateFrom, old, this.rateFrom); }

    @OAProperty(displayName = "Rate To", decimalPlaces = 2, displayLength = 6) @OAColumn(sqlType = java.sql.Types.FLOAT)
    public float getRateTo() { return rateTo; }
    public void setRateTo(float newValue) { float old = rateTo; this.rateTo = newValue; firePropertyChange(P_RateTo, old, this.rateTo); }

    @OAProperty(displayLength = 6) @OAColumn(sqlType = java.sql.Types.BOOLEAN)
    public boolean getHourly() { return hourly; }
    public void setHourly(boolean newValue) { boolean old = hourly; this.hourly = newValue; firePropertyChange(P_Hourly, old, this.hourly); }

    @OAProperty(displayLength = 8) @OAColumn(sqlType = java.sql.Types.BOOLEAN)
    public boolean getContract() { return contract; }
    public void setContract(boolean newValue) { boolean old = contract; this.contract = newValue; firePropertyChange(P_Contract, old, this.contract); }

    @OAProperty(displayLength = 5) @OAColumn(sqlType = java.sql.Types.BOOLEAN)
    public boolean getFulltime() { return fulltime; }
    public void setFulltime(boolean newValue) { boolean old = fulltime; this.fulltime = newValue; firePropertyChange(P_Fulltime, old, this.fulltime); }

    @OAProperty(maxLength = 75, displayLength = 5) @OAColumn(maxLength = 75)
    public String getTitle() { return title; }
    public void setTitle(String newValue) { String old = title; this.title = newValue; firePropertyChange(P_Title, old, this.title); }

    // Benefits/Description are stored as CLOBs (large text).
    @OAProperty(maxLength = 8, displayLength = 8) @OAColumn(sqlType = java.sql.Types.CLOB)
    public String getBenefits() { return benefits; }
    public void setBenefits(String newValue) { String old = benefits; this.benefits = newValue; firePropertyChange(P_Benefits, old, this.benefits); }

    @OAProperty(maxLength = 11, displayLength = 11) @OAColumn(sqlType = java.sql.Types.CLOB)
    public String getDescription() { return description; }
    public void setDescription(String newValue) { String old = description; this.description = newValue; firePropertyChange(P_Description, old, this.description); }

    @OAProperty(maxLength = 50, displayLength = 4) @OAColumn(maxLength = 50)
    public String getCity() { return city; }
    public void setCity(String newValue) { String old = city; this.city = newValue; firePropertyChange(P_City, old, this.city); }

    @OAProperty(maxLength = 50, displayLength = 6) @OAColumn(maxLength = 50)
    public String getRegion() { return region; }
    public void setRegion(String newValue) { String old = region; this.region = newValue; firePropertyChange(P_Region, old, this.region); }

    @OAProperty(maxLength = 30, displayLength = 5) @OAColumn(maxLength = 30)
    public String getState() { return state; }
    public void setState(String newValue) { String old = state; this.state = newValue; firePropertyChange(P_State, old, this.state); }

    @OAProperty(maxLength = 45, displayLength = 5) @OAColumn(maxLength = 45)
    public String getCountry() { return country; }
    public void setCountry(String newValue) { String old = country; this.country = newValue; firePropertyChange(P_Country, old, this.country); }

    @OAProperty(maxLength = 75, displayLength = 7) @OAColumn(maxLength = 75)
    public String getContact() { return contact; }
    public void setContact(String newValue) { String old = contact; this.contact = newValue; firePropertyChange(P_Contact, old, this.contact); }

    @OAProperty(maxLength = 200, displayLength = 5) @OAColumn(maxLength = 200)
    public String getEmail() { return email; }
    public void setEmail(String newValue) { String old = email; this.email = newValue; firePropertyChange(P_Email, old, this.email); }

    @OAProperty(displayName = "Auto Response", displayLength = 12) @OAColumn(sqlType = java.sql.Types.BOOLEAN)
    public boolean getAutoResponse() { return autoResponse; }
    public void setAutoResponse(boolean newValue) { boolean old = autoResponse; this.autoResponse = newValue; firePropertyChange(P_AutoResponse, old, this.autoResponse); }

    @OAProperty(displayName = "Positions Available", displayLength = 18) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getPositionsAvailable() { return positionsAvailable; }
    public void setPositionsAvailable(int newValue) { int old = positionsAvailable; this.positionsAvailable = newValue; firePropertyChange(P_PositionsAvailable, old, this.positionsAvailable); }

    // ---- Usage counters (total / month-to-date / week-to-date). ----

    @OAProperty(displayName = "View Count", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getViewCount() { return viewCount; }
    public void setViewCount(int newValue) { int old = viewCount; this.viewCount = newValue; firePropertyChange(P_ViewCount, old, this.viewCount); }

    @OAProperty(displayName = "Search Count", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getSearchCount() { return searchCount; }
    public void setSearchCount(int newValue) { int old = searchCount; this.searchCount = newValue; firePropertyChange(P_SearchCount, old, this.searchCount); }

    @OAProperty(displayName = "Click Count", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getClickCount() { return clickCount; }
    public void setClickCount(int newValue) { int old = clickCount; this.clickCount = newValue; firePropertyChange(P_ClickCount, old, this.clickCount); }

    @OAProperty(displayName = "View Count MTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getViewCountMTD() { return viewCountMTD; }
    public void setViewCountMTD(int newValue) { int old = viewCountMTD; this.viewCountMTD = newValue; firePropertyChange(P_ViewCountMTD, old, this.viewCountMTD); }

    @OAProperty(displayName = "Search Count MTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getSearchCountMTD() { return searchCountMTD; }
    public void setSearchCountMTD(int newValue) { int old = searchCountMTD; this.searchCountMTD = newValue; firePropertyChange(P_SearchCountMTD, old, this.searchCountMTD); }

    @OAProperty(displayName = "Click Count MTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getClickCountMTD() { return clickCountMTD; }
    public void setClickCountMTD(int newValue) { int old = clickCountMTD; this.clickCountMTD = newValue; firePropertyChange(P_ClickCountMTD, old, this.clickCountMTD); }

    @OAProperty(displayName = "View Count WTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getViewCountWTD() { return viewCountWTD; }
    public void setViewCountWTD(int newValue) { int old = viewCountWTD; this.viewCountWTD = newValue; firePropertyChange(P_ViewCountWTD, old, this.viewCountWTD); }

    @OAProperty(displayName = "Search Count WTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getSearchCountWTD() { return searchCountWTD; }
    public void setSearchCountWTD(int newValue) { int old = searchCountWTD; this.searchCountWTD = newValue; firePropertyChange(P_SearchCountWTD, old, this.searchCountWTD); }

    @OAProperty(displayName = "Click Count WTD", displayLength = 5) @OAColumn(sqlType = java.sql.Types.INTEGER)
    public int getClickCountWTD() { return clickCountWTD; }
    public void setClickCountWTD(int newValue) { int old = clickCountWTD; this.clickCountWTD = newValue; firePropertyChange(P_ClickCountWTD, old, this.clickCountWTD); }

    @OAProperty(displayName = "Last MTD", displayLength = 7) @OAColumn(sqlType = java.sql.Types.DATE)
    public OADate getLastMTD() { return lastMTD; }
    public void setLastMTD(OADate newValue) { OADate old = lastMTD; this.lastMTD = newValue; firePropertyChange(P_LastMTD, old, this.lastMTD); }

    @OAProperty(displayName = "Last WTD", displayLength = 7) @OAColumn(sqlType = java.sql.Types.DATE)
    public OADate getLastWTD() { return lastWTD; }
    public void setLastWTD(OADate newValue) { OADate old = lastWTD; this.lastWTD = newValue; firePropertyChange(P_LastWTD, old, this.lastWTD); }

    // ---- Links: lazily resolved on first access via getHub()/getObject(). ----

    /** Many-to-many with Category through the JobCategoryLink table. */
    @OAMany(toClass = Category.class, reverseName = Category.P_Jobs)
    @OALinkTable(name = "JobCategoryLink", indexName = "CategoryJob", columns = {"JobId"})
    public Hub<Category> getCategories() { if (hubCategories == null) { hubCategories = (Hub<Category>) getHub(P_Categories); } return hubCategories; }

    /** Many-to-many with Location through the JobLocationLink table. */
    @OAMany(toClass = Location.class, reverseName = Location.P_Jobs)
    @OALinkTable(name = "JobLocationLink", indexName = "LocationJob", columns = {"JobId"})
    public Hub<Location> getLocations() { if (hubLocations == null) { hubLocations = (Hub<Location>) getHub(P_Locations); } return hubLocations; }

    /** Owning employer; required (FK column EmployerId). */
    @OAOne(reverseName = Employer.P_Jobs, required = true)
    @OAFkey(columns = {"EmployerId"})
    public Employer getEmployer() { if (employer == null) { employer = (Employer) getObject(P_Employer); } return employer; }
    public void setEmployer(Employer newValue) { Employer old = this.employer; this.employer = newValue; firePropertyChange(P_Employer, old, this.employer); }

    /** Optional containing folder (FK column FolderId). */
    @OAOne(reverseName = Folder.P_Jobs)
    @OAFkey(columns = {"FolderId"})
    public Folder getFolder() { if (folder == null) { folder = (Folder) getObject(P_Folder); } return folder; }
    public void setFolder(Folder newValue) { Folder old = this.folder; this.folder = newValue; firePropertyChange(P_Folder, old, this.folder); }

    /** One-to-many collection of BatchRow rows referencing this job. */
    @OAMany(displayName = "Batch Rows", toClass = BatchRow.class, reverseName = BatchRow.P_Job)
    public Hub<BatchRow> getBatchRows() { if (hubBatchRows == null) { hubBatchRows = (Hub<BatchRow>) getHub(P_BatchRows); } return hubBatchRows; }
}
In 2008, union thugs and members of the New Black Panthers showed up at certain polling places in Ohio and Philadelphia to intimidate Republican voters. To be clear, they didn’t say they were there for that reason. Rather, they said they were there to be sure everyone got to vote (wink, wink, nod, nod.) Members of the New Black Panthers plan to be in Philly again today for the same reasons, reportedly, and union thugs will be joining them where possible. Enter Retired Navy Captain Benjamin Brink, who is sending out “former and retired Special Forces and SEALS” to balance out the presence of the union thugs and New Black Panthers in Philly and elsewhere. Brink’s goal is to be sure Romney supporters aren’t intimidated into leaving the polls without voting, and to accomplish it, his slogan is “Get Out the Vet.” IMPORTANT: Brink makes it clear that the Special Forces and SEALS are not being sent to engage “intimidators” but to observe and report voter intimidation by Obama’s thugs. In this way, those taking part in Brink’s “Get Out the Vet” will be able to aid police and other officials on the ground whose personnel are simply spread too thin. Here’s the bottom line: Voters who may have otherwise been intimidated by union thugs or the presence of the New Black Panthers will certainly be encouraged when they see their polls flanked by heroes of the U.S. military: heroes driven not by the revenge Obama can’t quit talking about, but by “Love of Country.” God bless Benjamin Brink and the Special Forces and SEALS that stand with him today.
import fs from 'fs'; import path from 'path'; import {expect} from 'chai'; import {FileCleanner, FilePiper} from '../src/piper'; import {MakeDirs} from '../src/piper/utils'; import Analyse from '../src/piper/analyse'; describe('piper文件中转功能单元测试', () => { describe('FileCleanner()清除文件方法测试', () => { let tmpDir = '', tmp1 = '', tmp2 = '' beforeEach(() => { tmpDir = path.resolve(__dirname, './tmp'); tmp1 = path.resolve(__dirname, './tmp/tmp1.js'); tmp2 = path.resolve(__dirname, './tmp/tmp2.js'); if(!fs.existsSync(tmpDir)) { fs.mkdirSync(tmpDir); } if(!fs.existsSync(tmp1)) { fs.writeFileSync(tmp1, '测试文件1'); } if(!fs.existsSync(tmp2)) { fs.writeFileSync(tmp2, '测试文件2'); } }) after(() => { if(fs.existsSync(tmpDir)) { if(fs.existsSync(tmp1)) { fs.unlinkSync(tmp1); } if(fs.existsSync(tmp2)) { fs.unlinkSync(tmp2); } fs.rmdirSync(tmpDir) } }) it('FileCleanner()方法返回值是一个promise', () => { expect(FileCleanner(tmp1)).to.be.a('promise'); }); it('FileCleanner()可以删除某单个文件', async () => { await FileCleanner(tmp1); expect(fs.existsSync(tmp1)).to.be.not.ok; }); it('FileCleanner()可以同时删除多个文件', async () => { await FileCleanner(tmp1, tmp2); expect(fs.existsSync(tmp1)).to.be.not.ok; expect(fs.existsSync(tmp2)).to.be.not.ok; }); it('FileCleanner()可以同时删除整个目录', async () => { await FileCleanner(tmpDir); expect(fs.existsSync(tmpDir)).to.be.not.ok; }); }) describe('FilePiper()文件处理转移测试', () => { let tmpDir: string, tmp1: string, tmp2: string, tmp3: string, tmp4: string, tmpCSSDir: string, targetDir: string; before(() => { targetDir = path.resolve(__dirname, './target'); tmpDir = path.resolve(__dirname, './tmp'); tmpCSSDir = path.resolve(__dirname, './tmp/css'); tmp1 = path.resolve(__dirname, './tmp/tmp1.text'); tmp2 = path.resolve(__dirname, './tmp/tmp2.js'); tmp3 = path.resolve(__dirname, './tmp/tmp3.json'); tmp4 = path.resolve(__dirname, './tmp/css/tmp4.css'); if(!fs.existsSync(tmpDir)) { fs.mkdirSync(tmpDir); } if(!fs.existsSync(tmpCSSDir)) { fs.mkdirSync(tmpCSSDir); } if(!fs.existsSync(tmp1)) { 
fs.writeFileSync(tmp1, '测试文本<%= locals.author%>'); } if(!fs.existsSync(tmp2)) { fs.writeFileSync(tmp2, `<% if(locals.author) { %>const ddd = "<%= locals.author %>";<% } %>`); } if(!fs.existsSync(tmp3)) { fs.writeFileSync(tmp3, `{"name":"<%= locals.author%>"}`); } if(!fs.existsSync(tmp4)) { fs.writeFileSync(tmp4, '.css{background:<%= locals.author%>}'); } }); beforeEach(async () => { if(fs.existsSync(targetDir)) { await FileCleanner(targetDir) } }) after(async () => { if(fs.existsSync(tmpDir)) { await FileCleanner(tmpDir); await FileCleanner(targetDir); } }) it('FilePiper()方法可以递归复制目录里的所有文件到指定目录', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, }); expect(fs.existsSync(targetDir)).to.be.ok; expect(fs.existsSync(tmp1.replace(tmpDir, targetDir))).to.be.ok; expect(fs.existsSync(tmp2.replace(tmpDir, targetDir))).to.be.ok; expect(fs.existsSync(tmp3.replace(tmpDir, targetDir))).to.be.ok; expect(fs.existsSync(tmpCSSDir.replace(tmpDir, targetDir))).to.be.ok; expect(fs.existsSync(tmp4.replace(tmpDir, targetDir))).to.be.ok; }); it('FilePiper()方法可以传入parseData动态渲染不同内容', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, }); expect(fs.readFileSync(tmp2.replace(tmpDir, targetDir)).toString() === 'const ddd = "000"'); }); it('FilePiper()方法可以传入ignore过滤不需要的文件', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, ignore: [ /tmp3/ ] }); expect(fs.existsSync(tmp2.replace(tmpDir, targetDir))).to.be.ok; expect(fs.existsSync(tmp3.replace(tmpDir, targetDir))).to.be.not.ok; }); it('FilePiper()方法动态渲染内容,默认只包含.js .json文件', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, }); expect(fs.readFileSync(tmp1.replace(tmpDir, targetDir)).toString() === '测试文本<%= locals.author%>'); expect(fs.readFileSync(tmp2.replace(tmpDir, targetDir)).toString() === 'const ddd = "000"'); expect(fs.readFileSync(tmp3.replace(tmpDir, targetDir)).toString() === '{"name":"000"}'); 
expect(fs.readFileSync(tmp4.replace(tmpDir, targetDir)).toString() === '.css{background:<%= locals.author%>}'); }); it('FilePiper()方法可以配置parseInclude 额外添加需要动态渲染的文件,exlcude 排除指定不需要的动态渲染的文件', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, parseInclude: [ /^.+\.text/, /^.+\.css/, ], parseExclude: [ /tmp3.json$/ ] }); expect(fs.readFileSync(tmp1.replace(tmpDir, targetDir)).toString() === '测试文本000'); expect(fs.readFileSync(tmp2.replace(tmpDir, targetDir)).toString() === 'const ddd = "000"'); expect(fs.readFileSync(tmp3.replace(tmpDir, targetDir)).toString() === '{"name":"<%= author%>"}'); expect(fs.readFileSync(tmp4.replace(tmpDir, targetDir)).toString() === '.css{background:000}'); }); it('FilePiper()方法可以配置parseInclude exlcude 短路径匹配模式具有同样的效果', async () => { await FilePiper(tmpDir, targetDir, { parseData: { author: '000' }, parseInclude: [ { path: './tmp/tmp1.text' }, { path: './tmp/css' } ], parseExclude: [ { path: './tmp/tmp3.json' } ] }); expect(fs.readFileSync(tmp1.replace(tmpDir, targetDir)).toString() === '测试文本000'); expect(fs.readFileSync(tmp2.replace(tmpDir, targetDir)).toString() === 'const ddd = "000"'); expect(fs.readFileSync(tmp3.replace(tmpDir, targetDir)).toString() === '{"name":"<%= author%>"}'); expect(fs.readFileSync(tmp4.replace(tmpDir, targetDir)).toString() === '.css{background:000}'); }); }) describe('Analyse()分析配置文件方法测试', () => { let tmpDir: string, tmp: string, otherDir: string, otherTmp: string, otherTmp2: string; before(() => { tmpDir = path.resolve(__dirname, './tmp'); otherDir = path.resolve(__dirname, './other'); tmp = path.resolve(__dirname, './tmp/.hasakirc.js'); otherTmp = path.resolve(__dirname, './other/.ccc.js'); otherTmp2 = path.resolve(__dirname, './other/.ggg.js'); if(!fs.existsSync(tmpDir)) { fs.mkdirSync(tmpDir); } if(!fs.existsSync(otherDir)) { fs.mkdirSync(otherDir); } if(!fs.existsSync(tmp)) { fs.writeFileSync(tmp, ` module.exports = { parseExclude: [ /\\d/, ], parseInclude: [ /\\w/, ], ignore: 
[ /\\s/, ], question: [ { type: 'input', message: '请输入作者名称', name: 'author', filter(input) { return input || ''; }, validate(input) { return input && input.length > 0; } }, ] } `) } if(!fs.existsSync(otherTmp)) { fs.writeFileSync(otherTmp, ` module.exports = { parseExclude: [ /\\d/, ], parseInclude: [ /\\w/, ], ignore: [ /\\s/, ], question: [ { type: 'input', message: '请输入作者名称', name: 'author', filter(input) { return input || ''; }, validate(input) { return input && input.length > 0; } }, ] } `) } }); after(() => { if(fs.existsSync(tmpDir)) { if(fs.existsSync(tmp)) { fs.unlinkSync(tmp); } fs.rmdirSync(tmpDir); } if(fs.existsSync(otherDir)) { if(fs.existsSync(otherTmp)) { fs.unlinkSync(otherTmp); } if(fs.existsSync(otherTmp2)) { fs.unlinkSync(otherTmp2); } fs.rmdirSync(otherDir); } }) it('Analyse()方法可以读取指定任意目录位置下的配置文件', () => { const analyseResult1 = Analyse(tmp); const analyseResult2 = Analyse(otherTmp); expect(analyseResult1).to.be.a('object'); expect(analyseResult2).to.be.a('object'); }); it('Analyse()方法读取的配置包含文件中转传输的必要字段', () => { const analyseResult = Analyse(tmp); expect(analyseResult).to.have.property('parseExclude').with.deep.equal([/\d/]); expect(analyseResult).to.have.property('parseInclude').with.deep.equal([/\w/]); expect(analyseResult).to.have.property('ignore').with.deep.equal([/\s/]); expect(analyseResult).to.have.property('question').with.lengthOf(1); }); it('Analyse()方法读取不包含配置文件的目录时返回空对象', () => { const analyseResult = Analyse(otherTmp2); expect(analyseResult).to.deep.equal({}); }); }) describe('Utils 方法测试', () => { const tmp = path.resolve(__dirname, './a') const dir = path.resolve(tmp, './b/c'); before(async () => { if(fs.existsSync(tmp)) { await FileCleanner(path.resolve(__dirname, './a')) } }) after(async () => { if(fs.existsSync(tmp)) { await FileCleanner(path.resolve(__dirname, './a')) } }) it('MakeDirs() 可以创建多级目录', () => { MakeDirs(dir); expect(fs.existsSync(dir)).to.be.ok; expect(fs.statSync(dir).isDirectory()).to.be.ok; }) }) })
/*
 * Copyright (c) 2021 - <NAME> - https://www.yupiik.com
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.yupiik.uship.webserver.tomcat;

import io.yupiik.uship.webserver.tomcat.loader.LaunchingClassLoaderLoader;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.ServletContext;
import jakarta.servlet.annotation.HandlesTypes;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.core.StandardContext;
import org.apache.catalina.core.StandardHost;
import org.apache.catalina.session.StandardManager;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.util.StandardSessionIdGenerator;
import org.apache.catalina.valves.AbstractAccessLogValve;
import org.apache.catalina.valves.ErrorReportValve;
import org.apache.coyote.AbstractProtocol;
import org.apache.tomcat.util.modeler.Registry;

import java.io.CharArrayWriter;
import java.util.EnumSet;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

import static java.util.Optional.ofNullable;

/**
 * Programmatic embedded Tomcat: builds, starts and stops a single-host,
 * single-context server driven by a {@link TomcatWebServerConfiguration}.
 * Lifecycle: {@link #create()} to start, {@link #close()} to stop and destroy.
 */
public class TomcatWebServer implements AutoCloseable {
    private final TomcatWebServerConfiguration configuration;
    private Tomcat tomcat;

    /** Reflection/CDI-friendly constructor; configuration must be injected before {@link #create()}. */
    protected TomcatWebServer() {
        this(null);
    }

    public TomcatWebServer(final TomcatWebServerConfiguration configuration) {
        this.configuration = configuration;
    }

    /** @return the underlying Tomcat instance, null until {@link #create()} ran. */
    public Tomcat getTomcat() {
        return tomcat;
    }

    /**
     * @return the actual local port of the connector — useful when the
     * configured port was 0 (ephemeral) and the OS picked one.
     */
    public int getPort() {
        return AbstractProtocol.class.cast(tomcat.getConnector().getProtocolHandler()).getLocalPort();
    }

    /**
     * Creates and starts the server then deploys the context.
     * Fails fast (and cleans up) if the context did not reach a started state.
     *
     * @return this, for chaining.
     * @throws IllegalStateException if the context stopped or failed on deploy.
     */
    public synchronized TomcatWebServer create() {
        if (configuration.isDisableRegistry()) {
            Registry.disableRegistry();
        }
        final var tomcat = createTomcat();
        final var context = createContext();
        tomcat.getHost().addChild(context);
        final var state = context.getState();
        if (state == LifecycleState.STOPPED || state == LifecycleState.FAILED) {
            try {
                close();
            } catch (final RuntimeException re) {
                // no-op: prefer reporting the deployment failure below
            }
            throw new IllegalStateException("Context didn't start");
        }
        if (configuration.getPort() == 0) {
            // propagate the ephemeral port back so callers can read it from the config
            configuration.setPort(getPort());
        }
        return this;
    }

    /** Stops and destroys the server; safe to call when never started. */
    @Override
    public synchronized void close() {
        if (tomcat == null) {
            return;
        }
        try {
            tomcat.stop();
            tomcat.destroy();
            final var server = tomcat.getServer();
            if (server != null) {
                // give a chance to the utility executor to stop, otherwise it just leaks and stops later
                final var utilityExecutor = server.getUtilityExecutor();
                if (utilityExecutor != null) {
                    try {
                        utilityExecutor.awaitTermination(1, TimeUnit.MINUTES);
                    } catch (final InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
        } catch (final LifecycleException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Instantiates, configures, inits and starts the Tomcat instance
     * (engine + host), applying the configured customizers.
     */
    protected Tomcat createTomcat() {
        final var tomcat = newTomcat();
        tomcat.setBaseDir(configuration.getBase());
        tomcat.setPort(configuration.getPort());
        final var host = new StandardHost();
        host.setAutoDeploy(false);
        // note needed to stick to tomcat but neat to enable in customizers: host.setFailCtxIfServletStartFails(true);
        host.setName(configuration.getDefaultHost());
        tomcat.getEngine().addChild(host);
        if (configuration.getTomcatCustomizers() != null) {
            configuration.getTomcatCustomizers().forEach(c -> c.accept(tomcat));
        }
        onTomcat(tomcat);
        try {
            tomcat.init();
        } catch (final LifecycleException e) {
            try {
                tomcat.destroy();
            } catch (final LifecycleException ex) {
                // no-op: surface the init failure, not the cleanup one
            }
            throw new IllegalStateException(e);
        }
        try {
            tomcat.start();
        } catch (final LifecycleException e) {
            close();
            throw new IllegalStateException(e);
        }
        return this.tomcat = tomcat;
    }

    /**
     * Builds the web context: class loader, servlet container initializers,
     * access log + error report valves and (optionally) a fast session manager.
     */
    protected StandardContext createContext() {
        final var ctx = newContext();
        ctx.setLoader(new LaunchingClassLoaderLoader());
        ctx.setPath("");
        ctx.setName("");
        ctx.setFailCtxIfServletStartFails(true);
        // ctx.setJarScanner(newSkipScanner()); // we don't use scanning at all with this setup so just ignore useless optims for now
        ctx.addServletContainerInitializer((set, servletContext) -> defaultContextSetup(servletContext), null);
        configuration.getInitializers().forEach(sci -> ctx.addServletContainerInitializer(
                sci, ofNullable(sci.getClass().getAnnotation(HandlesTypes.class)).map(HandlesTypes::value).map(this::scanFor).orElseGet(Set::of)));
        ctx.addLifecycleListener(new Tomcat.FixContextListener());
        final var errorReportValve = new ErrorReportValve();
        errorReportValve.setShowReport(false);
        errorReportValve.setShowServerInfo(false);
        if (configuration.getAccessLogPattern() != null && !configuration.getAccessLogPattern().isBlank()) {
            final var logValve = new AccessLogValve();
            logValve.setPattern(configuration.getAccessLogPattern());
            ctx.getPipeline().addValve(logValve);
        }
        ctx.getPipeline().addValve(errorReportValve);
        // avoid warnings
        ctx.setClearReferencesObjectStreamClassCaches(false);
        ctx.setClearReferencesThreadLocals(false);
        ctx.setClearReferencesRmiTargets(false);
        if (configuration.isFastSessionId()) {
            final var mgr = new StandardManager();
            mgr.setSessionIdGenerator(new FastSessionIdGenerator());
            ctx.setManager(mgr);
        }
        if (configuration.getContextCustomizers() != null) {
            configuration.getContextCustomizers().forEach(c -> c.accept(ctx));
        }
        onContext(ctx);
        return ctx;
    }

    /** Factory hook for the Tomcat instance. */
    protected Tomcat newTomcat() {
        return new NoBaseDirTomcat();
    }

    /** Factory hook for the context instance. */
    protected StandardContext newContext() {
        return new NoWorkDirContext();
    }

    /** Subclass hook invoked after customizers, before init/start. */
    protected void onTomcat(final Tomcat tomcat) {
        // no-op
    }

    /** Subclass hook invoked after the context is fully configured. */
    protected void onContext(final StandardContext ctx) {
        // no-op
    }

    /** Hook for {@code @HandlesTypes} scanning; default: no scanning. */
    protected Set<Class<?>> scanFor(final Class<?>... classes) {
        return Set.of();
    }

    /** Registers a UTF-8 request/response encoding filter unless disabled. */
    protected void defaultContextSetup(final ServletContext servletContext) {
        if (configuration.isSkipUtf8Filter()) {
            return;
        }
        final var encodingFilter = servletContext.addFilter("default-utf8-filter", (servletRequest, servletResponse, filterChain) -> {
            servletRequest.setCharacterEncoding("UTF-8");
            servletResponse.setCharacterEncoding("UTF-8");
            filterChain.doFilter(servletRequest, servletResponse);
        });
        encodingFilter.setAsyncSupported(true);
        encodingFilter.addMappingForUrlPatterns(EnumSet.allOf(DispatcherType.class), true, "/*");
    }

    /** Access log valve redirecting Tomcat's access log to JUL. */
    private static class AccessLogValve extends AbstractAccessLogValve {
        private final Logger logger = Logger.getLogger("yupiik.webserver.tomcat.access.log");

        @Override
        protected void log(final CharArrayWriter message) {
            logger.info(message.toString());
        }
    }

    /** Tomcat variant which does not create a base directory on disk. */
    private static class NoBaseDirTomcat extends Tomcat {
        @Override
        protected void initBaseDir() {
            // no-op
        }
    }

    /** Context variant which does not create a work directory on disk. */
    private static class NoWorkDirContext extends StandardContext {
        @Override
        protected void postWorkDirectory() {
            // no-op
        }
    }

    /** Session id generator using ThreadLocalRandom (fast, NOT cryptographically secure). */
    private static class FastSessionIdGenerator extends StandardSessionIdGenerator {
        @Override
        protected void getRandomBytes(final byte bytes[]) {
            ThreadLocalRandom.current().nextBytes(bytes);
        }
    }
}
/*BEGIN_LEGAL Copyright (c) 2018 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. END_LEGAL */ /// @file xed-operand-element-xtype-enum.h // This file was automatically generated. // Do not edit this file. #if !defined(XED_OPERAND_ELEMENT_XTYPE_ENUM_H) # define XED_OPERAND_ELEMENT_XTYPE_ENUM_H #include "xed-common-hdrs.h" typedef enum { XED_OPERAND_XTYPE_INVALID, XED_OPERAND_XTYPE_B80, XED_OPERAND_XTYPE_F16, XED_OPERAND_XTYPE_F32, XED_OPERAND_XTYPE_F64, XED_OPERAND_XTYPE_F80, XED_OPERAND_XTYPE_I1, XED_OPERAND_XTYPE_I16, XED_OPERAND_XTYPE_I32, XED_OPERAND_XTYPE_I64, XED_OPERAND_XTYPE_I8, XED_OPERAND_XTYPE_INT, XED_OPERAND_XTYPE_STRUCT, XED_OPERAND_XTYPE_U128, XED_OPERAND_XTYPE_U16, XED_OPERAND_XTYPE_U256, XED_OPERAND_XTYPE_U32, XED_OPERAND_XTYPE_U64, XED_OPERAND_XTYPE_U8, XED_OPERAND_XTYPE_UINT, XED_OPERAND_XTYPE_VAR, XED_OPERAND_XTYPE_LAST } xed_operand_element_xtype_enum_t; /// This converts strings to #xed_operand_element_xtype_enum_t types. /// @param s A C-string. /// @return #xed_operand_element_xtype_enum_t /// @ingroup ENUM XED_DLL_EXPORT xed_operand_element_xtype_enum_t str2xed_operand_element_xtype_enum_t(const char* s); /// This converts strings to #xed_operand_element_xtype_enum_t types. /// @param p An enumeration element of type xed_operand_element_xtype_enum_t. 
/// @return string /// @ingroup ENUM XED_DLL_EXPORT const char* xed_operand_element_xtype_enum_t2str(const xed_operand_element_xtype_enum_t p); /// Returns the last element of the enumeration /// @return xed_operand_element_xtype_enum_t The last element of the enumeration. /// @ingroup ENUM XED_DLL_EXPORT xed_operand_element_xtype_enum_t xed_operand_element_xtype_enum_t_last(void); #endif
/* eslint-disable */
import * as vscode from "vscode";
import setState, {
  ValidatorSelectionStateRequest,
} from "../binary/requests/setState";
import CompletionOrigin from "../CompletionOrigin";
import { StatePayload } from "../globals/consts";
import { VALIDATOR_IGNORE_REFRESH_COMMAND } from "./commands";
import { StateType } from "./utils";
import {
  clearCache,
  Completion,
  setIgnore,
  VALIDATOR_BINARY_VERSION,
} from "./ValidatorClient";

// Sentinel suggestion value reported to telemetry when the user ignored a
// suggestion instead of selecting one.
const IGNORE_VALUE = "__IGNORE__";

/**
 * Clears the validator's cache and reports a clear-cache state event.
 */
export async function validatorClearCacheHandler(): Promise<void> {
  await clearCache();
  setState({
    [StatePayload.STATE]: { state_type: StateType.clearCache },
  });
}

// FIXME: try to find the exact type for the 3rd parameter...
/**
 * Command handler fired when the user picks a validator suggestion.
 * Builds a selection telemetry payload and reports it; failures are
 * logged, never surfaced to the user.
 */
export async function validatorSelectionHandler(
  editor: vscode.TextEditor,
  edit: vscode.TextEditorEdit,
  { currentSuggestion, allSuggestions, reference, threshold }: any
): Promise<void> {
  try {
    const eventData = eventDataOf(
      editor,
      currentSuggestion,
      allSuggestions,
      reference,
      threshold,
      false
    );
    setState(eventData);
  } catch (error) {
    console.error(error);
  }
}

/**
 * Command handler fired when the user ignores a validator suggestion:
 * persists the ignore (by responseId), refreshes the ignore state, then
 * reports a selection event carrying the IGNORE_VALUE sentinel.
 */
export async function validatorIgnoreHandler(
  editor: vscode.TextEditor,
  edit: vscode.TextEditorEdit,
  { allSuggestions, reference, threshold, responseId }: any
): Promise<void> {
  try {
    await setIgnore(responseId);
    vscode.commands.executeCommand(VALIDATOR_IGNORE_REFRESH_COMMAND);
    const completion: Completion = {
      value: IGNORE_VALUE,
      score: 0,
    };
    const eventData = eventDataOf(
      editor,
      completion,
      allSuggestions,
      reference,
      threshold,
      true
    );
    setState(eventData);
  } catch (error) {
    console.error(error);
  }
}

/**
 * Assembles the ValidatorSelection telemetry payload.
 * `index` falls back to allSuggestions.length when the current suggestion
 * is not one of the offered ones (e.g. the synthetic ignore completion).
 */
function eventDataOf(
  editor: vscode.TextEditor,
  currentSuggestion: Completion,
  allSuggestions: Completion[],
  reference: string,
  threshold: string,
  isIgnore = false
): ValidatorSelectionStateRequest {
  // Reference (identity) comparison: the picked item is expected to be the
  // very same object as one of the offered suggestions.
  let index = allSuggestions.findIndex((sug) => sug === currentSuggestion);
  if (index === -1) {
    index = allSuggestions.length;
  }
  const suggestions = allSuggestions.map((sug) => {
    return {
      length: sug.value.length,
      strength: resolveDetailOf(sug),
      origin: CompletionOrigin.CLOUD,
    };
  });
  const { length } = currentSuggestion.value;
  const selectedSuggestion = currentSuggestion.value;
  const strength = resolveDetailOf(currentSuggestion);
  const origin = CompletionOrigin.CLOUD;
  // File extension used as a language proxy; for an extension-less file this
  // yields the whole file name — presumably acceptable for telemetry (TODO confirm).
  const language = editor.document.fileName.split(".").pop();
  const numOfSuggestions = allSuggestions.length;
  const eventData: ValidatorSelectionStateRequest = {
    ValidatorSelection: {
      language: language!,
      length,
      strength,
      origin,
      index,
      threshold,
      num_of_suggestions: numOfSuggestions,
      suggestions,
      selected_suggestion: selectedSuggestion,
      reference,
      reference_length: reference.length,
      is_ignore: isIgnore,
      validator_version: VALIDATOR_BINARY_VERSION,
    },
  };
  return eventData;
}

/** Formats a completion's score as a percentage string, e.g. "42%". */
function resolveDetailOf(completion: Completion): string {
  return `${completion.score}%`;
}
import displayio
from adafruit_macropad import MacroPad
from bongo.bongo import Bongo

# Create the MacroPad hardware interface and the Bongo animation helper.
macropad = MacroPad()
bongo = Bongo()

# Create a display group and nest the animation's own group inside it.
group = displayio.Group()
group.append(bongo.group)

# Show the group on the MacroPad's built-in display.
macropad.display.show(group)

# Main loop: poll key events forever and feed each one (or None when no
# event is pending) to the animation so it can react to key presses.
while True:
    key_event = macropad.keys.events.get()
    bongo.update(key_event)
import numpy as np

import server.game_pb2 as game__pb2
from server.game_pb2 import ScoreType
from server.game_pb2 import Action
from server.YahtzeeGameInterface import YahtzeeGameInterface
from server.Judger import Judger


class YahtzeeGame(YahtzeeGameInterface):
    """Server-side Yahtzee game state machine backed by protobuf messages.

    The whole game state lives in a ``game__pb2.GameStatus`` message; each
    public method validates the requested action against
    ``status.available_actions`` before mutating the state.
    """

    # Score types a player may claim; Bonus is excluded because it is
    # granted automatically when the numeric total reaches BONUS_THRESHOLD.
    SELECTABLE_SCORE_TYPES = (
        ScoreType.Ones, ScoreType.Twos, ScoreType.Threes,
        ScoreType.Fours, ScoreType.Fives, ScoreType.Sixes,
        ScoreType.Chance, ScoreType.FourOfAKind, ScoreType.FullHouse,
        ScoreType.SmallStraight, ScoreType.LargeStraight, ScoreType.Yahtzee,
    )
    # The six numeric categories that count toward the upper-section bonus.
    NUMERIC_SCORE_TYPES = SELECTABLE_SCORE_TYPES[:6]
    BONUS_SCORE = 35      # upper-section bonus value (was a hard-coded TODO)
    BONUS_THRESHOLD = 63  # numeric total required to earn the bonus
    TOTAL_TURNS = 12      # one turn per selectable score type

    def __init__(self, seed):
        """Create a game with a seeded RNG so replays are deterministic."""
        self.seed = seed
        self.random = np.random.RandomState(seed=seed)
        self.status = game__pb2.GameStatus(
            available_actions=[Action.Roll]
        )
        self.judger = Judger()

    def get_available_actions(self):
        """Return the actions currently allowed by the state machine."""
        return self.status.available_actions

    def get_available_score_type(self):
        """Return the score types the player has not claimed yet."""
        return [x for x in self.SELECTABLE_SCORE_TYPES
                if self.get_score_by_type(x) is None]

    def roll(self):
        """Roll all five dice; allowed only at the start of a turn."""
        status = self.status
        if Action.Roll not in status.available_actions:
            raise Exception("invalid action")
        status.dices[:] = self.random.randint(1, 7, 5)
        status.available_actions[:] = [Action.Reroll, Action.Write]
        status.reroll_times = 2

    def reroll(self, indexes):
        """Re-roll every die whose index is NOT in ``indexes`` (kept dice).

        At most two rerolls per turn; when none remain only Write is allowed.
        """
        status = self.status
        if Action.Reroll not in status.available_actions:
            raise Exception("invalid action")
        status.dices[:] = [status.dices[x] if x in indexes
                           else self.random.randint(1, 7)
                           for x in range(5)]
        status.reroll_times -= 1
        if status.reroll_times > 0:
            status.available_actions[:] = [Action.Reroll, Action.Write]
        else:
            status.available_actions[:] = [Action.Write]

    def write(self, score_type):
        """Score the current dice under ``score_type`` and end the turn.

        Raises if writing is not allowed, the type is invalid, or it was
        already claimed. Awards the bonus as soon as it becomes available
        and flags GameOver after the final turn.
        """
        status = self.status
        if score_type == ScoreType.SCORETYPE_INVALID:
            raise Exception("invalid selection")
        if Action.Write not in status.available_actions:
            raise Exception("invalid action")
        if self.get_score_by_type(score_type) is not None:
            raise Exception("already selected")
        selected_score = self.judger.judge(status.dices, score_type)
        status.dices[:] = []
        status.scores.append(game__pb2.PartScore(
            type=score_type, score=selected_score))
        status.score += selected_score
        status.turns += 1
        if self.is_bonus_available():
            status.score += self.BONUS_SCORE
            status.scores.append(game__pb2.PartScore(
                type=ScoreType.Bonus, score=self.BONUS_SCORE))
        if status.turns == self.TOTAL_TURNS:
            status.available_actions[:] = [Action.GameOver]
        else:
            status.available_actions[:] = [Action.Roll]

    def get_score_by_type(self, score_type, default_zero=False):
        """Return the recorded score for ``score_type``.

        Returns None (or 0 when ``default_zero``) if not yet claimed.
        """
        for score in self.status.scores:
            if score.type == score_type:
                return score.score
        return 0 if default_zero else None

    def is_bonus_available(self):
        """True exactly when the bonus should be granted now.

        False if already granted or the numeric total is below the threshold
        (the original fell through returning None here).
        """
        if self.get_score_by_type(ScoreType.Bonus) is not None:
            return False
        total_numeric_score = sum(
            self.get_score_by_type(x, default_zero=True)
            for x in self.NUMERIC_SCORE_TYPES)
        return total_numeric_score >= self.BONUS_THRESHOLD

    def get_status(self):
        """Return the raw protobuf game status."""
        return self.status
// Copyright 2015 Open Source Robotics Foundation, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #if defined(_WIN32) # error time_unix.c is not intended to be used with win32 based systems #endif // defined(_WIN32) #ifdef __cplusplus extern "C" { #endif #include "rcutils/time.h" #if defined(__MACH__) #include <mach/clock.h> #include <mach/mach.h> #endif // defined(__MACH__) #include <math.h> #include <time.h> #include <unistd.h> #include "./common.h" #include "rcutils/allocator.h" #include "rcutils/error_handling.h" #if !defined(__MACH__) // Assume clock_get_time is available on OS X. // This id an appropriate check for clock_gettime() according to: // http://man7.org/linux/man-pages/man2/clock_gettime.2.html # if !defined(_POSIX_TIMERS) || !_POSIX_TIMERS # warning no monotonic clock function available # endif // !defined(_POSIX_TIMERS) || !_POSIX_TIMERS #endif // !defined(__MACH__) #define __WOULD_BE_NEGATIVE(seconds, subseconds) (seconds < 0 || (subseconds < 0 && seconds == 0)) rcutils_ret_t rcutils_system_time_now(rcutils_time_point_value_t * now) { RCUTILS_CHECK_ARGUMENT_FOR_NULL(now, RCUTILS_RET_INVALID_ARGUMENT); struct timespec timespec_now; #if defined(__MACH__) // On OS X use clock_get_time. 
clock_serv_t cclock; mach_timespec_t mts; host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); clock_get_time(cclock, &mts); mach_port_deallocate(mach_task_self(), cclock); timespec_now.tv_sec = mts.tv_sec; timespec_now.tv_nsec = mts.tv_nsec; #else // defined(__MACH__) // Otherwise use clock_gettime. clock_gettime(CLOCK_REALTIME, &timespec_now); #endif // defined(__MACH__) if (__WOULD_BE_NEGATIVE(timespec_now.tv_sec, timespec_now.tv_nsec)) { RCUTILS_SET_ERROR_MSG("unexpected negative time"); return RCUTILS_RET_ERROR; } *now = RCUTILS_S_TO_NS((int64_t)timespec_now.tv_sec) + timespec_now.tv_nsec; return RCUTILS_RET_OK; } rcutils_ret_t rcutils_steady_time_now(rcutils_time_point_value_t * now) { RCUTILS_CHECK_ARGUMENT_FOR_NULL(now, RCUTILS_RET_INVALID_ARGUMENT); // If clock_gettime is available or on OS X, use a timespec. struct timespec timespec_now; #if defined(__MACH__) // On OS X use clock_get_time. clock_serv_t cclock; mach_timespec_t mts; host_get_clock_service(mach_host_self(), SYSTEM_CLOCK, &cclock); clock_get_time(cclock, &mts); mach_port_deallocate(mach_task_self(), cclock); timespec_now.tv_sec = mts.tv_sec; timespec_now.tv_nsec = mts.tv_nsec; #else // defined(__MACH__) // Otherwise use clock_gettime. #if defined(CLOCK_MONOTONIC_RAW) clock_gettime(CLOCK_MONOTONIC_RAW, &timespec_now); #else // defined(CLOCK_MONOTONIC_RAW) clock_gettime(CLOCK_MONOTONIC, &timespec_now); #endif // defined(CLOCK_MONOTONIC_RAW) #endif // defined(__MACH__) if (__WOULD_BE_NEGATIVE(timespec_now.tv_sec, timespec_now.tv_nsec)) { RCUTILS_SET_ERROR_MSG("unexpected negative time"); return RCUTILS_RET_ERROR; } *now = RCUTILS_S_TO_NS((int64_t)timespec_now.tv_sec) + timespec_now.tv_nsec; return RCUTILS_RET_OK; } #ifdef __cplusplus } #endif
/**
 * A cache for charts data, implemented as a singleton.
 * Fields are reference-counted: they are only evicted when no chart uses them anymore.
 * All public methods are synchronized on this instance.
 * @author Laurent Cohen
 */
public final class ChartDataCache {
  /**
   * The singleton instance of this cache.
   */
  private static final ChartDataCache instance = new ChartDataCache();
  /**
   * Mapping of fields to their collection of values.
   */
  private final CollectionMap<Fields, Double> map = new LinkedListHashMap<>();
  /**
   * Usage counts for the fields.
   */
  private final Map<Fields, AtomicInteger> fieldCount = new HashMap<>();

  /**
   * Instantiation not permitted.
   */
  private ChartDataCache() {
  }

  /**
   * @return the singleton instance of this class.
   */
  public static ChartDataCache getInstance() {
    return instance;
  }

  /**
   * Add the specified fields, incrementing the usage count of those already present
   * and backfilling history for the newly added ones.
   * @param fields the fields to add.
   * @param handler holds the updated data.
   */
  public synchronized void addFields(final Fields[] fields, final StatsHandler handler) {
    final List<Fields> added = new ArrayList<>(fields.length);
    for (final Fields field: fields) {
      final AtomicInteger count = fieldCount.get(field);
      if (count == null) {
        // first user of this field: register it and backfill its values below
        fieldCount.put(field, new AtomicInteger(1));
        added.add(field);
      } else {
        count.incrementAndGet();
      }
    }
    // backfill the new fields with the snapshots the handler already holds,
    // or a single zero when no history is available yet
    final int count = Math.min(handler.getRolloverPosition(), handler.getStatsCount());
    if (count > 0) {
      for (int i=0; i<count; i++) {
        final Map<Fields, Double> valueMap = handler.getDoubleValues(i);
        for (final Fields field: added) map.putValue(field, valueMap.get(field));
      }
    } else {
      for (final Fields field: added) map.putValue(field, 0d);
    }
  }

  /**
   * Decrement the usage count of the given fields and evict those no longer used.
   * @param fields the fields to remove.
   */
  public synchronized void removeFields(final Fields[] fields) {
    for (final Fields field: fields) {
      final AtomicInteger count = fieldCount.get(field);
      if (count == null) continue;
      final int n = count.decrementAndGet();
      if (n <= 0) {
        fieldCount.remove(field);
        map.removeKey(field);
      }
    }
  }

  /**
   * Update the values of the fields with the latest snapshot, trimming or
   * zero-padding each series to the handler's rollover capacity.
   * @param handler holds the updated data.
   */
  public synchronized void update(final StatsHandler handler) {
    final Map<Fields, Double> valueMap = handler.getLatestDoubleValues();
    final int count = Math.min(handler.getStatsCount(), handler.getRolloverPosition());
    for (final Map.Entry<Fields, Collection<Double>> entry: map.entrySet()) {
      final Fields field = entry.getKey();
      final LinkedList<Double> values = (LinkedList<Double>) entry.getValue();
      values.offer(valueMap.get(field));
      // drop oldest values beyond the capacity, pad with leading zeroes when short
      while (values.size() > count) values.poll();
      while (values.size() < count) values.addFirst(0d);
    }
  }

  /**
   * Get the available data for the specified fields.
   * Missing series are created and zero-padded to the longest available length;
   * the returned lists are copies, safe to use without holding the lock.
   * @param fields the fields for which to get data.
   * @return a mapping of fields to the associated data.
   */
  public synchronized Map<Fields, List<Double>> getData(final Fields[] fields) {
    final Map<Fields, List<Double>> result = new HashMap<>(fields.length);
    // first pass: find the longest existing series
    int maxLength = 0;
    for (final Fields field: fields) {
      final List<Double> values = (List<Double>) map.getValues(field);
      final int n = (values == null) ? 0 : values.size();
      if (n > maxLength) maxLength = n;
    }
    if (maxLength <= 0) maxLength = 1;
    // second pass: create/pad each series up to maxLength and copy it out
    for (final Fields field: fields) {
      List<Double> values = (List<Double>) map.getValues(field);
      if (values == null) {
        values = new LinkedList<>();
        for (int i=0; i<maxLength; i++) values.add(0d);
        map.addValues(field, values);
      } else {
        final int n = values.size();
        if (n < maxLength) {
          for (int i=0; i<maxLength - n; i++) values.add(0d);
        }
      }
      result.put(field, new LinkedList<>(values));
    }
    return result;
  }
}
def countdownDownload(timerDownload, timerupdate, count):
    """Cycle the wallpaper every ``timerupdate`` seconds until ``timerDownload``
    seconds have elapsed, then trigger a new download.

    Args:
        timerDownload: total number of seconds to keep cycling wallpapers.
        timerupdate: number of seconds between wallpaper changes.
        count: index of the wallpaper to display first.

    Returns:
        Whatever ``download()`` returns -- presumably the downloaded-image
        count (TODO: confirm against download()'s definition).
    """
    stu = timerupdate  # remember the full interval so it can be reset each cycle
    listwallpaper = returnwallpaper()
    removeUnwantedPhotos(listwallpaper)
    # Re-read the list after pruning so removed files are excluded.
    listwallpaper = returnwallpaper()
    WallpaperCount = len(listwallpaper)
    setwallpaper(listwallpaper, count)
    count += 1
    # BUGFIX: use explicit `> 0` comparisons. The original `while timerDownload:`
    # relied on truthiness, so a timerDownload that is not an exact multiple of
    # the update interval went negative (still truthy) and looped forever.
    while timerDownload > 0:
        while timerupdate > 0:
            time.sleep(1)
            timerupdate -= 1
        print("End of 15 Minutes, Wallpaper changed!")
        if count >= WallpaperCount:
            count = 0  # wrap around to the first wallpaper
        setwallpaper(listwallpaper, count)
        count += 1
        timerupdate = stu
        timerDownload = timerDownload - stu
    dCount = download()
    return dCount
Parameterless Transductive Feature Re-representation for Few-Shot Learning Recent literature in few-shot learning (FSL) has shown that transductive methods often outperform their inductive counterparts. However, most transductive solutions, particularly the meta-learning based ones, require inserting trainable parameters on top of some inductive baselines to facilitate transduction. In this paper, we propose a parameterless transductive feature re-representation framework that differs from all existing solutions from the following perspectives. (1) It is widely compatible with existing FSL methods, including meta-learning and fine tuning based models. (2) The framework is simple and introduces no extra training parameters when applied to any architecture. We conduct experiments on three benchmark datasets by applying the framework to both representative meta-learning baselines and state-of-the-art FSL methods. Our framework consistently improves performances in all experiments and refreshes the state-of-the-art FSL results.
<reponame>cisco-ie/cisco-proto /* Copyright 2019 Cisco Systems Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by protoc-gen-go. DO NOT EDIT. // source: pl_pifib_show_police.proto package cisco_ios_xr_platform_pifib_oper_augment_hardware_police import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package

// PlPifibShowPolice_KEYS is the (empty) key message for the
// pl_pifib_show_police telemetry path; it carries only the standard
// generated protobuf bookkeeping fields.
type PlPifibShowPolice_KEYS struct {
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *PlPifibShowPolice_KEYS) Reset()         { *m = PlPifibShowPolice_KEYS{} }
func (m *PlPifibShowPolice_KEYS) String() string { return proto.CompactTextString(m) }
func (*PlPifibShowPolice_KEYS) ProtoMessage()    {}
func (*PlPifibShowPolice_KEYS) Descriptor() ([]byte, []int) {
	return fileDescriptor_9ab3f3d102aea346, []int{0}
}

func (m *PlPifibShowPolice_KEYS) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PlPifibShowPolice_KEYS.Unmarshal(m, b)
}
func (m *PlPifibShowPolice_KEYS) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PlPifibShowPolice_KEYS.Marshal(b, m, deterministic)
}
func (m *PlPifibShowPolice_KEYS) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PlPifibShowPolice_KEYS.Merge(m, src)
}
func (m *PlPifibShowPolice_KEYS) XXX_Size() int {
	return xxx_messageInfo_PlPifibShowPolice_KEYS.Size(m)
}
func (m *PlPifibShowPolice_KEYS) XXX_DiscardUnknown() {
	xxx_messageInfo_PlPifibShowPolice_KEYS.DiscardUnknown(m)
}

var xxx_messageInfo_PlPifibShowPolice_KEYS proto.InternalMessageInfo

// PlPifibShowPolice carries per-flow policer counters.
// NOTE(review): field semantics below are inferred from the field names and
// protobuf tags (flow_type, rate, accepts, drops, tos_prec) -- confirm
// against the pl_pifib_show_police.proto source.
type PlPifibShowPolice struct {
	FlowType             uint32   `protobuf:"varint,50,opt,name=flow_type,json=flowType,proto3" json:"flow_type,omitempty"`
	Rate                 uint32   `protobuf:"varint,51,opt,name=rate,proto3" json:"rate,omitempty"`
	Accepts              uint64   `protobuf:"varint,52,opt,name=accepts,proto3" json:"accepts,omitempty"`
	Drops                uint64   `protobuf:"varint,53,opt,name=drops,proto3" json:"drops,omitempty"`
	TosPrec              uint32   `protobuf:"varint,54,opt,name=tos_prec,json=tosPrec,proto3" json:"tos_prec,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *PlPifibShowPolice) Reset()         { *m = PlPifibShowPolice{} }
func (m *PlPifibShowPolice) String() string { return proto.CompactTextString(m) }
func (*PlPifibShowPolice) ProtoMessage()    {}
func (*PlPifibShowPolice) Descriptor() ([]byte, []int) {
	return fileDescriptor_9ab3f3d102aea346, []int{1}
}

func (m *PlPifibShowPolice) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PlPifibShowPolice.Unmarshal(m, b)
}
func (m *PlPifibShowPolice) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PlPifibShowPolice.Marshal(b, m, deterministic)
}
func (m *PlPifibShowPolice) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PlPifibShowPolice.Merge(m, src)
}
func (m *PlPifibShowPolice) XXX_Size() int {
	return xxx_messageInfo_PlPifibShowPolice.Size(m)
}
func (m *PlPifibShowPolice) XXX_DiscardUnknown() {
	xxx_messageInfo_PlPifibShowPolice.DiscardUnknown(m)
}

var xxx_messageInfo_PlPifibShowPolice proto.InternalMessageInfo

// Generated nil-safe accessors: each returns the zero value when the
// receiver is nil, so callers need not nil-check the message.
func (m *PlPifibShowPolice) GetFlowType() uint32 {
	if m != nil {
		return m.FlowType
	}
	return 0
}

func (m *PlPifibShowPolice) GetRate() uint32 {
	if m != nil {
		return m.Rate
	}
	return 0
}

func (m *PlPifibShowPolice) GetAccepts() uint64 {
	if m != nil {
		return m.Accepts
	}
	return 0
}

func (m *PlPifibShowPolice) GetDrops() uint64 {
	if m != nil {
		return m.Drops
	}
	return 0
}

func (m *PlPifibShowPolice) GetTosPrec() uint32 {
	if m != nil {
		return m.TosPrec
	}
	return 0
}

// Register the message types with the proto runtime under their fully
// qualified telemetry names.
func init() {
	proto.RegisterType((*PlPifibShowPolice_KEYS)(nil), "cisco_ios_xr_platform_pifib_oper.augment.hardware.police.pl_pifib_show_police_KEYS")
	proto.RegisterType((*PlPifibShowPolice)(nil), "cisco_ios_xr_platform_pifib_oper.augment.hardware.police.pl_pifib_show_police")
}

func init() { proto.RegisterFile("pl_pifib_show_police.proto", fileDescriptor_9ab3f3d102aea346) }

var fileDescriptor_9ab3f3d102aea346 = []byte{
	// 216 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x8f, 0xb1, 0x4e, 0x03, 0x31,
	0x0c, 0x86, 0x75, 0x52, 0xa1, 0xc5, 0x12, 0x4b, 0xd4, 0x21, 0xa5, 0x4b, 0xd5, 0xa9, 0xd3, 0x0d,
	0x14, 0x10, 0x2f, 0xc0, 0xc4, 0x82, 0x0a, 0x0b, 0x93, 0x95, 0xa6, 0x3e,
	0x1a, 0x29, 0xc5, 0x96, 0x63, 0x74, 0xf4, 0x35, 0x78, 0x62, 0x44, 0x4a, 0xb7, 0xdb, 0xfc, 0x7f,
	0xb6, 0x3f, 0xe9, 0x87, 0x1b, 0xc9, 0x28, 0xa9, 0x4b, 0x5b, 0x2c, 0x7b, 0xee, 0x51, 0x38, 0xa7,
	0x48, 0xad, 0x28, 0x1b, 0xbb, 0xc7, 0x98, 0x4a, 0x64, 0x4c, 0x5c, 0xf0, 0x5b, 0x51, 0x72, 0xb0,
	0x8e, 0xf5, 0xf0, 0x7f, 0xce, 0x42, 0xda, 0x86, 0xaf, 0x8f, 0x03, 0x7d, 0x5a, 0xbb, 0x0f, 0xba,
	0xeb, 0x83, 0x52, 0x7b, 0xfa, 0x5f, 0xce, 0x61, 0x36, 0xe4, 0xc5, 0xe7, 0xa7, 0xf7, 0xd7, 0xe5,
	0x4f, 0x03, 0xd3, 0xa1, 0xad, 0x9b, 0xc3, 0x55, 0x97, 0xb9, 0x47, 0x3b, 0x0a, 0xf9, 0xdb, 0x45,
	0xb3, 0xba, 0xde, 0x4c, 0xfe, 0xc0, 0xdb, 0x51, 0xc8, 0x39, 0x18, 0x69, 0x30, 0xf2, 0xeb, 0xca,
	0xeb, 0xec, 0x3c, 0x8c, 0x43, 0x8c, 0x24, 0x56, 0xfc, 0xdd, 0xa2, 0x59, 0x8d, 0x36, 0xe7, 0xe8,
	0xa6, 0x70, 0xb1, 0x53, 0x96, 0xe2, 0xef, 0x2b, 0x3f, 0x05, 0x37, 0x83, 0x89, 0x71, 0x41, 0x51,
	0x8a, 0xfe, 0xa1, 0x7a, 0xc6, 0xc6, 0xe5, 0x45, 0x29, 0x6e, 0x2f, 0x6b, 0xe5, 0xf5, 0x6f, 0x00,
	0x00, 0x00, 0xff, 0xff, 0x75, 0x32, 0xb4, 0x40, 0x10, 0x01, 0x00, 0x00,
}
def execute_command_with_pipe(command, source_file_to_pipe=None, destination_file_to_pipe=None):
    """Run an azcopy command with data piped in from a file or piped out to a file.

    Exactly one of source_file_to_pipe / destination_file_to_pipe must be given.

    Args:
        command: azcopy sub-command and arguments (appended after the azcopy path).
        source_file_to_pipe: file whose contents are piped to the command's stdin.
        destination_file_to_pipe: file that receives the command's stdout.

    Returns:
        True when the command exits with status 0, False on a non-zero exit.

    Raises:
        ValueError: when neither or both pipe arguments are supplied.
        subprocess.TimeoutExpired: when the command exceeds the 360s timeout
            (unchanged from the original behavior: timeouts propagate).
    """
    # Exactly-one-of check: both None or both set are invalid.
    if (source_file_to_pipe is None) == (destination_file_to_pipe is None):
        raise ValueError("Either source is specified, or destination is specified")

    azcopy_path = os.path.join(util.test_directory_path, util.azcopy_executable_name)
    command = azcopy_path + " " + command

    if source_file_to_pipe is not None:
        ps = subprocess.Popen(('cat', source_file_to_pipe), stdout=subprocess.PIPE)
        try:
            subprocess.check_output(shlex.split(command), stdin=ps.stdout, timeout=360)
            return True
        except CalledProcessError:
            return False
        finally:
            # BUGFIX: close the parent's copy of the pipe so `cat` receives
            # SIGPIPE if azcopy exits early, and always reap the child (the
            # original skipped ps.wait() on failure, leaking a zombie).
            ps.stdout.close()
            ps.wait(360)

    # destination_file_to_pipe is not None
    with open(destination_file_to_pipe, "wb") as output, open('fake_input.txt', 'wb') as fake_input:
        try:
            subprocess.check_call(shlex.split(command), stdin=fake_input, stdout=output, timeout=360)
            return True
        except CalledProcessError:
            return False
#include <iostream> #include <QtCore/QFile> #include <QtCore/QDir> #include <QtCore/QDirIterator> #include <QtCore/QFileInfo> #include <QtCore/QString> #include <QtCore/QStringList> #include <QtCore/QtDebug> #include "TestFileLocations.h" // ----------------------------------------------------------------------------- // // ----------------------------------------------------------------------------- void buildInitializerList(QString hFile, QString cppFile) { // Read the header file QFile h(hFile); h.open(QFile::ReadOnly); QString header = h.readAll(); h.close(); QStringList list; QStringList initializerList; list = header.split(QRegExp("\\n")); QStringListIterator lines(list); while(lines.hasNext()) { QString line = lines.next(); // std::cout << line.toStdString() << std::endl; if(line.contains(QString("SIMPL_INSTANCE_STRING_PROPERTY"))) { QStringList chunks = line.split(QRegExp("\\(")); chunks = chunks.at(1).split(QRegExp("\\)")); QString s = QString("m_") + chunks.at(0); s = s.trimmed(); initializerList << s; } if(line.contains(QString("SIMPL_INSTANCE_PROPERTY"))) { QStringList chunks = line.split(QRegExp(", ")); chunks = chunks.at(1).split(QRegExp("\\)")); QString s = QString("m_") + chunks.at(0); s = s.trimmed(); initializerList << s; } if(line.contains(QString("DEFINE_DATAARRAY_VARIABLE"))) { QStringList chunks = line.split(QRegExp(", ")); chunks = chunks.at(1).split(QRegExp("\\)")); QString s = QString("m_") + chunks.at(0) + QString("ArrayName"); initializerList << s; s = QString("m_") + chunks.at(0) + "(nullptr)"; s = s.trimmed(); initializerList << s; } } // lines = QStringListIterator(initializerList); // while (lines.hasNext()) // { // std::cout << lines.next().toStdString() << std::endl; // } // now read the source file QFile source(cppFile); source.open(QFile::ReadOnly); QString cpp = source.readAll(); source.close(); QFileInfo fi(hFile); QFile outSource(cppFile); outSource.open(QFile::WriteOnly); QTextStream stream(&outSource); QString constructor = 
fi.baseName() + "::" + fi.baseName(); QStringList existinInitializerList; list = cpp.split(QRegExp("\\n")); QStringListIterator sourceLines(list); while(sourceLines.hasNext()) { QString line = sourceLines.next(); stream << line << "\n"; if(line.contains(constructor) == true) { // We are in the constructor line = sourceLines.next(); // Should be the super class stream << line << "\n"; line = sourceLines.next(); // Should be the first instance variable of class bool stop = false; while(stop == false) { QStringList chunks = line.split(','); existinInitializerList << chunks.at(0).trimmed(); if(chunks.size() == 1) { stop = true; } else { line = sourceLines.next(); } } break; } } // lines = QStringListIterator(existinInitializerList); // while (lines.hasNext()) // { // std::cout << lines.next().toStdString() << std::endl; // } QStringList newList; lines = QStringListIterator(initializerList); while(lines.hasNext()) { QString line = lines.next(); QStringList result; result = existinInitializerList.filter(line + "("); if(result.size() == 1) { newList << result.at(0); } else { if(line.contains("nullptr") == false) { newList << line + QString(""); } else { newList << QString(line); } } } // std::cout << "----------------" << std::endl; QString outS; QTextStream ss(&outS); lines = QStringListIterator(newList); while(lines.hasNext()) { ss << " " << lines.next(); if(lines.hasNext() == true) { ss << ",\n"; } else { ss << "\n"; } } stream << outS; // Finish writing the source file back out while(sourceLines.hasNext()) { QString line = sourceLines.next(); stream << line << "\n"; } outSource.close(); } // ----------------------------------------------------------------------------- // // ----------------------------------------------------------------------------- void replaceText1(QString hFile, QString cppFile) { QFile h(hFile); h.open(QFile::ReadOnly); QString header = h.readAll(); h.close(); QFile source(cppFile); source.open(QFile::ReadOnly); QString cpp = source.readAll(); 
QStringList varNames; QStringList typeNames; bool doReplace = false; QString searchStr("size_t totalFeatures = m->getAttributeMatrix(getCellFeatureAttributeMatrixName())->getNumberOfTuples();"); int index = cpp.indexOf(searchStr); int hIndex = -1; if(index > 0) { hIndex = header.indexOf("SIMPL_INSTANCE_STRING_PROPERTY(CellFeatureAttributeMatrixName)"); if(hIndex < 0) // This class does not have a Feature Attribute Matrix Name { cpp.replace(searchStr, "size_t totalFeatures = 0;"); doReplace = true; } } searchStr = QString("size_t totalEnsembles = m->getAttributeMatrix(getCellEnsembleAttributeMatrixName())->getNumberOfTuples();"); index = cpp.indexOf(searchStr); hIndex = -1; if(index > 0) { hIndex = header.indexOf("SIMPL_INSTANCE_STRING_PROPERTY(CellEnsembleAttributeMatrixName)"); if(hIndex < 0) // This class does not have a Ensemble Attribute Matrix Name { cpp.replace(searchStr, "size_t totalEnsembles = 0;"); doReplace = true; } } //// WRITE THE HEADER BACK OUT TO A FILE if(doReplace == true) { QFileInfo fi(cppFile); #if 0 QFile hOut("/tmp/junk/" + fi.fileName()); #else QFile hOut(cppFile); #endif hOut.open(QFile::WriteOnly); QTextStream stream(&hOut); stream << cpp; hOut.close(); qDebug() << "Saved File " << cppFile; } } // ----------------------------------------------------------------------------- // // ----------------------------------------------------------------------------- void replaceText(QString hFile, QString cppFile) { QFile h(hFile); h.open(QFile::ReadOnly); QString header = h.readAll(); h.close(); QFile source(cppFile); source.open(QFile::ReadOnly); QString cpp = source.readAll(); QStringList varNames; QStringList typeNames; int index = cpp.indexOf("cellAttrMat->getPrereqArray<"); int endIdx = 0; bool doReplace = false; while(index > 0) { endIdx = cpp.indexOf(";", index); QString sub = cpp.mid(index, endIdx - index); sub = sub.section(',', 2, 2); sub = sub.trimmed(); QString _type = cpp.mid(index, endIdx - index); endIdx = 
sub.indexOf("AttributeMatrixName"); sub = sub.mid(2, endIdx - 2); varNames << sub; doReplace = true; index = cpp.indexOf("cellAttrMat->getPrereqArray<", index + 1); // break; } if(doReplace == false) { return; } doReplace = false; // Now open the header file and read that. for(int i = 0; i < varNames.size(); ++i) { QString s("SIMPL_INSTANCE_STRING_PROPERTY(" + varNames.at(i) + "AttributeMatrixName)"); index = header.indexOf(s, 0); if(index < 0) // Not found { QString dc("SIMPL_INSTANCE_STRING_PROPERTY(SurfaceDataContainerName)\n"); int idx = header.indexOf(dc, 0); if(idx > 0) { header.replace(dc, dc + " " + s + "\n"); doReplace = true; } } } //// WRITE THE HEADER BACK OUT TO A FILE if(doReplace == true) { QFileInfo fi(hFile); #if 0 QFile hOut("/tmp/junk/" + fi.fileName()); #else QFile hOut(hFile); #endif hOut.open(QFile::WriteOnly); QTextStream stream(&hOut); stream << header; hOut.close(); qDebug() << "Saved File " << hFile; } } // ----------------------------------------------------------------------------- // // ----------------------------------------------------------------------------- void fixDataCheck(QString hFile, QString cppFile) { // Read the Source File QFileInfo fi(cppFile); QFile source(cppFile); source.open(QFile::ReadOnly); QString cpp = source.readAll(); source.close(); QStringList list; QStringList initializerList; list = cpp.split(QRegExp("\\n")); QString dataCheck = QString("void ") + fi.baseName() + QString("::dataCheck("); QString preflight = QString("void ") + fi.baseName() + QString("::preflight()"); bool needCellAttrGet = false; bool needCellAttrCreate = false; bool hasCellGet = false; bool hasCellCreate = false; bool alreadyHasDecl = false; QStringListIterator sourceLines(list); while(sourceLines.hasNext()) { QString line = sourceLines.next(); if(line.contains(dataCheck)) { // We are in the constructor bool stop = false; while(stop == false) { line = sourceLines.next(); if(line.contains("cellEnsembleAttrMat->get")) { hasCellGet = true; } 
if(line.contains("cellEnsembleAttrMat->create")) { hasCellCreate = true; } if(line.contains("AttributeMatrix* cellEnsembleAttrMat =")) { alreadyHasDecl = true; } if(line.contains(preflight)) { stop = true; } if(sourceLines.hasNext() == false) { stop = true; } } break; } } if(hasCellGet == true && alreadyHasDecl == false) { needCellAttrGet = true; } else if(hasCellCreate == true && alreadyHasDecl == false) { needCellAttrCreate = true; } if(!needCellAttrGet && !needCellAttrCreate) { return; } qDebug("Updating %s", qPrintable(cppFile)); #if 0 QFile fout("/tmp/out.cpp"); #else QFile fout(cppFile); #endif fout.open(QFile::WriteOnly); QTextStream out(&fout); sourceLines = QStringListIterator(list); while(sourceLines.hasNext()) { QString line = sourceLines.next(); if(line.contains(dataCheck)) { out << line << "\n"; // We are in the constructor bool stop = false; while(stop == false) { line = sourceLines.next(); if(line.contains("VolumeDataContainer* m = getDataContainerArray()")) { out << line << "\n"; line = sourceLines.next(); out << line << "\n"; if(needCellAttrGet) { out << " AttributeMatrix::Pointer cellEnsembleAttrMat = m->(this, getCellEnsembleAttributeMatrixName(), -301);\n"; out << " if(getErrorCondition() < 0) { return; }\n"; } if(needCellAttrCreate) { out << " AttributeMatrix::Pointer cellEnsembleAttrMat = m->createNonPrereqAttributeMatrix(this, getCellEnsembleAttributeMatrixName(), " "SIMPL::AttributeMatrixType::CellEnsemble);\n"; out << " if(getErrorCondition() < 0) { return; }\n"; } stop = true; } else { out << line << "\n"; } if(line.contains(preflight)) { out << line << "\n"; stop = true; } } } else { if(sourceLines.hasNext()) { out << line << "\n"; } } } fout.close(); } void scanDirIter(QDir dir) { QDirIterator iterator(dir.absolutePath(), QDirIterator::Subdirectories); while(iterator.hasNext()) { iterator.next(); if(!iterator.fileInfo().isDir()) { QString filename = iterator.fileName(); if(filename.endsWith(".cpp")) { // qDebug("Found %s matching 
pattern.", qPrintable(filename)); QFileInfo fi(iterator.filePath()); QString header = fi.path() + "/" + fi.baseName() + ".h"; QString source = iterator.filePath(); fixDataCheck(header, source); } } } } int main(int argc, char* argv[]) { Q_ASSERT(false); // We don't want anyone to run this program. //#if 0 // QString header = argv[1]; // QString source = argv[2]; // buildInitializerList(header, source); //#else // scanDirIter(UnitTest::DREAM3DProjDir + QString("/Source/DREAM3DLib")); // scanDirIter(UnitTest::DREAM3DProjDir + QString("/Source/Plugins")); //#endif return 0; }
def bucket_stats(buckets):
    """Summarize the size distribution of hash buckets.

    Only buckets holding at least two items are counted, since singleton
    buckets imply no pairwise comparisons.

    Args:
        buckets: mapping from bucket key to a sized collection of items.

    Returns:
        Tuple of (mean bucket size, median bucket size, total number of
        pairwise comparisons implied by the buckets). Returns (0.0, 0.0, 0)
        when no bucket has more than one item -- this avoids the NaN (and
        RuntimeWarning) np.mean/np.median produce on an empty array.
    """
    bucket_dist = np.array([len(x) for x in buckets.values() if len(x) > 1],
                           dtype=np.uint64)
    if bucket_dist.size == 0:
        return 0.0, 0.0, 0
    # Each bucket of size k contributes k*(k-1)/2 candidate pairs.
    comparisons_dist = np.array([x * (x - 1) // 2 for x in bucket_dist],
                                dtype=np.uint64)
    mean_bucket_size = np.mean(bucket_dist)
    median_bucket_size = np.median(bucket_dist)
    num_pairwise_comparisons = np.sum(comparisons_dist)
    return mean_bucket_size, median_bucket_size, num_pairwise_comparisons
/*
 * Copyright 2018 Karlsruhe Institute of Technology.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.kit.ocrd.workspace.entity;

import com.arangodb.springframework.annotation.Document;
import com.arangodb.springframework.annotation.HashIndex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.annotation.Id;

/**
 * This class contains all important information of a METS document. These are:
 * <p>
 * <ul>
 * <li>Title</li>
 * <li>Subtitle</li>
 * <li>PPN</li>
 * <li>Year</li>
 * <li>License</li>
 * <li>Author</li>
 * <li>Publisher</li>
 * <li>No of pages</li>
 * <li>Physical description</li>
 * </ul><p>
 * Persisted in the ArangoDB collection "metsProperties"; the combination of
 * resourceId and version is unique.
 */
@Document("metsProperties")
@HashIndex(fields = {"resourceId", "version"}, unique = true)
public class MetsProperties {

  /**
   * Logger.
   */
  private static final Logger LOGGER = LoggerFactory.getLogger(MetsProperties.class);

  /**
   * Placeholder if no title is available.
   */
  public static final String NO_TITLE = "No title available!";
  /**
   * Placeholder if no PPN is available.
   */
  public static final String NO_PPN = "No PPN available!";
  /**
   * ID of the document (database-generated).
   */
  @Id
  private String id;
  /**
   * Resource Identifier for Document.
   */
  private String resourceId;
  /**
   * Title of resource.
   */
  private String title;
  /**
   * Sub title of the document.
   */
  private String subTitle;
  /**
   * Year of publication.
   */
  private String year;
  /**
   * License of document.
   */
  private String license;
  /**
   * Author of the document.
   */
  private String author;
  /**
   * Number of pages (images).
   */
  private int noOfPages;
  /**
   * Publisher.
   */
  private String publisher;
  /**
   * Physical description.
   */
  private String physicalDescription;
  /**
   * PPN of resource.
   */
  private String ppn;

  /**
   * Default constructor for MetsFile. Title and PPN start as their
   * "not available" placeholders.
   */
  public MetsProperties() {
    super();
    title = NO_TITLE;
    ppn = NO_PPN;
  }

  /**
   * Get database ID.
   *
   * @return the id
   */
  public String getId() {
    return id;
  }

  /**
   * Set database ID. (Shouldn't be used; the ID is managed by the database.)
   *
   * @param id the id to set
   */
  public void setId(String id) {
    this.id = id;
  }

  /**
   * Get resourceId of the METS document.
   *
   * @return Resource ID of the METS document.
   */
  public String getResourceId() {
    return resourceId;
  }

  /**
   * Set resourceId of the METS document.
   *
   * @param resourceId Resource ID of the METS document.
   */
  public void setResourceId(String resourceId) {
    this.resourceId = resourceId;
  }

  /**
   * Get title of resource.
   *
   * @return the title
   */
  public String getTitle() {
    return title;
  }

  /**
   * Set title of resource.
   *
   * @param title the title to set
   */
  public void setTitle(String title) {
    this.title = title;
  }

  /**
   * Get subtitle of document.
   *
   * @return the subTitle
   */
  public String getSubTitle() {
    return subTitle;
  }

  /**
   * Set subtitle of document.
   *
   * @param subTitle the subTitle to set
   */
  public void setSubTitle(String subTitle) {
    this.subTitle = subTitle;
  }

  /**
   * Get year of publication.
   *
   * @return the year
   */
  public String getYear() {
    return year;
  }

  /**
   * Set year of publication.
   *
   * @param year the year to set
   */
  public void setYear(String year) {
    this.year = year;
  }

  /**
   * Get license of the document.
   *
   * @return the license
   */
  public String getLicense() {
    return license;
  }

  /**
   * Set license of the document.
   *
   * @param license the license to set
   */
  public void setLicense(String license) {
    this.license = license;
  }

  /**
   * Get author of document.
   *
   * @return the author
   */
  public String getAuthor() {
    return author;
  }

  /**
   * Set author of document.
   *
   * @param author the author to set
   */
  public void setAuthor(String author) {
    this.author = author;
  }

  /**
   * Get number of pages.
   *
   * @return the noOfPages
   */
  public int getNoOfPages() {
    return noOfPages;
  }

  /**
   * Set number of pages.
   *
   * @param noOfPages the noOfPages to set
   */
  public void setNoOfPages(int noOfPages) {
    this.noOfPages = noOfPages;
  }

  /**
   * Get publisher.
   *
   * @return the publisher
   */
  public String getPublisher() {
    return publisher;
  }

  /**
   * Set publisher.
   *
   * @param publisher the publisher to set
   */
  public void setPublisher(String publisher) {
    this.publisher = publisher;
  }

  /**
   * Get physical description.
   *
   * @return the physicalDescription
   */
  public String getPhysicalDescription() {
    return physicalDescription;
  }

  /**
   * Set physical description.
   *
   * @param physicalDescription the physicalDescription to set
   */
  public void setPhysicalDescription(String physicalDescription) {
    this.physicalDescription = physicalDescription;
  }

  /**
   * Get the ppn of resource.
   *
   * @return the ppn
   */
  public String getPpn() {
    return ppn;
  }

  /**
   * Set ppn of resource.
   *
   * @param ppn the ppn to set
   */
  public void setPpn(String ppn) {
    this.ppn = ppn;
  }

  @Override
  public String toString() {
    return "MetsProperties [id=" + id + ", resourceId=" + resourceId + ", title=" + title + ", ppn=" + ppn + "]";
  }
}
Most often we see dash cam footage of people doing the craziest things in traffic, but this time we have an extraordinary video! This lady’s dash cam actually captured two idiots who attempted to defraud her insurance by staging a traffic accident! This is just hilarious. Two guys stand on the side of the street and when the lady drives near them, one of the guys pushes his bike in front of the car, then immediately runs backwards towards the car and jumps on the hood! We are laughing already, because this staging was as terrible as they come! However, neither of the men were prepared for the driver of the car to get out and reveal that the camera on her dashboard had been recording their pathetic attempts to get a payout from the start. The lady, visibly and audibly distraught, got out of the car, phone in hand to call the police, when the other dude, obviously the first one’s accomplice, comes over telling the lady that he recorded everything on his telephone. The moment she hears their attempt at a scam, she screams “You’re mad, you’re absolutely mad. I’ve got a camera. Do you understand you’re on video, you complete idiot?” This was just plain gold! The moment they heard the word “camera”, the two amateur scammers grabbed their stuff and got away from the scene as quickly as they could as the woman rang the police. The reveal is a shock to the two men, who immediately try to cover their faces and get away from the scene as quickly as they can as the woman rings police. According to reports, the scam attempted on this motorist is already widely prevalent around the world. This is one of the reasons why you should own a dash cam.
The De Wet Apostle paintings in the chapel at Glamis Castle SUMMARY The paintings in the chapel at Glamis Castle were commissioned from the Dutch artist Jacob de Wet in 1688 by Patrick, first earl of Strathmore. De Wet created a successful decorative scheme in the chapel as stipulated by the earl by the combination of two quite different pictorial sources: the ceiling panels contain scenes from the life of Christ after Boetius a Bolswert, first published in 1622, and the wall panels show Apostles originally after A I Callot published in 1631. A number of discrepancies exist between the panels and the source material, some of which can be explained by the popular plagiarism carried out by lesser engravers in the 17th century. It is generally accepted that the quality of art being produced in Britain at this time was far inferior to that of the rest of Europe. However, de Wet's work in the chapel at Glamis is an effective and skilful realization of a decorative scheme within the terms of a restrictive commission.
Image caption Lionel Messi's No.10 shirt is a favourite among young Palestinians Gaza football fans have defied a Hamas order not to watch a match between Spanish giants Barcelona and Real Madrid. Hamas, which governs Gaza, is angry that an Israeli soldier attended the game. But thousands watched the 2-2 draw on TV. Jon Donnison explains why the fixture matters so much in Gaza. Barcelona Football Club arguably brings more joy to Palestinians than any other institution in the world. That's certainly my impression after almost three years in Gaza and the West Bank. Every time the Catalans take to the field you can guarantee you'll struggle to get a table in the bars of Ramallah and the shisha-infused coffee shops of Gaza City. On match day tradesmen touting maroon and gold Barca jerseys set up shop at Qalandia, the traffic-infested Israeli military checkpoint that separates Ramallah from East Jerusalem. I've lost track of the number of mini Messis I've seen dribbling their way around dusty backstreets. And if Messi senior and co manage to pull off a win, as of course they nearly always do, the night air is filled with the sound of car horns as supporters crane out of their sunroofs and parade round the streets. Of course, as the best team in the world in recent years, Barcelona FC has support across the globe. Everyone loves a winner. But Palestinians seem to have a particular affinity for the club. "We can identify with the Catalans and their struggle against the great power of Madrid, like the way we struggle against Israel," one supporter in Ramallah told me with a slightly far-fetched simile. I can't remember too many signs of military occupation the last time I was strolling down Las Ramblas. Barca rules Image caption Palestinians say they identify with Catalan Barcelona's struggle with Spain Of course there are a good number of Real Madrid fans here too but they're definitely outnumbered. 
You certainly don't meet too many people cheering for Manchester United, Arsenal or Chelsea, let alone my team Bolton Wanderers. For Palestinians, Barca rules. It was with some surprise then that I learned that there were calls for Palestinians to boycott Barcelona FC and in particular this weekend's showdown with Real Madrid, El Clasico. If ever there was an institution that Palestinians might struggle to turn their backs on, I thought, it would be their beloved Barcelona. But this is a classic example of how there are few things untouched by politics in this part of the world. The controversy all started when Israeli government officials put in a request to Barcelona FC for a former Israeli soldier to be able to attend Sunday's game. Not just any soldier, though. Gilad Shalit is the young man who spent more than five years as a hostage in Gaza after being captured by Palestinian militants in 2006. He was eventually freed last year after the Islamist movement Hamas, which governs in Gaza, agreed a prisoner swap with Israel in which more than 1,000 Palestinian prisoners were handed over in exchange. Mr Shalit has kept a low profile since his release, with his parents asking for privacy to allow their son to re-adjust to life as a free man. The 26-year-old is a keen football fan and is believed to support Real Madrid. Boycott call Israeli officials put in a request to Barcelona FC, asking whether Mr Shalit could attend the game at the Camp Nou stadium. The club said yes. But when the news hit the media, some Palestinian groups called for Barcelona to withdraw their invitation and threatened a boycott. Hamas officials in Gaza were quoted as saying Barcelona games would no longer be broadcast on television in the Palestinian coastal territory, without adding how they would go about stopping this from happening in practice. Barcelona FC realised they had opened something of a can of worms. 
"I watch football and support Barcelona, because I see only sports, I don't see politics - I won't allow for Israel to deny us from this fun time" — Nasser Ziad, Barca fan from Gaza Club officials quickly announced that it had also invited three Palestinian representatives to the game, including a Palestinian footballer who spent three years detained in an Israeli jail without ever being formally charged. Mahmoud Sarsak, who has played for the Palestine national side, was eventually released in July this year after having been on hunger strike - taking only water and vitamins - for three months in protest at his detention. Israel believes Sarsak is a member of the Palestinian militant group Islamic Jihad. Sarsak is a popular figure for Palestinians and Barcelona had hoped that by inviting him to counterbalance Shalit the club might be able to put an end to the controversy. Not so. Sarsak announced in Gaza that he would not be attending the match on principle. "I refuse to sit in the same place with a killer who came on a military tank," the former prisoner said, referring to Shalit. "I respect Barcelona's invitation, but I have to avoid angering the Palestinian people and their supporters as well as all those who supported me during my hunger strike." Own goal Sarsak added that he also respected those who believed he should attend the match in order to represent the suffering of Palestinians but said that after giving it a lot of thought he would boycott the game. Image caption The bright lights of the Nou Camp stadium are likely to remain a big draw for Palestinians watching on television Hamas has been keen to trumpet Sarsak's decision, although I suspect they may have plenty of now closeted Barcelona supporters among their ranks. It's worth noting that this row has also highlighted the ability of Hamas and their secular rivals Fatah, who are in power in parts of the West Bank, to disagree on just about anything.
The two other Palestinians offered tickets by Barcelona were Jibril Rajoub, a leading Fatah figure and head of the Palestinian Football Federation and also the Fatah dominated Palestine Liberation Organisation's ambassador to Spain, Musa Amer Odeh. When contacted by the BBC on Thursday, Mr Rajoub confirmed he and Ambassador Odeh would not be following Mahmoud Sarsak's example and would be attending the game. Hamas is likely to try to make political hay. It's hard not to feel, though, that Hamas has scored something of an own goal here, if you'll pardon the pun. Most Barcelona fans I've spoken to here have appeared gobsmacked when I've suggested they might put political principles before El Clasico. "Are you kidding me?" has been the most common reaction. "I watch football and support Barcelona, because I see only sports, I don't see politics. I won't allow for Israel to deny us from this fun time," says Nasser Ziad, a 25-year-old Barca fan in Gaza. "I was so sad and disappointed when they announced that the Israeli soldier Gilad Shalit will attend the Clasico but, personally speaking, I wish Sarsak had accepted the invitation to the match," he said. "I don't see it as a way of normalisation with Israel. It would have brought more attention to our cause." Thirty-year-old Ahmed Shafik agreed: "We should turn this into an Israeli-Palestinian Clasico. Sarsak should have gone to Camp Nou and used his presence for the good of our cause." I did not find any Barcelona fans in either Gaza or the West Bank who said they wouldn't be positioning themselves in front of a big screen on Sunday night. Most Palestinians probably don't remember Bill Shankly, the legendary former Liverpool manager. But they would probably have appreciated his most famous quote: "Some people believe football is a matter of life and death. I am very disappointed with that attitude. I can assure you it is much, much more important than that."
Put this chart down for one that helps support the narrative that all millennials are losers that live in their parents' basement. Via Deutsche Bank's Torsten Sløk, we find that nearly 20% of men between the ages of 25 and 34 live at home. As a point of comparison, about 12% of women this age are living at home. It's unclear if these home-dwellers are living in the basement or not. Deutsche Bank Last year, Sløk highlighted a similar chart showing that about a third of 18- to 34-year-old Americans were living at home, though when you skew this age cohort younger you get a lot of young people who live at home while attending school. But this latest figure shows that, basically, even after school young people aren't moving out that quickly. Ultimately, you can see this dynamic in one of two ways: either the number of young people living at home is a bullish force for the housing market going forward, or this is indicative of a lost generation that has suffered the ill-effects of a deep recession, massive student loan debts, and a general proclivity for incompetence. The truth is probably somewhere in the middle, as tends to be the case.
/*
 * Copyright 1999-2018 Alibaba Group Holding Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.nacos.api.naming.pojo.healthcheck.impl;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.Map;

import static org.junit.Assert.*;

/**
 * Unit tests for the {@code Http} health checker: custom-header parsing and
 * JSON serialization/deserialization round trips.
 */
public class HttpTest {

    private ObjectMapper objectMapper;

    private Http http;

    @Before
    public void setUp() {
        objectMapper = new ObjectMapper();
        http = new Http();
    }

    @Test
    public void testGetCustomHeadersWithEmpty() {
        // An empty header string must yield no custom headers.
        http.setHeaders("");
        assertTrue(http.getCustomHeaders().isEmpty());
    }

    @Test
    public void testGetCustomHeadersWithoutEmpty() {
        // "x:a|y:" contains one complete pair; "y" has an empty value and,
        // per the assertions below, is not kept in the parsed map.
        http.setHeaders("x:a|y:");
        Map<String, String> actual = http.getCustomHeaders();
        assertFalse(actual.isEmpty());
        assertEquals(1, actual.size());
        assertEquals("a", actual.get("x"));
    }

    @Test
    public void testSerialize() throws JsonProcessingException {
        http.setHeaders("x:a|y:");
        http.setPath("/x");
        String actual = objectMapper.writeValueAsString(http);
        // The serialized form carries the configured fields plus the type
        // discriminator and the default expected response code.
        assertTrue(actual.contains("\"path\":\"/x\""));
        assertTrue(actual.contains("\"type\":\"HTTP\""));
        assertTrue(actual.contains("\"headers\":\"x:a|y:\""));
        assertTrue(actual.contains("\"expectedResponseCode\":200"));
    }

    @Test
    public void testDeserialize() throws IOException {
        String testChecker =
                "{\"type\":\"HTTP\",\"path\":\"/x\",\"headers\":\"x:a|y:\",\"expectedResponseCode\":200}";
        Http actual = objectMapper.readValue(testChecker, Http.class);
        assertEquals("x:a|y:", actual.getHeaders());
        assertEquals("/x", actual.getPath());
        assertEquals(200, actual.getExpectedResponseCode());
        assertEquals(Http.TYPE, actual.getType());
    }
}
// NewBpeFromFiles create BPE model from vocab and merges files func NewBpeFromFiles(vocab, merges string) (*BPE, error) { b := NewBpeBuilder() b.Files(vocab, merges) return b.Build() }
package gdp

import (
	"net/http"
	"reflect"
	"testing"
	"time"

	"github.com/jarcoal/httpmock"
	"github.com/jivesearch/jivesearch/instant/econ"
)

// TestFetch exercises WorldBank.Fetch against a mocked World Bank API
// response and checks that the parsed GDP history matches expectations.
func TestFetch(t *testing.T) {
	httpmock.Activate()
	defer httpmock.DeactivateAndReset()

	// args holds the Fetch parameters for one table-driven case.
	type args struct {
		country string
		from    time.Time
		to      time.Time
	}

	for _, tt := range []struct {
		name string
		args // embedded: fields are reachable as tt.args.country etc.
		u    string
		resp string
		want *Response
	}{
		{
			name: "basic",
			u:    `http://api.worldbank.org/v2/countries/IT/indicators/NY.GDP.MKTP.CD`,
			resp: `<?xml version="1.0" encoding="utf-8"?><wb:data page="1" pages="2" per_page="50" total="58" lastupdated="2018-07-25" xmlns:wb="http://www.worldbank.org"><wb:data><wb:indicator id="NY.GDP.MKTP.CD">GDP (current US$)</wb:indicator><wb:country id="IT">Italy</wb:country><wb:countryiso3code>ITA</wb:countryiso3code><wb:date>2017</wb:date><wb:value>1934797937411.33</wb:value> <wb:unit/> <wb:obs_status/> <wb:decimal>0</wb:decimal></wb:data><wb:data><wb:indicator id="NY.GDP.MKTP.CD">GDP (current US$)</wb:indicator><wb:country id="IT">Italy</wb:country><wb:countryiso3code>ITA</wb:countryiso3code><wb:date>2016</wb:date><wb:value>1859383610248.72</wb:value> <wb:unit/> <wb:obs_status/> <wb:decimal>0</wb:decimal></wb:data></wb:data>`,
			args: args{
				country: "IT",
				from:    time.Date(1930, 12, 31, 0, 0, 0, 0, time.UTC),
				to:      time.Date(2018, 12, 31, 0, 0, 0, 0, time.UTC),
			},
			want: &Response{
				History: []Instant{
					{time.Date(2017, 12, 31, 0, 0, 0, 0, time.UTC), 1934797937411.33},
					{time.Date(2016, 12, 31, 0, 0, 0, 0, time.UTC), 1859383610248.72},
				},
				Provider: econ.TheWorldBankProvider,
			},
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			responder := httpmock.NewStringResponder(200, tt.resp)
			// NOTE(review): the original carried a "no responder found????"
			// debug comment here. RegisterResponder is given the bare URL; if
			// Fetch appends query parameters the mock may not match — confirm
			// against httpmock's URL-matching rules.
			httpmock.RegisterResponder("GET", tt.u, responder)

			w := &WorldBank{
				HTTPClient: &http.Client{},
			}

			got, err := w.Fetch(tt.args.country, tt.args.from, tt.args.to)
			if err != nil {
				t.Fatal(err)
			}

			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("got %+v, want %+v", got, tt.want)
			}
		})
	}

	// Drop all registered responders after the table has run.
	httpmock.Reset()
}
/**
 * This plugin creates a panel which allows testing of custom QML dialogs
 *
 * @ingroup plugins
 * @image html plg_dialog_test.png
 * @note Requires Plugins:
 *       - @ref coreplugins
 */
class DialogTestPlugin : public PluginMain, public Depends<IViewCreator>
{
public:
    /// Registers the reflected type definitions the panel relies on.
    /// Runs in the PostLoad phase, before Initialise().
    bool PostLoad(IComponentContext& context)
    {
        auto definitionManager = context.queryInterface<IDefinitionManager>();
        assert(definitionManager != nullptr);
        definitionManager->registerDefinition<TypeClassDefinition<DialogTestPanel>>();
        definitionManager->registerDefinition<TypeClassDefinition<DialogReflectedData>>();
        return true;
    }

    /// Creates the test panel object and requests the QML view for it.
    /// createView() returns a future; the view itself is only resolved
    /// later, in Finalise().
    void Initialise(IComponentContext& context)
    {
        auto definitionManager = context.queryInterface<IDefinitionManager>();
        assert(definitionManager != nullptr);

        dialogPanel_ = ManagedObject<DialogTestPanel>::make();
        dialogPanel_->initialise();
        dialogView_ = get<IViewCreator>()->createView("plg_dialog_test/dialog_test_panel.qml",
                                                      dialogPanel_.getHandleT());
    }

    /// Removes and destroys the view (if it was ever created), then releases
    /// the panel object.
    bool Finalise(IComponentContext& context)
    {
        auto uiApplication = context.queryInterface<IUIApplication>();
        assert(uiApplication != nullptr);
        if (dialogView_.valid())
        {
            // NOTE(review): get() on the future presumably blocks until the
            // view creation completes — confirm wg_future semantics.
            auto view = dialogView_.get();
            uiApplication->removeView(*view);
            // Destroy the view first; the panel it may reference is released
            // on the next line.
            view = nullptr;
        }
        dialogPanel_ = nullptr;
        return true;
    }

    void Unload(IComponentContext& context)
    {
    }

private:
    // Owning handle to the panel's reflected data object.
    ManagedObject<DialogTestPanel> dialogPanel_ = nullptr;
    // Future for the asynchronously created QML view.
    wg_future<std::unique_ptr<IView>> dialogView_;
};
Obviously it's very unlikely that Sony would use any cast members from Sam Raimi's original Spider-Man flicks in their rebooted franchise, but if it was ever on the table, it sounds like the awesome Alfred Molina would grab the opportunity with eight hands. “That was the most fun I think I’ve ever had on a movie of that kind, you know, those big, big sort of features where you spend like six months hanging off a wire, you know, and that stuff. But it was the first movie of that kind that I’ve ever been involved in. I had a wonderful time. I loved it. I mean, I’d go back and do it again in a heartbeat.” When (and if) Sony eventually bring their planned movie to our screens, word is that it'll be Doctor Octopus leading the team. One of Spidey's most recognizable foes, Doc Ock was the villain in Sam Raimi's Spider-Man 2, where he was played brilliantly by British actor Alfred Molina -- so is there any hope for a role reprisal? Well, since Marc Webb's movies kick-started a rebooted franchise it seems unlikely, but while chatting to Molina about his new movie, Collider asked him about it anyway. But Molina is well aware of how these things work. It is a shame though, especially when you consider how well regarded Raimi's trilogy (okay, the first two films!) are when compared to Webb's reboot and its sequel. When it does come time to cast the character again though, there's no doubt they'll have lots of new talent banging on the door. Even Molina's co-star John Lithgow fancies the gig. I'd watch it! More at the link below.
from pyQuARC.code.checker import Checker

from .fixtures.checker import FUNCTION_MAPPING
from .common import read_test_metadata


class TestChecker:
    """Unit tests covering the ``Checker`` entry points in checker.py."""

    def setup_method(self):
        # Build a fresh checker and metadata fixture for every test method.
        self.checker = Checker()
        self.test_metadata = read_test_metadata()

    def test_run(self):
        # Running the checker over the fixture metadata must produce a
        # truthy result payload.
        assert self.checker.run(self.test_metadata)

    def test_map_to_function(self):
        cases = zip(FUNCTION_MAPPING["input"], FUNCTION_MAPPING["output"])
        for case, expected_callable in cases:
            mapped = self.checker.map_to_function(
                case["datatype"], case["function"]
            )
            # The mapping either resolves to a callable or it does not; the
            # fixture records which outcome is expected for each case.
            assert callable(mapped) == expected_callable
Psychotherapy and transsexualism We questioned whether transsexuals always require the psychotherapy demanded by the health insurance system in Germany. For this purpose, we examined 430 transsexuals who came to our facility between 1988 and 2006. At the first consultation after the history was taken, they filled out standardised questionnaires, which are needed for proper diagnosis of psychotherapeutic treatment. These questionnaires included the Complaint Questionnaire (BFB), the Behaviour Questionnaire (VFB) and the Freiburg Personality Inventory (FPI‐A). It was found that two‐thirds of all transsexuals do not require deeper psychotherapy. However, there was evidence of personality deficits in a subgroup of transsexuals for whom supportive psychotherapy should be recommended. The conditions under which psychotherapy and other forms of support are successful are discussed.
/**
 * Delete the entities with the given keys asynchronously.
 *
 * <p>Convenience varargs overload: wraps the keys in a list and delegates to
 * the collection-based {@code deleteByKeyAsync} overload.
 *
 * @param keys Keys of the entities to delete.
 * @return Callback that can be used to complete the delete operation later.
 */
@Nonnull
@SuppressWarnings("unchecked") // generic varargs: the implicit Key<E>[] is unchecked
default Runnable deleteByKeyAsync(Key<E>... keys) {
    return deleteByKeyAsync(Arrays.asList(keys));
}
/**
 * Given an array of WordNet nouns, return an outcast: the noun whose total
 * distance to every other noun in the array is greatest.
 *
 * @param nouns array of nouns; must be non-null and non-empty.
 * @return Furthest noun to the rest of nouns.
 * @throws IllegalArgumentException if {@code nouns} is null or empty
 *         (previously this fell through to an ArrayIndexOutOfBoundsException
 *         with index -1).
 */
public String outcast(String[] nouns) {
    if (nouns == null || nouns.length == 0) {
        throw new IllegalArgumentException("nouns must be a non-empty array");
    }

    int maxDistance = -1;
    int maxDistanceIdx = -1;

    for (int i = 0; i < nouns.length; i++) {
        // Sum of distances from nouns[i] to every other noun.
        int distance = 0;
        for (int j = 0; j < nouns.length; j++) {
            if (i != j) {
                distance += this.wordNet.distance(nouns[i], nouns[j]);
            }
        }
        if (distance > maxDistance) {
            maxDistance = distance;
            maxDistanceIdx = i;
        }
    }
    return nouns[maxDistanceIdx];
}
MOLECULAR DIAGNOSIS AND EVOLUTIONARY RELATIONSHIP ANALYSIS OF PLANT PARASITIC TEA GARDEN NEMATODES FROM DIFFERENT TEA ESTATES IN SYLHET REGION OF BANGLADESH Nematodes from plant-parasitic sources are ever-present and incidental to plant growth as well as crop production. The damage of tea gardens caused by nematode is often non-specific and easily confused with symptoms. The present study determined the parasitic and non-parasitic nematodes population in different tea gardens of the Sylhet region by their morphological and partial molecular characterization. Out of 13 tea gardens, it was observed that BTRI, Karimpur, Mathiura, and Tarapur tea garden has the highest number of parasitic and non-parasitic nematodes. After PCR amplification, DNA bands with desired amplicon size were detected by gel electrophoresis. Among thirteen soil samples, nematodes from Malnichara, Karimpur, BTRI, Mathiura , and Finlay had partially confirmed the presence of rootknot nematode (Meloidogyne spp.), root-lesion nematode (Pratylenchus brachyurus), burrowing nematode (Radopholus similis), reniform nematode (Rotylenchulus reniformis) and lance nematode (Hoplolaimus columbus) consequently based on approximately base pair of 1.7, 1.1 and 0.52 kb (different Meloidogyne spp.) 0.52, 0.52, 0.25 and 2.3 kb of specific genes. From evolutionary analysis, it might be said that Meloidogyne species are strongly related with each other making clusters except Meloidogyne natalie where this one is closely related with Hoplolaimus columbus in their evolutionary relationship as remaining others (Rotylenchulus reniformis, Radopholus similis, Pratylenchus brachyurus) are in different clusters in the same clade and this result could be confirmed after sequencing. Introduction Tea, Camellia sinensis, is a principal perennial cash crop in Bangladesh that has a great impact on agricultural export and domestic demand for sustaining the economy of Bangladesh. 
Tea is a popular nonalcoholic drink made from the leaves of an evergreen shrub or tree of Camellia sinensis belonging to the family Theaceae over the world. Tea is cultivated as a monoculture over large adjacent areas during the last 165 years and generated a strong tea ecosystem for various insects, mites, and nematodes in Bangladesh. Sylhet, Chittagong, and Phanchagarh district is the main ecological zone for the cultivation of tea in Bangladesh. The spreading and persistency of these insects, mites, and nematodes are responsible for the performance of shade trees, ancillary crops forests, a uniformity of cultural practices such as sequential pruning cycles, weekly plucking rounds, weeding, mulching, and so on. Above 40 species, nearness 20 genera of pest of nematode have been identified in different tea gardens across the world (Mamun 2011). In the tea cultivation area in Bangladesh, 71 insects, mite, and nematode species belonging to 45 families under 14 orders are recorded as pests of tea, and among them, 25.35% of species are foliar insects and mites, 26.76% are soil insects and nematodes, 21.13% are beneficial insects and 26.76% are butterflies. Different plant parts of tea such as leaf, root, stem, flower, and seed are affected by these pests of insects, mites, and nematodes that results a loss of production about 15% every year (Azad et al. 2020). Soil nematodes are tiny (0.3 -5.0 mm) worm-like animals that are plethoric and numerous in all told soils (Yeates and Bongers 1999). They play a crucial role in the decomposition of organic matter and nutrient cycling. One of the foremost goals of property agriculture ought to be to boost populations of free-living nematodes and scale back that of plant-parasitic nematodes. Diagnosis of plant-parasitic nematodes associated with tea garden issues is difficult as a result of symptoms varying with the environmental conditions and the plant growth stage. 
Owing to the wide array of disorders that would be caused by nematodes, an associate degree assay of soil and root for nematodes is important. Some symptoms like root-knot, discoloration of tea bushes, and root lesion are appeared with poignant of various species of the nematode as early recognized (Kamunya et al. 2008). In our country, most of the tea garden uses pesticides for controlling pest for better leaf production. Farmers do not perform accurate diagnosis in many cases during the control procedure for tea pests. Accurate identification of individual species as well as characterization of pathogenicity of tea garden plant-parasitic nematodes are regarded as indispensable tasks for designing effective diagnosis, treatment and management strategies of nematode infections or probable causes of host-plant resistance against plant-parasitic nematodes associated with tea plants (Yeates et al. 2015). In modern nematode taxonomy, molecular systematics is a very important tool (Subbotin 2005, Ye et al. 2007). For a further appropriate understanding of agricultural pest biology, evolutionary relationship studies among nematode species are very imperative, though it is not essential for nematode taxonomy (Subbotin 2001). For evolutionary analysis of plant-parasitic nematodes, different genomic regions viz. 18S rDNA, ITS, and 28S rDNA, as well as mtDNA are used most frequently (Subbotin 2005, Ye et al. 2007, Kumari 2012. 18S and ITS regions are very common among molecular systematics for near-fulllength sequences and evolutionary analysis of many plant-parasitic nematodes (Blaxter et al. 1991, Subbotin 2006). Unfortunately, little research has been done on this purpose, and intensive studies are required to expand and sustain tea production. Therefore, proper identification of beneficial and plant-parasitic nematodes associated with the tea soils is important. 
This study focused on the molecular identification of plant-parasitic nematodes, collected from several tea gardens in the Sylhet region of Bangladesh. Besides this, we have investigated the evolutionary relationships among the identified plant-parasitic nematode species from the available database. Collection of nematode sample and population determination Tea plant-parasitic nematode containing soil has been collected from different tea gardens of the greater Sylhet region. Considering the depth of 0-20 cm and 20-40 cm, soil samples were collected by using a soil auger. Rhizosphere soil samples were collected from stressed tea bushes. The stressed tea bushes were considered as those that did not have two leaves and a bud, become yellowish, wilting and, stunted growth (Adegbite and Adesiyan 2006). The soil was taken in proper air circulated plastic bag and transported in an insulated box to the laboratory and stored at 4°C until processing on the same day. For the nematode extraction from the soil, a modified Baermann funnel method (Cesarz et al. 2019) was used which is easy to isolate the nematodes. In the modified Baermann funnel, the apparatus consists of a plastic box with a mass in it. Two tissue papers (crisscross manner) were placed there. Each soil sample was mixed thoroughly, and 200g of soil was subsampled and was placed over the tissue with distilled water by cover-up and kept still overnight, where nematodes were moved through the tissue and settled at bottom of the box by gravity. The next morning the nematode suspension was collected and identified according to their typical morphological features, considering the presence of Sylhet. The population density was estimated by the formula (number of nematode/ml of suspension) × (total volume of the nematode suspension from 200 g of soil). The counting was done on a nematode counting slide under a dissecting microscope. 
DNA extraction DNA extraction was done using the standard protocol of Clear ® Detections Nematode DNA extraction and purification kit (Product no: EX-N-P-NDEP, Netherlands) which is designed to extract and purify DNA from nematode suspensions and/or multiple cysts. The kit separates DNA from proteins, detergents, and low molecular weight compounds. The purified nematode DNA was used for downstream applications such as PCR. PCR amplification using nematode specific primers In the present study, 16s rRNA regions from extracted and purified DNA regions were amplified by different nematode-specific primers (Table 1). For the amplification process, PCR mixture has been prepared with an aliquot of 50 µl solution, where 25 µl was the master mixture, 4 µl each for forward and reverse primer, 8 µL for template, and the remaining are nuclease-free water. All reactions are performed according to the following conditions: initial denaturation has been done for 1 minute with 94°C, then the cycling conditions for the amplification step has been done for 35 cycles and the extension period is for 1 minute at 72°C. Then the preparation was kept for 7 minutes for the final extension and then storage has been done at 4°C. All PCR amplifications were run on the Thermal Cycler (SimpliAmp™ Thermal Cycler, Applied Biosystem®, and the USA). Sequence retrieval For in silico observation of the expected nematodes species (among them eight sequences were from Meloidogyne spp.) nucleotide sequences were retrieved from NCBI (https://www. ncbi. nlm. nih. gov/ nucleotide). The sequences were saved into fasta format and were processed through Molecular Evolutionary Genetic Analysis (MEGAX) tools for the evolutionary analysis among the twelve nematodes species. Construction of evolutionary relationship An evolutionary relationship among the twelve species was determined on the comparison of the Polymerase Chain Reaction amplified result of that relevant species. 
Here, the evolutionary analysis was prepared with the Neighbor-Joining method with a bootstrap replicate value of 1000. Measurement of population densities Based on morphological characteristics of nematodes isolated from soil samples, plant-parasitic nematodes were identified in 9 of 13 soil samples from different tea gardens, with densities ranging from 50 to 200 individuals per 5 ml of nematode solution extracted from 200 g of soil (Table 2). In the present study, it was found that Karimpur (sec.1) and Finley tea estate among all of the collected samples showed the highest number of parasitic nematodes. Molecular detection by polymerase chain reaction In the present study, 13 samples were collected from different tea gardens in several locations. Here, 1 kb ladder was used. PCR amplification was done with nematode-specific primers yielding an amplification product of approximately 518 bps of the targeted 16S rRNA gene. Among the soil samples, nematodes extracted from the Malnichora tea garden (M2) showed 1.7, 1.1, and 0.52kb amplification of the 16S rRNA gene (Fig. 1). It is partially confirmed that M2 samples had root-knot nematodes (Meloidogyne spp.) according to their amplification sizes of 16S rRNA gene. Nematodes extracted from above mentioned six soil samples displayed 0.52, 0.52, 0.25 and 2.3 kb amplification of primer-specific gene ( Fig. 2 and 3). It is also partially confirmed that P2, X5, R2 and H3 samples had consequently of root-Lesion nematode (Pratylenchus brachyurus) from Karimpur section 1, Burrowing nematode (Radopholus similis) from BTRI, Reniform nematode (Rotylenchulus reniformis) from Mathiura, and Lance nematode (Hoplolaimus Columbus) from Finlay tea garden respectively. The present study performs extraction of DNA from nematodes sample. There has no record of extracting and molecular identification techniques for tea parasitic nematodes according to BTRI. 
An expeditious species identification technique for Meloidogyne spp has been represented. The benefits of the method are that it needs no radioactive isotopes and solely straightforward nematode lysis before PCR, rather than complicated extraction of DNA. The ability to identify soil nematodes permits species determinations to be conducted before planting. The specificity of the assay may allow monitoring species shifts in mixed Meloidogyne populations with single PCR amplification (Subbotin 2001). A survey of a lot of isolates ought to be conducted to verify the presence of all that necessary Meloidogyne spp. The repetition units are placed during a different region of the mitochondrial genome than the amplified region delineated within the present study. Evolutionary relationship among nematodes from different tea garden Nucleotide sequences of twelve nematode species and their accession numbers are listed in Table 3. These sequences were analysed with MEGAX tools to observe the evolutionary relationships among these species. The Neighbor-Joining method was applied to infer the evolutionary history of the nematode species (Saitou and Nei 1985). The replicates (1000) were used to construct the bootstrap consensus tree (Felsenstein 1985) which was further employed to represent the evolutionary history of the taxa analyzed (Felsenstein 1985). Branches corresponding to partitions reproduced in less than 50% of bootstrap replicates are collapsed. The proportion (percentage) of replicate trees in the constructed bootstrap (1000 replicates) where the closely related taxa clustered together is represented alongside the branches (Felsenstein 1985). The p-distance method, which is based on the proportion of nucleotide differences per site, was employed to calculate evolutionary distances (Nei and Kumar 2000). This evolutionary study involved 12 nucleotide sequences.
For each pair of studied sequences, all ambiguous positions were eliminated by applying the pairwise deletion option. Finally, there were a total of 1263 positions for further analysis. Evolutionary studies were performed using MEGA X (Kumar 2018). Here, Meloidogyne species are strongly related to each other making clusters except Meloidogyne natalie in a clade. This Meloidogyne natalie is closely related with Hoplolaimus columbus in their evolutionary relation where the remaining three nematodes (Rotylenchulus reniformis, Radopholus similis, Pratylenchus brachyurus) are in different clusters in the same clade evolved from the common ancestor (Fig. 4). Conclusion Molecular diagnosis is important for the confirmation of plant-parasitic nematode species identification and analysis of their phylogenetic relationships with available sequences in databases. The present study provides the basic information on molecular identification based on PCR band of genomic DNA and a comprehensive framework of evolutionary relationships of plant-parasitic nematodes associated with tea garden. After DNA sequencing, it will be identified to species level and this study will aid in further characterization of diverse plant-parasitic nematodes in different tea gardens in the future.
package ru.spbau.tinydb.queries;

import org.jetbrains.annotations.NotNull;

import ru.spbau.tinydb.common.DBException;
import ru.spbau.tinydb.engine.IDataBase;

/**
 * A database query that produces a result of type {@code R} when executed
 * against a database instance.
 *
 * @param <R> the type of the query result
 * @author adkozlov
 */
public interface IQuery<R> {

    /**
     * Runs this query against the given database instance.
     *
     * @param instance the database to execute the query against
     * @return the query result, never {@code null}
     * @throws DBException if the query cannot be executed
     */
    @NotNull
    R execute(@NotNull IDataBase instance) throws DBException;
}
// pkg/jsonstream/types.go
package jsonstream

import (
	"time"
)

// JSONError wraps a concrete Code and Message as error.
type JSONError struct {
	Code    int    `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
}

// Error implements the error interface by returning the message text;
// the numeric Code is not included in the rendered string.
func (e *JSONError) Error() string {
	return e.Message
}

// ProgressDetail represents the status: the current and total counters
// of an in-flight operation.
type ProgressDetail struct {
	Current int64 `json:"current"`
	Total   int64 `json:"total"`
}

// JSONMessage defines a message struct for jsonstream.
// It describes id, status, progress detail, error detail, and the
// started/updated timestamps of the operation.
type JSONMessage struct {
	ID        string          `json:"id,omitempty"`
	Status    string          `json:"status,omitempty"`
	Detail    *ProgressDetail `json:"progressDetail,omitempty"`
	Error     *JSONError      `json:"errorDetail,omitempty"`
	StartedAt time.Time       `json:"started_at,omitempty"`
	UpdatedAt time.Time       `json:"updated_at,omitempty"`
}
// javaSE/src/main/java/com.silinx/source/swaggerranger/JavaCore/socket/ListeningThread.java
package com.silinx.source.swaggerranger.JavaCore.socket;

import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Vector;

/**
 * Listener thread: accepts incoming connections on a dedicated thread and
 * spawns one ConnectionThread per accepted socket (a minimal
 * thread-per-connection server). Before each accept it prunes connection
 * threads that are no longer running.
 */
class ListeningThread extends Thread {

    private final SocketServer socketServer;
    private final ServerSocket serverSocket;
    // Live connection handlers. Vector's synchronized methods matter here
    // because stopRunning() may be invoked from another thread while run()
    // is mutating this list.
    private final Vector<ConnectionThread> connectionThreads;
    // volatile: written by the caller of stopRunning(), read by run() —
    // without volatile the loop might never observe the shutdown request.
    private volatile boolean isRunning;

    public ListeningThread(SocketServer socketServer, ServerSocket serverSocket) {
        this.socketServer = socketServer;
        this.serverSocket = serverSocket;
        this.connectionThreads = new Vector<ConnectionThread>();
        isRunning = true;
    }

    @Override
    public void run() {
        while (isRunning) {
            if (serverSocket.isClosed()) {
                isRunning = false;
                break;
            }

            // Prune connection threads that have finished running.
            // removeIf is atomic on Vector, replacing the old two-pass
            // collect-then-remove dance with an auxiliary list.
            connectionThreads.removeIf(connectionThread -> !connectionThread.isRunning());

            try {
                // accept() blocks until a client connects.
                Socket socket = serverSocket.accept();
                ConnectionThread connectionThread = new ConnectionThread(socket, socketServer);
                connectionThreads.addElement(connectionThread);
                connectionThread.start();
            } catch (IOException e) {
                // If the server socket was closed (e.g. during shutdown) the
                // pending accept() fails; leave the loop instead of spinning
                // and printing the same stack trace forever.
                if (serverSocket.isClosed()) {
                    isRunning = false;
                } else {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Asks every connection thread to stop and stops accepting new
     * connections.
     * NOTE(review): a pending accept() keeps blocking until the next client
     * connects or the owner closes the ServerSocket — this method does not
     * close the socket because it is owned by the caller.
     */
    public void stopRunning() {
        for (ConnectionThread connectionThread : connectionThreads) {
            connectionThread.stopRunning();
        }
        isRunning = false;
    }
}
<gh_stars>1-10 /*********************************************************************************************************************** * OpenStudio(R), Copyright (c) 2008-2018, Alliance for Sustainable Energy, LLC. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. * * (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other materials provided with the distribution. * * (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote * products derived from this software without specific prior written permission from the respective party. * * (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative * works may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without * specific prior written permission from Alliance for Sustainable Energy, LLC. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER, THE UNITED STATES GOVERNMENT, OR ANY CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 **********************************************************************************************************************/

#include "FloorplanJS.hpp"
#include "ThreeJS.hpp"
#include "Vector3d.hpp"
#include "Geometry.hpp"
#include "Plane.hpp"
#include "Intersection.hpp"

#include "../core/Assert.hpp"
#include "../core/Path.hpp"
#include "../core/Json.hpp"

#include <jsoncpp/json.h>

#include <iostream>
#include <string>

namespace openstudio{

  // Name postfixes used when synthesizing plenum spaces/zones from a story's
  // below-floor / above-ceiling plenum heights.
  const std::string BELOWFLOORPLENUMPOSTFIX(" Floor Plenum");
  const std::string ABOVECEILINGPLENUMPOSTFIX(" Plenum");
  const std::string PLENUMSPACETYPENAME("Plenum Space Type"); // DLM: needs to be coordinated with name in Model_Impl::plenumSpaceType()

  // Construct from id/name and a handle given as a string; the UUID form of
  // the handle is derived from the string.
  FloorplanObject::FloorplanObject(const std::string& id, const std::string& name, const std::string& handleString)
    : m_id(id), m_name(name), m_handle(toUUID(handleString)), m_handleString(handleString)
  {}

  // Construct from id/name and a UUID handle; the string form of the handle
  // is derived from the UUID.
  FloorplanObject::FloorplanObject(const std::string& id, const std::string& name, const UUID& handle)
    : m_id(id), m_name(name), m_handle(handle), m_handleString(toString(handle))
  {}

  // Construct from a JSON object: id/name/handle are extracted and removed
  // from m_data; known *_id references are captured as FloorplanObjects in
  // m_objectReferenceMap (and also removed from m_data).
  FloorplanObject::FloorplanObject(const Json::Value& value)
    : m_data(value)
  {
    m_id = m_data.get("id", "").asString();
    m_data.removeMember("id");

    m_name = m_data.get("name", "").asString();
    m_data.removeMember("name");

    m_handleString = m_data.get("handle", "").asString();
    m_handle = toUUID(m_handleString);
    m_data.removeMember("handle");

    // look for different references
    std::string building_unit_id = m_data.get("building_unit_id", "").asString();
    if (!building_unit_id.empty()){
      m_objectReferenceMap.insert(std::make_pair("building_unit_id", FloorplanObject(building_unit_id, "", "")));
    }
    m_data.removeMember("building_unit_id");

    std::string thermal_zone_id = m_data.get("thermal_zone_id", "").asString();
    if (!thermal_zone_id.empty()){
      m_objectReferenceMap.insert(std::make_pair("thermal_zone_id", FloorplanObject(thermal_zone_id, "", "")));
    }
    m_data.removeMember("thermal_zone_id");

    std::string space_type_id = m_data.get("space_type_id", "").asString();
    if (!space_type_id.empty()){
      m_objectReferenceMap.insert(std::make_pair("space_type_id", FloorplanObject(space_type_id, "", "")));
    }
    m_data.removeMember("space_type_id");

    std::string construction_set_id = m_data.get("construction_set_id", "").asString();
    if (!construction_set_id.empty()){
      m_objectReferenceMap.insert(std::make_pair("construction_set_id", FloorplanObject(construction_set_id, "", "")));
    }
    m_data.removeMember("construction_set_id");

    //window_id
    //daylighting_control_id
  }

  // Trivial accessors for identity fields.
  std::string FloorplanObject::id() const { return m_id; }

  std::string FloorplanObject::name() const { return m_name; }

  UUID FloorplanObject::handle() const { return m_handle; }

  std::string FloorplanObject::handleString() const { return m_handleString; }

  boost::optional<std::string> FloorplanObject::parentHandleString() const { return m_parentHandleString; }

  void FloorplanObject::setParentHandleString(const std::string& parentHandleString) { m_parentHandleString = parentHandleString; }

  void FloorplanObject::resetParentHandleString() { m_parentHandleString.reset(); }

  // Typed getters: return the value stored under key, or boost::none when
  // the key is missing or has an incompatible JSON type.
  boost::optional<double> FloorplanObject::getDataDouble(const std::string& key) const {
    Json::Value value = m_data.get(key, Json::nullValue);
    if (!value.isNull() && value.isNumeric()){
      return value.asDouble();
    }
    return boost::none;
  }

  boost::optional<int> FloorplanObject::getDataInt(const std::string& key) const {
    Json::Value value = m_data.get(key, Json::nullValue);
    if (!value.isNull() && value.isNumeric()){
      return value.asInt();
    }
    return boost::none;
  }

  boost::optional<bool> FloorplanObject::getDataBool(const std::string& key) const {
    Json::Value value = m_data.get(key, Json::nullValue);
    if (!value.isNull() && value.isBool()){
      return value.asBool();
    }
    return boost::none;
  }

  boost::optional<std::string> FloorplanObject::getDataString(const std::string& key) const {
    Json::Value value = m_data.get(key, Json::nullValue);
    if (!value.isNull() && value.isString()){
      return value.asString();
    }
    return boost::none;
  }

  // Returns the captured reference object stored under key (e.g.
  // "thermal_zone_id"), or boost::none when no such reference was seen.
  boost::optional<FloorplanObject> FloorplanObject::getDataReference(const std::string& key) const {
    const auto& it = m_objectReferenceMap.find(key);
    if (it != m_objectReferenceMap.end()){
      return it->second;
    }
    return boost::none;
  }

  // Typed setters: store value under key in the underlying JSON data.
  void FloorplanObject::setDataDouble(const std::string& key, double value) { m_data[key] = value; }

  void FloorplanObject::setDataInt(const std::string& key, int value) { m_data[key] = value; }

  void FloorplanObject::setDataBool(const std::string& key, bool value) { m_data[key] = value; }

  void FloorplanObject::setDataString(const std::string& key, const std::string& value) { m_data[key] = value; }

  void FloorplanObject::setDataReference(const std::string& key, const FloorplanObject& value) { m_objectReferenceMap.insert(std::make_pair(key, value)); }

  Json::Value FloorplanObject::data() const { return m_data; }

  std::map<std::string, FloorplanObject> FloorplanObject::objectReferenceMap() const { return m_objectReferenceMap; }

  // Default construct an empty floorplan (empty JSON object).
  FloorplanJS::FloorplanJS() : m_lastId(0) {
    m_value = Json::Value(Json::objectValue);
  }

  // Construct from a JSON string; if the string does not parse, treat it as
  // a filesystem path to a JSON file and try parsing that instead.
  FloorplanJS::FloorplanJS(const std::string& s) : m_lastId(0) {
    Json::Reader reader;
    bool parsingSuccessful = reader.parse(s, m_value);
    if (!parsingSuccessful){
      std::string errors = reader.getFormattedErrorMessages();

      // see if this is a path
      openstudio::path p = toPath(s);
      if (boost::filesystem::exists(p) && boost::filesystem::is_regular_file(p)){
        // open file
std::ifstream ifs(openstudio::toString(p));
        m_value.clear();
        parsingSuccessful = reader.parse(ifs, m_value);
      }

      if (!parsingSuccessful){
        LOG_AND_THROW("ThreeJS JSON cannot be processed, " << errors);
      }
    }

    setLastId(m_value);
  }

  FloorplanJS::FloorplanJS(const Json::Value& value)
    : m_value(value), m_lastId(0)
  {
    setLastId(m_value);
  }

  // Factory: parse a JSON string (or a path to a JSON file); returns
  // boost::none instead of throwing when parsing fails.
  boost::optional<FloorplanJS> FloorplanJS::load(const std::string& json) {
    try {
      FloorplanJS result(json);
      return result;
    } catch (...) {
      LOG(Error, "Could not parse JSON input");
    }
    return boost::none;
  }

  // Serialize the floorplan back to a JSON string, styled or compact.
  std::string FloorplanJS::toJSON(bool prettyPrint) const {
    // write to string
    std::string result;
    if (prettyPrint){
      Json::StyledWriter writer;
      result = writer.write(m_value);
    } else{
      Json::FastWriter writer;
      result = writer.write(m_value);
    }
    return result;
  }

  // Build one ThreeJS geometry + scene child for a surface of the given
  // type, tagging it (via ThreeUserData) with story/space/zone/space type/
  // construction set names and handles resolved from the floorplan JSON.
  // Returns the generated face id so callers can parent sub surfaces/shades.
  std::string FloorplanJS::makeSurface(const Json::Value& story, const Json::Value& space, const std::string& parentSurfaceName, const std::string& parentSubSurfaceName,
    bool belowFloorPlenum, bool aboveCeilingPlenum, const std::string& surfaceType, const Point3dVector& vertices, size_t faceFormat,
    std::vector<ThreeGeometry>& geometries, std::vector<ThreeSceneChild>& sceneChildren, double illuminanceSetpoint) const
  {
    bool plenum = false;
    std::string spaceNamePostFix;
    if (belowFloorPlenum){
      plenum = true;
      spaceNamePostFix = BELOWFLOORPLENUMPOSTFIX;
    } else if (aboveCeilingPlenum){
      plenum = true;
      spaceNamePostFix = ABOVECEILINGPLENUMPOSTFIX;
    }

    std::string geometryId = std::string("Geometry ") + std::to_string(geometries.size());
    std::string faceId = std::string("Face ") + std::to_string(geometries.size());

    size_t n = vertices.size();

    // faces: first entry encodes the ThreeJS face format, then vertex indices
    std::vector<size_t> faces;
    faces.push_back(faceFormat);
    for (size_t i = 0; i < n; ++i){
      faces.push_back(i);
    }

    {
      std::string uuid = geometryId;
      std::string type = "Geometry";
      ThreeGeometryData data(toThreeVector(vertices), faces);
      ThreeGeometry geometry(uuid, type, data);
      geometries.push_back(geometry);
    }

    {
      std::string uuid = faceId;
      std::string name = faceId;
      std::string type = "Mesh";
      std::string materialId;

      ThreeUserData userData;
      userData.setName(faceId);

      std::string s;
      std::string id;

      // story
      assertKeyAndType(story, "name", Json::stringValue);
      s = story.get("name", "").asString();
      userData.setBuildingStoryName(s);
      if (checkKeyAndType(story, "handle", Json::stringValue)){
        s = story.get("handle", "").asString();
        userData.setBuildingStoryHandle(s);
      }

      // space (plenum spaces get the plenum name postfix)
      assertKeyAndType(space, "name", Json::stringValue);
      s = space.get("name", "").asString();
      userData.setSpaceName(s + spaceNamePostFix);
      if (checkKeyAndType(space, "handle", Json::stringValue)){
        s = space.get("handle", "").asString();
        userData.setSpaceHandle(s);
      }

      // parent surface
      userData.setSurfaceName(parentSurfaceName);

      // parent sub surface
      userData.setSubSurfaceName(parentSubSurfaceName);

      // building unit
      if (checkKeyAndType(space, "building_unit_id", Json::stringValue)){
        id = space.get("building_unit_id", "").asString();
        if (const Json::Value* buildingUnit = findById(m_value["building_units"], id)){
          assertKeyAndType(*buildingUnit, "name", Json::stringValue);
          s = buildingUnit->get("name", "").asString();
          userData.setBuildingUnitName(s);
          if (checkKeyAndType(*buildingUnit, "handle", Json::stringValue)){
            s = buildingUnit->get("handle", "").asString();
            userData.setBuildingUnitHandle(s);
          }
        } else{
          LOG(Error, "Cannot find BuildingUnit '" << id << "'");
        }
      }

      // thermal zone
      if (checkKeyAndType(space, "thermal_zone_id", Json::stringValue)){
        id = space.get("thermal_zone_id", "").asString();
        if (const Json::Value* thermalZone = findById(m_value["thermal_zones"], id)){
          assertKeyAndType(*thermalZone, "name", Json::stringValue);
          s = thermalZone->get("name", "").asString();
          std::string thermalZoneName = s + spaceNamePostFix;
          userData.setThermalZoneName(thermalZoneName);
          if (plenum){
            // DLM: we do not have this plenum thermal zone in the floorplan
            // NOTE(review): mutated inside a const member — presumably
            // m_plenumThermalZoneNames is declared mutable; verify.
            m_plenumThermalZoneNames.insert(thermalZoneName);
          }else{
            if (checkKeyAndType(*thermalZone, "handle", Json::stringValue)){
              s = thermalZone->get("handle", "").asString();
              userData.setThermalZoneHandle(s);
            }
          }
        } else{
          LOG(Error, "Cannot find ThermalZone '" << id << "'");
        }
      }

      // space type (plenums always use the dedicated plenum space type)
      if (plenum){
        userData.setSpaceTypeName(PLENUMSPACETYPENAME);
      } else {
        if (checkKeyAndType(space, "space_type_id", Json::stringValue)){
          id = space.get("space_type_id", "").asString();
          if (const Json::Value* spaceType = findById(m_value["space_types"], id)){
            assertKeyAndType(*spaceType, "name", Json::stringValue);
            s = spaceType->get("name", "").asString();
            userData.setSpaceTypeName(s);
            if (checkKeyAndType(*spaceType, "handle", Json::stringValue)){
              s = spaceType->get("handle", "").asString();
              userData.setSpaceTypeHandle(s);
            }
          } else{
            LOG(Error, "Cannot find SpaceType '" << id << "'");
          }
        }
      }

      // construction set
      if (checkKeyAndType(space, "construction_set_id", Json::stringValue)){
        id = space.get("construction_set_id", "").asString();
        if (const Json::Value* constructionSet = findById(m_value["construction_sets"], id)){
          assertKeyAndType(*constructionSet, "name", Json::stringValue);
          s = constructionSet->get("name", "").asString();
          userData.setConstructionSetName(s);
          if (checkKeyAndType(*constructionSet, "handle", Json::stringValue)){
            s = constructionSet->get("handle", "").asString();
            userData.setConstructionSetHandle(s);
          }
        } else{
          LOG(Error, "Cannot find ConstructionSet '" << id << "'");
        }
      }

      userData.setSurfaceType(surfaceType);
      //userData.setAboveCeilingPlenum(aboveCeilingPlenum);
      //userData.setBelowFloorPlenum(belowFloorPlenum);
      userData.setIlluminanceSetpoint(illuminanceSetpoint);

      ThreeSceneChild sceneChild(uuid, name, type, geometryId, materialId, userData);
      sceneChildren.push_back(sceneChild);
    }

    return faceId;
  }

  // Build all geometries (floor, ceiling, walls, windows, shades,
  // daylighting controls) for one space's face between minZ and maxZ.
  void FloorplanJS::makeGeometries(const Json::Value& story, const Json::Value& space,
    bool belowFloorPlenum, bool aboveCeilingPlenum, double lengthToMeters, double minZ, double maxZ,
    const Json::Value& vertices, const Json::Value& edges, const Json::Value& faces, const std::string& faceId,
    bool
openstudioFormat, std::vector<ThreeGeometry>& geometries, std::vector<ThreeSceneChild>& sceneChildren) const
  {
    std::vector<Point3d> faceVertices;
    std::vector<Point3d> windowCenterVertices;
    std::vector<std::string> windowDefinitionIds;
    std::vector<Point3d> daylightingControlVertices;

    const Json::Value windowDefinitions = m_value.get("window_definitions", Json::arrayValue);
    const Json::Value daylightingControlDefinitions = m_value.get("daylighting_control_definitions", Json::arrayValue);

    // get all the windows on this story, keyed by the edge they sit on
    std::map<std::string, std::vector<Json::Value> > edgeIdToWindowsMap;
    for (const auto& window : story.get("windows", Json::arrayValue)){
      assertKeyAndType(window, "edge_id", Json::stringValue);
      std::string edgeId = window.get("edge_id", "").asString();
      if (edgeIdToWindowsMap.find(edgeId) == edgeIdToWindowsMap.end()){
        edgeIdToWindowsMap[edgeId] = std::vector<Json::Value>();
      }
      edgeIdToWindowsMap[edgeId].push_back(window);
    }

    // get the face
    const Json::Value* face = findById(faces, faceId);
    if (face){

      // get the edges
      Json::Value edgeIds = face->get("edge_ids", Json::arrayValue);
      Json::Value edgeOrders = face->get("edge_order", Json::arrayValue);
      Json::ArrayIndex edgeN = edgeIds.size();
      OS_ASSERT(edgeN == edgeOrders.size());

      // loop over edges, walking the face boundary at z = 0
      for (Json::ArrayIndex edgeIdx = 0; edgeIdx < edgeN; ++edgeIdx){
        std::string edgeId = edgeIds[edgeIdx].asString();
        unsigned edgeOrder = edgeOrders[edgeIdx].asUInt();

        // get the edge
        const Json::Value* edge = findById(edges, edgeId);
        if (edge){
          Json::Value vertexIds = edge->get("vertex_ids", Json::arrayValue);
          OS_ASSERT(2u == vertexIds.size());

          // get the vertices; edgeOrder selects which endpoint comes next
          const Json::Value* nextVertex;
          const Json::Value* vertex1 = findById(vertices, vertexIds[0].asString());
          const Json::Value* vertex2 = findById(vertices, vertexIds[1].asString());
          // NOTE(review): the two lookups below are redundant repeats of the
          // initializations above — candidates for removal upstream.
          vertex1 = findById(vertices, vertexIds[0].asString());
          vertex2 = findById(vertices, vertexIds[1].asString());
          if (edgeOrder == 1){
            nextVertex = vertex1;
          }else{
            nextVertex = vertex2;
          }
          OS_ASSERT(nextVertex);
          OS_ASSERT(vertex1);
          OS_ASSERT(vertex2);

          assertKeyAndType(*nextVertex, "x", Json::realValue);
          assertKeyAndType(*nextVertex, "y", Json::realValue);
          faceVertices.push_back(Point3d(lengthToMeters * nextVertex->get("x", 0.0).asDouble(), lengthToMeters * nextVertex->get("y", 0.0).asDouble(), 0.0));

          // check if there are windows on this edge; window centers are
          // interpolated along the edge by each window's alpha parameter(s)
          if (edgeIdToWindowsMap.find(edgeId) != edgeIdToWindowsMap.end()){
            std::vector<Json::Value> windows = edgeIdToWindowsMap[edgeId];
            for (const auto& window : windows){
              assertKeyAndType(window, "window_definition_id", Json::stringValue);
              std::string windowDefinitionId = window.get("window_definition_id", "").asString();

              std::vector<double> alphas;
              if (checkKeyAndType(window, "alpha", Json::realValue)){
                alphas.push_back(window.get("alpha", 0.0).asDouble());
              } else if (checkKeyAndType(window, "alpha", Json::arrayValue)){
                Json::Value temp = window.get("alpha", Json::arrayValue);
                Json::ArrayIndex tempN = temp.size();
                for (Json::ArrayIndex tempIdx = 0; tempIdx < tempN; ++tempIdx){
                  alphas.push_back(temp[tempIdx].asDouble());
                }
              }

              for (const auto& alpha : alphas){
                double x = (1.0 - alpha) * vertex1->get("x", 0.0).asDouble() + alpha * vertex2->get("x", 0.0).asDouble();
                double y = (1.0 - alpha) * vertex1->get("y", 0.0).asDouble() + alpha * vertex2->get("y", 0.0).asDouble();

                windowDefinitionIds.push_back(windowDefinitionId);
                windowCenterVertices.push_back(Point3d(lengthToMeters*x, lengthToMeters*y, 0.0));
              }
            }
          }
        }
      }
    }

    // correct the floor vertices

    // simplify the vertices to remove potential duplicate, colinear points
    double tol = 0.001;
    faceVertices = simplify(faceVertices, false, tol);

    unsigned numPoints = faceVertices.size();
    if (numPoints < 3){
      //LOG(Error, "Cannot create a space for floorPrint of size " << faceVertices.size() << ".");
      return;
    }

    // orient the footprint so its normal points down (floor orientation)
    boost::optional<Vector3d> outwardNormal = getOutwardNormal(faceVertices);
    if (!outwardNormal){
      //LOG(Error, "Cannot compute outwardNormal for floorPrint.");
      return;
    }

    if (outwardNormal->z() > 0){
      faceVertices = reverse(faceVertices);
    }

    Point3dVectorVector allFinalFaceVertices;
    unsigned roofCeilingFaceFormat = 1024;
    unsigned wallFaceFormat = 1024;
    if (openstudioFormat){
      allFinalFaceVertices.push_back(faceVertices);
    }else{
      roofCeilingFaceFormat = 0; // triangle
      wallFaceFormat = 1; // quad
      allFinalFaceVertices = computeTriangulation(faceVertices, Point3dVectorVector());
    }

    // create floor and ceiling
    for (const auto& finalFaceVertices : allFinalFaceVertices){
      Point3dVector finalfloorVertices;
      Point3dVector finalRoofCeilingVertices;

      for (auto& v : finalFaceVertices){
        finalfloorVertices.push_back(Point3d(v.x(), v.y(), minZ));
        finalRoofCeilingVertices.push_back(Point3d(v.x(), v.y(), maxZ));
      }

      makeSurface(story, space, "", "", belowFloorPlenum, aboveCeilingPlenum, "Floor", finalfloorVertices, roofCeilingFaceFormat, geometries, sceneChildren, 0);
      makeSurface(story, space, "", "", belowFloorPlenum, aboveCeilingPlenum, "RoofCeiling", reverse(finalRoofCeilingVertices), roofCeilingFaceFormat, geometries, sceneChildren, 0);
    }

    // create each wall
    std::set<unsigned> mappedWindows;
    for (unsigned i = 1; i <= numPoints; ++i){
      Point3dVector wallVertices;
      wallVertices.push_back(Point3d(faceVertices[i - 1].x(), faceVertices[i - 1].y(), maxZ));
      wallVertices.push_back(Point3d(faceVertices[i % numPoints].x(), faceVertices[i % numPoints].y(), maxZ));
      wallVertices.push_back(Point3d(faceVertices[i % numPoints].x(), faceVertices[i % numPoints].y(), minZ));
      wallVertices.push_back(Point3d(faceVertices[i - 1].x(), faceVertices[i - 1].y(), minZ));

      // find windows that appear on this edge, can't use edge ids after simplify algorithm
      std::vector<Point3d> testSegment;
      testSegment.push_back(Point3d(faceVertices[i - 1].x(), faceVertices[i - 1].y(), 0.0));
      testSegment.push_back(Point3d(faceVertices[i % numPoints].x(), faceVertices[i % numPoints].y(), 0.0));

      Vector3d edgeVector = testSegment[1] - testSegment[0];
      edgeVector.setLength(1.0);
      Vector3d upVector(0, 0, 1);
      Vector3d crossVector = upVector.cross(edgeVector);

      Point3dVectorVector allFinalWindowVertices;
      Point3dVectorVector allFinalShadeVertices;
      std::vector<unsigned> allFinalShadeParentSubSurfaceIndices;

      unsigned windowN = windowCenterVertices.size();
      OS_ASSERT(windowN == windowDefinitionIds.size());
      // map each not-yet-placed window whose center lies on this wall segment
      for (unsigned windowIdx = 0; windowIdx < windowN; ++windowIdx){
        if (mappedWindows.find(windowIdx) == mappedWindows.end()){
          if (getDistancePointToLineSegment(windowCenterVertices[windowIdx], testSegment) < tol){

            // get window definition
            const Json::Value* windowDefinition = findById(windowDefinitions, windowDefinitionIds[windowIdx]);
            if (windowDefinition){
              assertKeyAndType(*windowDefinition, "window_definition_type", Json::stringValue);
              std::string windowDefinitionType = windowDefinition->get("window_definition_type", "").asString();

              //"name": "Single Window",
              //"window_definition_type": "Single Window",
              //"wwr": null,
              //"sill_height": 3,
              //"window_spacing": null,
              //"height": 4,
              //"width": 2,
              //"overhang_projection_factor": 0.5,
              //"fin_projection_factor": 0.5,
              //"type": "window_definitions"

              // DLM: TODO fins and overhangs

              if (istringEqual("Single Window", windowDefinitionType) || istringEqual("Repeating Windows", windowDefinitionType)){
                assertKeyAndType(*windowDefinition, "sill_height", Json::realValue);
                assertKeyAndType(*windowDefinition, "height", Json::realValue);
                assertKeyAndType(*windowDefinition, "width", Json::realValue);

                double sillHeight = lengthToMeters * windowDefinition->get("sill_height", 0.0).asDouble();
                double height = lengthToMeters * windowDefinition->get("height", 0.0).asDouble();
                double width = lengthToMeters * windowDefinition->get("width", 0.0).asDouble();

                Vector3d widthVector = edgeVector;
                widthVector.setLength(0.5*width);
                Point3d window1 = windowCenterVertices[windowIdx] + widthVector;
                widthVector.setLength(-0.5*width);
                Point3d window2 = windowCenterVertices[windowIdx] + widthVector;

                Point3dVector windowVertices;
windowVertices.push_back(Point3d(window1.x(), window1.y(), sillHeight + height));
                windowVertices.push_back(Point3d(window1.x(), window1.y(), sillHeight));
                windowVertices.push_back(Point3d(window2.x(), window2.y(), sillHeight));
                windowVertices.push_back(Point3d(window2.x(), window2.y(), sillHeight + height));

                unsigned parentSubSurfaceIndex = allFinalWindowVertices.size();
                allFinalWindowVertices.push_back(windowVertices);

                // optional overhang shading above the window
                if (checkKeyAndType(*windowDefinition, "overhang_projection_factor", Json::realValue) || checkKeyAndType(*windowDefinition, "overhang_projection_factor", Json::intValue)){
                  double projectionFactor = windowDefinition->get("overhang_projection_factor", 0.0).asDouble();
                  if (projectionFactor > 0){
                    Vector3d outVector = crossVector;
                    outVector.setLength(projectionFactor*height);
                    Point3d window3 = window1 + outVector;
                    Point3d window4 = window2 + outVector;

                    Point3dVector shadeVertices;
                    shadeVertices.push_back(Point3d(window1.x(), window1.y(), sillHeight + height));
                    shadeVertices.push_back(Point3d(window3.x(), window3.y(), sillHeight + height));
                    shadeVertices.push_back(Point3d(window4.x(), window4.y(), sillHeight + height));
                    shadeVertices.push_back(Point3d(window2.x(), window2.y(), sillHeight + height));

                    allFinalShadeVertices.push_back(shadeVertices);
                    allFinalShadeParentSubSurfaceIndices.push_back(parentSubSurfaceIndex);
                  }
                }

                // optional fin shading on both sides of the window
                if (checkKeyAndType(*windowDefinition, "fin_projection_factor", Json::realValue) || checkKeyAndType(*windowDefinition, "fin_projection_factor", Json::intValue)){
                  double projectionFactor = windowDefinition->get("fin_projection_factor", 0.0).asDouble();
                  if (projectionFactor > 0){
                    Vector3d outVector = crossVector;
                    outVector.setLength(projectionFactor*height);
                    Point3d window3 = window1 + outVector;
                    Point3d window4 = window2 + outVector;

                    Point3dVector shadeVertices;
                    shadeVertices.push_back(Point3d(window1.x(), window1.y(), sillHeight + height));
                    shadeVertices.push_back(Point3d(window1.x(), window1.y(), sillHeight));
                    shadeVertices.push_back(Point3d(window3.x(), window3.y(), sillHeight));
                    shadeVertices.push_back(Point3d(window3.x(), window3.y(), sillHeight + height));

                    allFinalShadeVertices.push_back(shadeVertices);
                    allFinalShadeParentSubSurfaceIndices.push_back(parentSubSurfaceIndex);

                    shadeVertices.clear();
                    shadeVertices.push_back(Point3d(window4.x(), window4.y(), sillHeight + height));
                    shadeVertices.push_back(Point3d(window4.x(), window4.y(), sillHeight));
                    shadeVertices.push_back(Point3d(window2.x(), window2.y(), sillHeight));
                    shadeVertices.push_back(Point3d(window2.x(), window2.y(), sillHeight + height));

                    allFinalShadeVertices.push_back(shadeVertices);
                    allFinalShadeParentSubSurfaceIndices.push_back(parentSubSurfaceIndex);
                  }
                }

              } else if (istringEqual("Window to Wall Ratio", windowDefinitionType)){
                assertKeyAndType(*windowDefinition, "sill_height", Json::realValue);
                assertKeyAndType(*windowDefinition, "wwr", Json::realValue);

                double sillHeight = lengthToMeters * windowDefinition->get("sill_height", 0.0).asDouble();
                double wwr = windowDefinition->get("wwr", 0.0).asDouble();

                double projectionFactor = 0.0;
                if (checkKeyAndType(*windowDefinition, "overhang_projection_factor", Json::realValue) || checkKeyAndType(*windowDefinition, "overhang_projection_factor", Json::intValue)){
                  projectionFactor = windowDefinition->get("overhang_projection_factor", 0.0).asDouble();
                }

                // applyViewAndDaylightingGlassRatios does not currently take argument for fins
                Point3dVector viewVertices;
                Point3dVector daylightingVertices; // not populated
                Point3dVector exteriorShadingVertices;
                Point3dVector interiorShelfVertices; // not populated
                bool test = applyViewAndDaylightingGlassRatios(wwr, 0.0, sillHeight, 0.0, projectionFactor, 0.0, wallVertices, viewVertices, daylightingVertices, exteriorShadingVertices, interiorShelfVertices);
                if (test) {
                  if (!viewVertices.empty()){
                    unsigned parentSubSurfaceIndex = allFinalWindowVertices.size();
                    allFinalWindowVertices.push_back(viewVertices);
                    if (!exteriorShadingVertices.empty()){
                      allFinalShadeVertices.push_back(exteriorShadingVertices);
                      allFinalShadeParentSubSurfaceIndices.push_back(parentSubSurfaceIndex);
                    }
                  }
                }
              }
            }
            // NOTE(review): the brace balance through this section looks off
            // by one in this extract — verify against the upstream
            // FloorplanJS.cpp before relying on the nesting shown here.
            mappedWindows.insert(windowIdx);
          }
        }
      }

      Point3dVectorVector allFinalWallVertices;
      unsigned finalWallFaceFormat = wallFaceFormat;
      if (openstudioFormat){
        allFinalWallVertices.push_back(wallVertices);
      } else if (allFinalWindowVertices.empty()){
        allFinalWallVertices.push_back(wallVertices);
      } else{
        finalWallFaceFormat = 0; // triangle
        allFinalWallVertices = computeTriangulation(wallVertices, allFinalWindowVertices, tol);
      }

      std::string parentSurfaceName;
      for (const auto& finalWallVertices : allFinalWallVertices){
        parentSurfaceName = makeSurface(story, space, "", "", belowFloorPlenum, aboveCeilingPlenum, "Wall", finalWallVertices, finalWallFaceFormat, geometries, sceneChildren, 0);
      }

      std::vector<std::string> parentSubSurfaceNames;
      for (const auto& finalWindowVertices : allFinalWindowVertices){
        std::string parentSubSurfaceName = makeSurface(story, space, parentSurfaceName, "", belowFloorPlenum, aboveCeilingPlenum, "FixedWindow", finalWindowVertices, wallFaceFormat, geometries, sceneChildren, 0);
        parentSubSurfaceNames.push_back(parentSubSurfaceName);
      }

      size_t shadeN = allFinalShadeVertices.size();
      OS_ASSERT(shadeN == allFinalShadeParentSubSurfaceIndices.size());
      for (size_t shadeIdx = 0; shadeIdx < shadeN; ++shadeIdx){
        std::string parentSubSurfaceName = parentSubSurfaceNames[allFinalShadeParentSubSurfaceIndices[shadeIdx]];
        makeSurface(story, space, "", parentSubSurfaceName, belowFloorPlenum, aboveCeilingPlenum, "SpaceShading", allFinalShadeVertices[shadeIdx], wallFaceFormat, geometries, sceneChildren, 0);
      }
    }

    // get daylighting controls: each becomes a small marker quad at the
    // control's height carrying its illuminance setpoint
    for (const auto& daylightingControl : space.get("daylighting_controls", Json::arrayValue)){
      assertKeyAndType(daylightingControl, "vertex_id", Json::stringValue);
      std::string vertexId = daylightingControl.get("vertex_id", "").asString();

      const Json::Value* vertex = findById(vertices, vertexId);
      if (vertex){
        assertKeyAndType(daylightingControl, "daylighting_control_definition_id", Json::stringValue);
        std::string daylightingControlDefinitionId = daylightingControl.get("daylighting_control_definition_id", "").asString();

        assertKeyAndType(*vertex, "x", Json::realValue);
        assertKeyAndType(*vertex, "y", Json::realValue);

        const Json::Value* daylightingControlDefinition = findById(daylightingControlDefinitions, daylightingControlDefinitionId);
        if (daylightingControlDefinition){
          assertKey(*daylightingControlDefinition, "height");
          assertKey(*daylightingControlDefinition, "illuminance_setpoint");

          double height = lengthToMeters * daylightingControlDefinition->get("height", 0.0).asDouble();
          double illuminanceSetpoint = daylightingControlDefinition->get("illuminance_setpoint", 0.0).asDouble(); // DLM: TODO put in unit conversion

          Point3dVector dcVertices;
          dcVertices.push_back(Point3d(lengthToMeters * vertex->get("x", 0.0).asDouble() - 0.1, lengthToMeters * vertex->get("y", 0.0).asDouble() + 0.1, height));
          dcVertices.push_back(Point3d(lengthToMeters * vertex->get("x", 0.0).asDouble() + 0.1, lengthToMeters * vertex->get("y", 0.0).asDouble() + 0.1, height));
          dcVertices.push_back(Point3d(lengthToMeters * vertex->get("x", 0.0).asDouble() + 0.1, lengthToMeters * vertex->get("y", 0.0).asDouble() - 0.1, height));
          dcVertices.push_back(Point3d(lengthToMeters * vertex->get("x", 0.0).asDouble() - 0.1, lengthToMeters * vertex->get("y", 0.0).asDouble() - 0.1, height));

          makeSurface(story, space, "", "", belowFloorPlenum, aboveCeilingPlenum, "DaylightingControl", dcVertices, wallFaceFormat, geometries, sceneChildren, illuminanceSetpoint);
        }
      }
    }
  }

  // Wrap an object's handle/name in ThreeModelObjectMetadata under the
  // given IDD object type; missing keys become empty strings.
  ThreeModelObjectMetadata FloorplanJS::makeModelObjectMetadata(const std::string& iddObjectType, const Json::Value& object) const
  {
    std::string handle;
    if (checkKeyAndType(object, "handle", Json::stringValue)){
      handle = object.get("handle", "").asString();
    }

    std::string name;
    if (checkKeyAndType(object, "name",
Json::stringValue)){ name = object.get("name", "").asString(); } return ThreeModelObjectMetadata(iddObjectType, handle, name); } ThreeScene FloorplanJS::toThreeScene(bool openstudioFormat) const { m_plenumThermalZoneNames.clear(); std::vector<ThreeGeometry> geometries; std::vector<ThreeMaterial> materials; std::vector<ThreeSceneChild> children; std::vector<std::string> buildingStoryNames; std::vector<ThreeModelObjectMetadata> modelObjectMetadata; double currentZ = 0; // read project config std::string units = "ft"; Json::Value project = m_value.get("project", Json::objectValue); if (!project.isNull()){ Json::Value config = project.get("config", Json::objectValue); if (!config.isNull()){ units = config.get("units", units).asString(); } } // DLM: geometry in ThreeJS output is always in meters without north angle applied, north angle is applied directly to osm double lengthToMeters = 1; if (istringEqual(units, "ft")){ lengthToMeters = 0.3048; // don't use openstudio convert to keep dependencies low } bool anyPlenums = false; // loop over stories Json::Value stories = m_value.get("stories", Json::arrayValue); Json::ArrayIndex storyN = stories.size(); for (Json::ArrayIndex storyIdx = 0; storyIdx < storyN; ++storyIdx){ // get story properties modelObjectMetadata.push_back(makeModelObjectMetadata("OS:BuildingStory", stories[storyIdx])); std::string storyName; if (checkKeyAndType(stories[storyIdx], "name", Json::stringValue)){ buildingStoryNames.push_back(storyName); } double belowFloorPlenumHeight = 0; if (checkKeyAndType(stories[storyIdx], "below_floor_plenum_height", Json::realValue)){ belowFloorPlenumHeight = lengthToMeters * stories[storyIdx].get("below_floor_plenum_height", belowFloorPlenumHeight).asDouble(); } double floorToCeilingHeight = 3; if (checkKeyAndType(stories[storyIdx], "floor_to_ceiling_height", Json::realValue)){ floorToCeilingHeight = lengthToMeters * stories[storyIdx].get("floor_to_ceiling_height", floorToCeilingHeight).asDouble(); } double 
aboveCeilingPlenumHeight = 0; if (checkKeyAndType(stories[storyIdx], "above_ceiling_plenum_height", Json::realValue)){ aboveCeilingPlenumHeight = lengthToMeters * stories[storyIdx].get("above_ceiling_plenum_height", aboveCeilingPlenumHeight).asDouble(); } // DLM: temp code if (floorToCeilingHeight < 0.1){ //belowFloorPlenumHeight = 1; floorToCeilingHeight = 3; //aboveCeilingPlenumHeight = 1; } // get the geometry assertKeyAndType(stories[storyIdx], "geometry", Json::objectValue); Json::Value geometry = stories[storyIdx].get("geometry", Json::arrayValue); Json::Value vertices = geometry.get("vertices", Json::arrayValue); Json::Value edges = geometry.get("edges", Json::arrayValue); Json::Value faces = geometry.get("faces", Json::arrayValue); // loop over spaces Json::Value spaces = stories[storyIdx].get("spaces", Json::arrayValue); Json::ArrayIndex spaceN = spaces.size(); for (Json::ArrayIndex spaceIdx = 0; spaceIdx < spaceN; ++spaceIdx){ // each space should have one face if (checkKeyAndType(spaces[spaceIdx], "face_id", Json::stringValue)){ std::string faceId = spaces[spaceIdx].get("face_id", "").asString(); assertKeyAndType(spaces[spaceIdx], "name", Json::stringValue); std::string spaceName = spaces[spaceIdx].get("name", "").asString(); double minZ = currentZ; double maxZ = currentZ + belowFloorPlenumHeight; if (belowFloorPlenumHeight > 0){ anyPlenums = true; modelObjectMetadata.push_back(ThreeModelObjectMetadata("OS:Space", "", spaceName + BELOWFLOORPLENUMPOSTFIX)); makeGeometries(stories[storyIdx], spaces[spaceIdx], true, false, lengthToMeters, minZ, maxZ, vertices, edges, faces, faceId, openstudioFormat, geometries, children); } minZ = maxZ; maxZ += floorToCeilingHeight; if (floorToCeilingHeight > 0){ modelObjectMetadata.push_back(makeModelObjectMetadata("OS:Space", spaces[spaceIdx])); makeGeometries(stories[storyIdx], spaces[spaceIdx], false, false, lengthToMeters, minZ, maxZ, vertices, edges, faces, faceId, openstudioFormat, geometries, children); } minZ = 
maxZ; maxZ += aboveCeilingPlenumHeight; if (aboveCeilingPlenumHeight > 0){ anyPlenums = true; modelObjectMetadata.push_back(ThreeModelObjectMetadata("OS:Space", "", spaceName + ABOVECEILINGPLENUMPOSTFIX)); makeGeometries(stories[storyIdx], spaces[spaceIdx], false, true, lengthToMeters, minZ, maxZ, vertices, edges, faces, faceId, openstudioFormat, geometries, children); } } } // spaces // increment height for next story currentZ += belowFloorPlenumHeight + floorToCeilingHeight + aboveCeilingPlenumHeight; } // stories // loop over building_units Json::Value buildingUnits = m_value.get("building_units", Json::arrayValue); Json::ArrayIndex n = buildingUnits.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ modelObjectMetadata.push_back(makeModelObjectMetadata("OS:BuildingUnit", buildingUnits[i])); } // loop over thermal_zones Json::Value thermalZones = m_value.get("thermal_zones", Json::arrayValue); n = thermalZones.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ modelObjectMetadata.push_back(makeModelObjectMetadata("OS:ThermalZone", thermalZones[i])); } for (const auto& thermalZoneName : m_plenumThermalZoneNames){ modelObjectMetadata.push_back(ThreeModelObjectMetadata("OS:ThermalZone", "", thermalZoneName)); } // loop over space_types Json::Value spaceTypes = m_value.get("space_types", Json::arrayValue); n = spaceTypes.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ modelObjectMetadata.push_back(makeModelObjectMetadata("OS:SpaceType", spaceTypes[i])); } if (anyPlenums){ modelObjectMetadata.push_back(ThreeModelObjectMetadata("OS:SpaceType", "", PLENUMSPACETYPENAME)); } // loop over construction_sets Json::Value constructionSets = m_value.get("construction_sets", Json::arrayValue); n = constructionSets.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ modelObjectMetadata.push_back(makeModelObjectMetadata("OS:DefaultConstructionSet", constructionSets[i])); } // DLM: TODO set correct bounding box ThreeBoundingBox boundingBox(0,0,0,0,0,0,0,0,0,0); ThreeSceneMetadata 
metadata(buildingStoryNames, boundingBox, modelObjectMetadata); ThreeSceneObject sceneObject("", children); ThreeScene result(metadata, geometries, materials, sceneObject); return result; } double FloorplanJS::northAxis() const { double result = 0; Json::Value project = m_value.get("project", Json::objectValue); if (!project.isNull()){ if (project.isMember("north_axis")){ // current location result = project.get("north_axis", result).asDouble(); } else{ // previous location Json::Value config = project.get("config", Json::objectValue); if (!config.isNull()){ result = config.get("north_axis", result).asDouble(); } } } return result; } void FloorplanJS::updateStories(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { updateObjects(m_value, "stories", objects, removeMissingObjects); } void FloorplanJS::updateSpaces(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { std::map<std::string, std::vector<FloorplanObject> > storyHandleToSpaceObejctIds; for (const auto& object : objects){ boost::optional<std::string> parentHandleString = object.parentHandleString(); if (!parentHandleString){ continue; } if (storyHandleToSpaceObejctIds.find(*parentHandleString) == storyHandleToSpaceObejctIds.end()){ storyHandleToSpaceObejctIds[*parentHandleString] = std::vector<FloorplanObject>(); } storyHandleToSpaceObejctIds[*parentHandleString].push_back(object); } for (const auto& keyValue: storyHandleToSpaceObejctIds){ // no need to check by name, assume stories have been updated Json::Value* story = findByHandleString(m_value, "stories", keyValue.first); if (story){ updateObjects(*story, "spaces", keyValue.second, removeMissingObjects); } } } void FloorplanJS::updateBuildingUnits(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { updateObjects(m_value, "building_units", objects, removeMissingObjects); } void FloorplanJS::updateThermalZones(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { 
updateObjects(m_value, "thermal_zones", objects, removeMissingObjects); } void FloorplanJS::updateSpaceTypes(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { updateObjects(m_value, "space_types", objects, removeMissingObjects); } void FloorplanJS::updateConstructionSets(const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { updateObjects(m_value, "construction_sets", objects, removeMissingObjects); } std::string FloorplanJS::getHandleString(const Json::Value& value) const { return value.get("handle", "").asString(); } std::string FloorplanJS::getName(const Json::Value& value) const { return value.get("name", "").asString(); } std::string FloorplanJS::getId(const Json::Value& value) const { return value.get("id", "").asString(); } std::string FloorplanJS::getFaceId(const Json::Value& value) const { return value.get("face_id", "").asString(); } std::string FloorplanJS::getNextId() { ++m_lastId; return std::to_string(m_lastId); } void FloorplanJS::setLastId(const Json::Value& value) { if (value.isObject()){ for (const auto& key : value.getMemberNames()){ const auto& value2 = value[key]; if (value2.isArray()){ Json::ArrayIndex n = value2.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ setLastId(value2[i]); } } else if (value2.isObject()){ setLastId(value2); } else if (key == "id"){ if (value2.isString()){ std::string s = value2.asString(); unsigned id = strtoul(s.c_str(), nullptr, 0); if (id > 100){ // DLM: TODO test code, remove bool test= false; } m_lastId = std::max(m_lastId, id); } else if (value2.isConvertibleTo(Json::ValueType::uintValue)){ unsigned id = value2.asUInt(); if (id > 100){ // DLM: TODO test code, remove bool test = false; } m_lastId = std::max(m_lastId, value2.asUInt()); } } } } } Json::Value* FloorplanJS::findByHandleString(Json::Value& value, const std::string& key, const std::string& handleString) { if (handleString.empty()){ return nullptr; } Json::Value& values = value[key]; Json::ArrayIndex n = 
values.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ if (getHandleString(values[i]) == handleString){ return &values[i]; } } return nullptr; } Json::Value* FloorplanJS::findByName(Json::Value& value, const std::string& key, const std::string& name, bool requireEmptyHandle) { if (name.empty()){ return nullptr; } Json::Value& values = value[key]; Json::ArrayIndex n = values.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ if (getName(values[i]) == name){ if (requireEmptyHandle && getHandleString(values[i]).empty()){ return &values[i]; } else{ return &values[i]; } } } return nullptr; } Json::Value* FloorplanJS::findById(Json::Value& value, const std::string& key, const std::string& id) { if (id.empty()){ return nullptr; } Json::Value& values = value[key]; Json::ArrayIndex n = values.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ if (getId(values[i]) == id){ return &values[i]; } } return nullptr; } const Json::Value* FloorplanJS::findById(const Json::Value& values, const std::string& id) const { if (id.empty()){ return nullptr; } Json::ArrayIndex n = values.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ if (getId(values[i]) == id){ return &values[i]; } } return nullptr; } void FloorplanJS::updateObjects(Json::Value& value, const std::string& key, const std::vector<FloorplanObject>& objects, bool removeMissingObjects) { // ensure key exists if (!value.isMember(key)){ value[key] = Json::Value(Json::arrayValue); } // remove all objects that aren't found by handle or name if (removeMissingObjects) { std::set<std::string> ids; std::set<std::string> names; std::set<std::string> handleStrings; for (const auto& object : objects){ ids.insert(object.id()); names.insert(object.name()); handleStrings.insert(object.handleString()); } std::vector<Json::ArrayIndex> indicesToRemove; std::set<std::string> faceIdsToRemove; Json::Value& values = value[key]; Json::ArrayIndex n = values.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ std::string handle = 
getHandleString(values[i]); // empty handle indicates that object has not been synched with the osm yet if (!handle.empty() && (handleStrings.find(handle) == handleStrings.end())){ // no object in floorplan should have an empty name if (names.find(getName(values[i])) == names.end()){ indicesToRemove.push_back(i); std::string faceId = getFaceId(values[i]); if (!faceId.empty()){ faceIdsToRemove.insert(faceId); } } } } std::reverse(indicesToRemove.begin(), indicesToRemove.end()); for (const auto& i : indicesToRemove){ Json::Value removed; values.removeIndex(i, &removed); } if (!faceIdsToRemove.empty()){ if (checkKeyAndType(value, "geometry", Json::objectValue)){ removeFaces(value["geometry"], faceIdsToRemove); } } } // now update names and data for (const auto& object : objects){ Json::Value* v = findByHandleString(value, key, object.handleString()); if (v){ // ensure name is the same (*v)["name"] = object.name(); } else { // find object by name only if handle is empty v = findByName(value, key, object.name(), true); if (v){ // set handle (*v)["handle"] = object.handleString(); } else{ // create new object Json::Value newObject(Json::objectValue); newObject["id"] = getNextId(); newObject["name"] = object.name(); newObject["handle"] = object.handleString(); v = &(value[key].append(newObject)); } } if (v){ // update properties Json::Value data = object.data(); for (const auto& key : data.getMemberNames()){ (*v)[key] = data[key]; } // update references for (const auto& p : object.objectReferenceMap()){ updateObjectReference(*v, p.first, p.second, removeMissingObjects); } } } } void FloorplanJS::updateObjectReference(Json::Value& value, const std::string& key, const FloorplanObject& objectReference, bool removeMissingObjects) { std::string searchKey; if (key == "thermal_zone_id"){ searchKey = "thermal_zones"; }else if (key == "space_type_id"){ searchKey = "space_types"; }else if (key == "building_unit_id"){ searchKey = "building_units"; } else if (key == 
"construction_set_id"){ searchKey = "construction_sets"; } if (searchKey.empty()){ LOG(Error, "Could not find objects to search for key '" << key << "'"); return; } Json::Value* v = findByHandleString(m_value, searchKey, objectReference.handleString()); if (v){ value[key] = v->get("id", "").asString(); return; } v = findById(m_value, searchKey, objectReference.id()); if (v){ value[key] = v->get("id", "").asString(); return; } v = findByName(m_value, searchKey, objectReference.name(), true); if (v){ value[key] = v->get("id", "").asString(); return; } value[key] = ""; } void FloorplanJS::removeFaces(Json::Value& value, const std::set<std::string>& faceIdsToRemove) { if (!checkKeyAndType(value, "faces", Json::arrayValue)){ return; } std::map<std::string, unsigned> edgeRefCount; std::set<std::string> edgeIdsToRemove; std::vector<Json::ArrayIndex> indicesToRemove; Json::Value& faces = value["faces"]; Json::ArrayIndex n = faces.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ bool removeFace = false; if (faceIdsToRemove.find(getId(faces[i])) != faceIdsToRemove.end()){ removeFace = true; indicesToRemove.push_back(i); } if (checkKeyAndType(faces[i], "edge_ids", Json::arrayValue)){ Json::Value& edgeIds = faces[i]["edge_ids"]; Json::ArrayIndex n2 = edgeIds.size(); for (Json::ArrayIndex i2 = 0; i2 < n2; ++i2){ std::string edgeId = edgeIds[i2].asString(); if (removeFace){ edgeIdsToRemove.insert(edgeId); } if (edgeRefCount.find(edgeId) == edgeRefCount.end()){ edgeRefCount[edgeId] = 1; }else{ edgeRefCount[edgeId] = edgeRefCount[edgeId] + 1; } } } } std::reverse(indicesToRemove.begin(), indicesToRemove.end()); // remove the faces for (const auto& i : indicesToRemove){ Json::Value removed; faces.removeIndex(i, &removed); } // don't remove edges with ref count > 1 for (const auto& pair : edgeRefCount){ if (pair.second > 1){ edgeIdsToRemove.erase(pair.first); } } removeEdges(value, edgeIdsToRemove); } void FloorplanJS::removeEdges(Json::Value& value, const std::set<std::string>& 
edgeIdsToRemove) { if (!checkKeyAndType(value, "edges", Json::arrayValue)){ return; } std::map<std::string, unsigned> vertexRefCount; std::set<std::string> vertexIdsToRemove; std::vector<Json::ArrayIndex> indicesToRemove; Json::Value& edges = value["edges"]; Json::ArrayIndex n = edges.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ bool removeEdge = false; if (edgeIdsToRemove.find(getId(edges[i])) != edgeIdsToRemove.end()){ removeEdge = true; indicesToRemove.push_back(i); } if (checkKeyAndType(edges[i], "vertex_ids", Json::arrayValue)){ Json::Value& vertexIds = edges[i]["vertex_ids"]; Json::ArrayIndex n2 = vertexIds.size(); for (Json::ArrayIndex i2 = 0; i2 < n2; ++i2){ std::string vertexId = vertexIds[i2].asString(); if (removeEdge){ vertexIdsToRemove.insert(vertexId); } if (vertexRefCount.find(vertexId) == vertexRefCount.end()){ vertexRefCount[vertexId] = 1; }else{ vertexRefCount[vertexId] = vertexRefCount[vertexId] + 1; } } } } std::reverse(indicesToRemove.begin(), indicesToRemove.end()); // remove the edges for (const auto& i : indicesToRemove){ Json::Value removed; edges.removeIndex(i, &removed); } // don't remove vertices with ref count > 1 for (const auto& pair : vertexRefCount){ if (pair.second > 2){ vertexIdsToRemove.erase(pair.first); } } removeVertices(value, vertexIdsToRemove); } void FloorplanJS::removeVertices(Json::Value& value, const std::set<std::string>& vertexIdsToRemove) { if (!checkKeyAndType(value, "vertices", Json::arrayValue)){ return; } std::vector<Json::ArrayIndex> indicesToRemove; Json::Value& vertices = value["vertices"]; Json::ArrayIndex n = vertices.size(); for (Json::ArrayIndex i = 0; i < n; ++i){ if (vertexIdsToRemove.find(getId(vertices[i])) != vertexIdsToRemove.end()){ indicesToRemove.push_back(i); } } std::reverse(indicesToRemove.begin(), indicesToRemove.end()); // remove the vertices for (const auto& i : indicesToRemove){ Json::Value removed; vertices.removeIndex(i, &removed); } } } // openstudio
n=int(input()) if n<=3: if n==3: mensaje="7" if n==2: mensaje="1" else: if n<=7: if n==4: mensaje="11" if n==5: mensaje="71" if n==6: mensaje="111" if n==7: mensaje="711" else: a=n//2 if (n%2==0): mensaje="1"*a else: mensaje="7"+("1"*(a-1)) print(mensaje)
// Generated by cdk-import
// NOTE(review): this file is auto-generated; do not hand-edit — regenerate
// with cdk-import instead. Formatting restored for readability only.
import * as cdk from 'aws-cdk-lib';
import * as constructs from 'constructs';

/**
 * Schema for Module Fragment of type JFrog::Xray::EC2Instance::MODULE
 *
 * @schema CfnEc2InstanceModuleProps
 */
export interface CfnEc2InstanceModuleProps {
  /**
   * @schema CfnEc2InstanceModuleProps#Parameters
   */
  readonly parameters?: CfnEc2InstanceModulePropsParameters;

  /**
   * @schema CfnEc2InstanceModuleProps#Resources
   */
  readonly resources?: CfnEc2InstanceModulePropsResources;
}

/**
 * Converts an object of type 'CfnEc2InstanceModuleProps' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModuleProps(obj: CfnEc2InstanceModuleProps | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Parameters': toJson_CfnEc2InstanceModulePropsParameters(obj.parameters),
    'Resources': toJson_CfnEc2InstanceModulePropsResources(obj.resources),
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsParameters
 */
export interface CfnEc2InstanceModulePropsParameters {
  /**
   * Logical Id of the MODULE
   *
   * @schema CfnEc2InstanceModulePropsParameters#LogicalId
   */
  readonly logicalId?: CfnEc2InstanceModulePropsParametersLogicalId;

  /**
   * ID of the private subnet in Availability Zone 2 of your existing VPC (e.g., subnet-z0376dab).
   *
   * @schema CfnEc2InstanceModulePropsParameters#PrivateSubnet2Id
   */
  readonly privateSubnet2Id?: CfnEc2InstanceModulePropsParametersPrivateSubnet2Id;

  /**
   * ID of the private subnet in Availability Zone 1 of your existing VPC (e.g., subnet-z0376dab).
   *
   * @schema CfnEc2InstanceModulePropsParameters#PrivateSubnet1Id
   */
  readonly privateSubnet1Id?: CfnEc2InstanceModulePropsParametersPrivateSubnet1Id;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#KeyPairName
   */
  readonly keyPairName?: CfnEc2InstanceModulePropsParametersKeyPairName;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#MinScalingNodes
   */
  readonly minScalingNodes?: CfnEc2InstanceModulePropsParametersMinScalingNodes;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#MaxScalingNodes
   */
  readonly maxScalingNodes?: CfnEc2InstanceModulePropsParametersMaxScalingNodes;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#DeploymentTag
   */
  readonly deploymentTag?: CfnEc2InstanceModulePropsParametersDeploymentTag;

  /**
   * JFrog Artifactory product you want to install into an AMI.
   *
   * @schema CfnEc2InstanceModulePropsParameters#ArtifactoryProduct
   */
  readonly artifactoryProduct?: CfnEc2InstanceModulePropsParametersArtifactoryProduct;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#QsS3BucketName
   */
  readonly qsS3BucketName?: CfnEc2InstanceModulePropsParametersQsS3BucketName;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#QsS3KeyPrefix
   */
  readonly qsS3KeyPrefix?: CfnEc2InstanceModulePropsParametersQsS3KeyPrefix;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#QsS3Uri
   */
  readonly qsS3Uri?: CfnEc2InstanceModulePropsParametersQsS3Uri;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#DatabaseDriver
   */
  readonly databaseDriver?: CfnEc2InstanceModulePropsParametersDatabaseDriver;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#DatabaseType
   */
  readonly databaseType?: CfnEc2InstanceModulePropsParametersDatabaseType;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#DatabaseUser
   */
  readonly databaseUser?: CfnEc2InstanceModulePropsParametersDatabaseUser;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#DatabasePassword
   */
  readonly databasePassword?: CfnEc2InstanceModulePropsParametersDatabasePassword;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#MasterKey
   */
  readonly masterKey?: CfnEc2InstanceModulePropsParametersMasterKey;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#ExtraJavaOptions
   */
  readonly extraJavaOptions?: CfnEc2InstanceModulePropsParametersExtraJavaOptions;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#SecurityGroups
   */
  readonly securityGroups?: CfnEc2InstanceModulePropsParametersSecurityGroups;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayHostProfile
   */
  readonly xrayHostProfile?: CfnEc2InstanceModulePropsParametersXrayHostProfile;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayHostRole
   */
  readonly xrayHostRole?: CfnEc2InstanceModulePropsParametersXrayHostRole;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayInstanceType
   */
  readonly xrayInstanceType?: CfnEc2InstanceModulePropsParametersXrayInstanceType;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#JfrogInternalUrl
   */
  readonly jfrogInternalUrl?: CfnEc2InstanceModulePropsParametersJfrogInternalUrl;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#VolumeSize
   */
  readonly volumeSize?: CfnEc2InstanceModulePropsParametersVolumeSize;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayDatabaseUser
   */
  readonly xrayDatabaseUser?: CfnEc2InstanceModulePropsParametersXrayDatabaseUser;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayDatabasePassword
   */
  readonly xrayDatabasePassword?: CfnEc2InstanceModulePropsParametersXrayDatabasePassword;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayMasterDatabaseUrl
   */
  readonly xrayMasterDatabaseUrl?: CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayDatabaseUrl
   */
  readonly xrayDatabaseUrl?: CfnEc2InstanceModulePropsParametersXrayDatabaseUrl;

  /**
   * @schema CfnEc2InstanceModulePropsParameters#XrayVersion
   */
  readonly xrayVersion?: CfnEc2InstanceModulePropsParametersXrayVersion;

  /**
   * Directory to store Artifactory data. Can be used to store data (via symlink) in detachable volume
   *
   * @schema CfnEc2InstanceModulePropsParameters#UserDataDirectory
   */
  readonly userDataDirectory?: CfnEc2InstanceModulePropsParametersUserDataDirectory;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParameters' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParameters(obj: CfnEc2InstanceModulePropsParameters | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'LogicalId': toJson_CfnEc2InstanceModulePropsParametersLogicalId(obj.logicalId),
    'PrivateSubnet2Id': toJson_CfnEc2InstanceModulePropsParametersPrivateSubnet2Id(obj.privateSubnet2Id),
    'PrivateSubnet1Id': toJson_CfnEc2InstanceModulePropsParametersPrivateSubnet1Id(obj.privateSubnet1Id),
    'KeyPairName': toJson_CfnEc2InstanceModulePropsParametersKeyPairName(obj.keyPairName),
    'MinScalingNodes': toJson_CfnEc2InstanceModulePropsParametersMinScalingNodes(obj.minScalingNodes),
    'MaxScalingNodes': toJson_CfnEc2InstanceModulePropsParametersMaxScalingNodes(obj.maxScalingNodes),
    'DeploymentTag': toJson_CfnEc2InstanceModulePropsParametersDeploymentTag(obj.deploymentTag),
    'ArtifactoryProduct': toJson_CfnEc2InstanceModulePropsParametersArtifactoryProduct(obj.artifactoryProduct),
    'QsS3BucketName': toJson_CfnEc2InstanceModulePropsParametersQsS3BucketName(obj.qsS3BucketName),
    'QsS3KeyPrefix': toJson_CfnEc2InstanceModulePropsParametersQsS3KeyPrefix(obj.qsS3KeyPrefix),
    'QsS3Uri': toJson_CfnEc2InstanceModulePropsParametersQsS3Uri(obj.qsS3Uri),
    'DatabaseDriver': toJson_CfnEc2InstanceModulePropsParametersDatabaseDriver(obj.databaseDriver),
    'DatabaseType': toJson_CfnEc2InstanceModulePropsParametersDatabaseType(obj.databaseType),
    'DatabaseUser': toJson_CfnEc2InstanceModulePropsParametersDatabaseUser(obj.databaseUser),
    'DatabasePassword': toJson_CfnEc2InstanceModulePropsParametersDatabasePassword(obj.databasePassword),
    'MasterKey': toJson_CfnEc2InstanceModulePropsParametersMasterKey(obj.masterKey),
    'ExtraJavaOptions': toJson_CfnEc2InstanceModulePropsParametersExtraJavaOptions(obj.extraJavaOptions),
    'SecurityGroups': toJson_CfnEc2InstanceModulePropsParametersSecurityGroups(obj.securityGroups),
    'XrayHostProfile': toJson_CfnEc2InstanceModulePropsParametersXrayHostProfile(obj.xrayHostProfile),
    'XrayHostRole': toJson_CfnEc2InstanceModulePropsParametersXrayHostRole(obj.xrayHostRole),
    'XrayInstanceType': toJson_CfnEc2InstanceModulePropsParametersXrayInstanceType(obj.xrayInstanceType),
    'JfrogInternalUrl': toJson_CfnEc2InstanceModulePropsParametersJfrogInternalUrl(obj.jfrogInternalUrl),
    'VolumeSize': toJson_CfnEc2InstanceModulePropsParametersVolumeSize(obj.volumeSize),
    'XrayDatabaseUser': toJson_CfnEc2InstanceModulePropsParametersXrayDatabaseUser(obj.xrayDatabaseUser),
    'XrayDatabasePassword': toJson_CfnEc2InstanceModulePropsParametersXrayDatabasePassword(obj.xrayDatabasePassword),
    'XrayMasterDatabaseUrl': toJson_CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl(obj.xrayMasterDatabaseUrl),
    'XrayDatabaseUrl': toJson_CfnEc2InstanceModulePropsParametersXrayDatabaseUrl(obj.xrayDatabaseUrl),
    'XrayVersion': toJson_CfnEc2InstanceModulePropsParametersXrayVersion(obj.xrayVersion),
    'UserDataDirectory': toJson_CfnEc2InstanceModulePropsParametersUserDataDirectory(obj.userDataDirectory),
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsResources
 */
export interface CfnEc2InstanceModulePropsResources {
  /**
   * @schema CfnEc2InstanceModulePropsResources#XrayScalingGroup
   */
  readonly xrayScalingGroup?: CfnEc2InstanceModulePropsResourcesXrayScalingGroup;

  /**
   * @schema CfnEc2InstanceModulePropsResources#XrayLaunchConfiguration
   */
  readonly xrayLaunchConfiguration?: CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsResources' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsResources(obj: CfnEc2InstanceModulePropsResources | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'XrayScalingGroup': toJson_CfnEc2InstanceModulePropsResourcesXrayScalingGroup(obj.xrayScalingGroup),
    'XrayLaunchConfiguration': toJson_CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration(obj.xrayLaunchConfiguration),
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * Logical Id of the MODULE
 *
 * @schema CfnEc2InstanceModulePropsParametersLogicalId
 */
export interface CfnEc2InstanceModulePropsParametersLogicalId {
  /**
   * @schema CfnEc2InstanceModulePropsParametersLogicalId#Type
   */
  readonly type: string;

  /**
   * @schema CfnEc2InstanceModulePropsParametersLogicalId#Description
   */
  readonly description: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersLogicalId' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersLogicalId(obj: CfnEc2InstanceModulePropsParametersLogicalId | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
    'Description': obj.description,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * ID of the private subnet in Availability Zone 2 of your existing VPC (e.g., subnet-z0376dab).
 *
 * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet2Id
 */
export interface CfnEc2InstanceModulePropsParametersPrivateSubnet2Id {
  /**
   * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet2Id#Type
   */
  readonly type: string;

  /**
   * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet2Id#Description
   */
  readonly description: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersPrivateSubnet2Id' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersPrivateSubnet2Id(obj: CfnEc2InstanceModulePropsParametersPrivateSubnet2Id | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
    'Description': obj.description,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * ID of the private subnet in Availability Zone 1 of your existing VPC (e.g., subnet-z0376dab).
 *
 * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet1Id
 */
export interface CfnEc2InstanceModulePropsParametersPrivateSubnet1Id {
  /**
   * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet1Id#Type
   */
  readonly type: string;

  /**
   * @schema CfnEc2InstanceModulePropsParametersPrivateSubnet1Id#Description
   */
  readonly description: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersPrivateSubnet1Id' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersPrivateSubnet1Id(obj: CfnEc2InstanceModulePropsParametersPrivateSubnet1Id | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
    'Description': obj.description,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsParametersKeyPairName
 */
export interface CfnEc2InstanceModulePropsParametersKeyPairName {
  /**
   * @schema CfnEc2InstanceModulePropsParametersKeyPairName#Type
   */
  readonly type: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersKeyPairName' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersKeyPairName(obj: CfnEc2InstanceModulePropsParametersKeyPairName | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsParametersMinScalingNodes
 */
export interface CfnEc2InstanceModulePropsParametersMinScalingNodes {
  /**
   * @schema CfnEc2InstanceModulePropsParametersMinScalingNodes#Type
   */
  readonly type: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersMinScalingNodes' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersMinScalingNodes(obj: CfnEc2InstanceModulePropsParametersMinScalingNodes | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsParametersMaxScalingNodes
 */
export interface CfnEc2InstanceModulePropsParametersMaxScalingNodes {
  /**
   * @schema CfnEc2InstanceModulePropsParametersMaxScalingNodes#Type
   */
  readonly type: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersMaxScalingNodes' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersMaxScalingNodes(obj: CfnEc2InstanceModulePropsParametersMaxScalingNodes | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * @schema CfnEc2InstanceModulePropsParametersDeploymentTag
 */
export interface CfnEc2InstanceModulePropsParametersDeploymentTag {
  /**
   * @schema CfnEc2InstanceModulePropsParametersDeploymentTag#Type
   */
  readonly type: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersDeploymentTag' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersDeploymentTag(obj: CfnEc2InstanceModulePropsParametersDeploymentTag | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {});
}
/* eslint-enable max-len, quote-props */

/**
 * JFrog Artifactory product you want to install into an AMI.
 *
 * @schema CfnEc2InstanceModulePropsParametersArtifactoryProduct
 */
export interface CfnEc2InstanceModulePropsParametersArtifactoryProduct {
  /**
   * @schema CfnEc2InstanceModulePropsParametersArtifactoryProduct#Type
   */
  readonly type: string;

  /**
   * @schema CfnEc2InstanceModulePropsParametersArtifactoryProduct#Description
   */
  readonly description: string;
}

/**
 * Converts an object of type 'CfnEc2InstanceModulePropsParametersArtifactoryProduct' to JSON representation.
 */
/* eslint-disable max-len, quote-props */
export function toJson_CfnEc2InstanceModulePropsParametersArtifactoryProduct(obj: CfnEc2InstanceModulePropsParametersArtifactoryProduct | undefined): Record<string, any> | undefined {
  if (obj === undefined) { return undefined; }
  const result = {
    'Type': obj.type,
    'Description': obj.description,
  };
  // filter undefined values
  return Object.entries(result).reduce((r, i) => (i[1] === undefined) ?
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersQsS3BucketName */ export interface CfnEc2InstanceModulePropsParametersQsS3BucketName { /** * @schema CfnEc2InstanceModulePropsParametersQsS3BucketName#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersQsS3BucketName' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersQsS3BucketName(obj: CfnEc2InstanceModulePropsParametersQsS3BucketName | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersQsS3KeyPrefix */ export interface CfnEc2InstanceModulePropsParametersQsS3KeyPrefix { /** * @schema CfnEc2InstanceModulePropsParametersQsS3KeyPrefix#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersQsS3KeyPrefix' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersQsS3KeyPrefix(obj: CfnEc2InstanceModulePropsParametersQsS3KeyPrefix | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersQsS3Uri */ export interface CfnEc2InstanceModulePropsParametersQsS3Uri { /** * @schema CfnEc2InstanceModulePropsParametersQsS3Uri#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersQsS3Uri' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersQsS3Uri(obj: CfnEc2InstanceModulePropsParametersQsS3Uri | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersDatabaseDriver */ export interface CfnEc2InstanceModulePropsParametersDatabaseDriver { /** * @schema CfnEc2InstanceModulePropsParametersDatabaseDriver#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersDatabaseDriver' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersDatabaseDriver(obj: CfnEc2InstanceModulePropsParametersDatabaseDriver | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersDatabaseType */ export interface CfnEc2InstanceModulePropsParametersDatabaseType { /** * @schema CfnEc2InstanceModulePropsParametersDatabaseType#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersDatabaseType' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersDatabaseType(obj: CfnEc2InstanceModulePropsParametersDatabaseType | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersDatabaseUser */ export interface CfnEc2InstanceModulePropsParametersDatabaseUser { /** * @schema CfnEc2InstanceModulePropsParametersDatabaseUser#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersDatabaseUser' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersDatabaseUser(obj: CfnEc2InstanceModulePropsParametersDatabaseUser | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersDatabasePassword */ export interface CfnEc2InstanceModulePropsParametersDatabasePassword { /** * @schema CfnEc2InstanceModulePropsParametersDatabasePassword#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersDatabasePassword' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersDatabasePassword(obj: CfnEc2InstanceModulePropsParametersDatabasePassword | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersMasterKey */ export interface CfnEc2InstanceModulePropsParametersMasterKey { /** * @schema CfnEc2InstanceModulePropsParametersMasterKey#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersMasterKey' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersMasterKey(obj: CfnEc2InstanceModulePropsParametersMasterKey | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersExtraJavaOptions */ export interface CfnEc2InstanceModulePropsParametersExtraJavaOptions { /** * @schema CfnEc2InstanceModulePropsParametersExtraJavaOptions#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersExtraJavaOptions' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersExtraJavaOptions(obj: CfnEc2InstanceModulePropsParametersExtraJavaOptions | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersSecurityGroups */ export interface CfnEc2InstanceModulePropsParametersSecurityGroups { /** * @schema CfnEc2InstanceModulePropsParametersSecurityGroups#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersSecurityGroups' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersSecurityGroups(obj: CfnEc2InstanceModulePropsParametersSecurityGroups | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayHostProfile */ export interface CfnEc2InstanceModulePropsParametersXrayHostProfile { /** * @schema CfnEc2InstanceModulePropsParametersXrayHostProfile#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayHostProfile' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayHostProfile(obj: CfnEc2InstanceModulePropsParametersXrayHostProfile | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayHostRole */ export interface CfnEc2InstanceModulePropsParametersXrayHostRole { /** * @schema CfnEc2InstanceModulePropsParametersXrayHostRole#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayHostRole' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayHostRole(obj: CfnEc2InstanceModulePropsParametersXrayHostRole | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayInstanceType */ export interface CfnEc2InstanceModulePropsParametersXrayInstanceType { /** * @schema CfnEc2InstanceModulePropsParametersXrayInstanceType#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayInstanceType' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayInstanceType(obj: CfnEc2InstanceModulePropsParametersXrayInstanceType | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersJfrogInternalUrl */ export interface CfnEc2InstanceModulePropsParametersJfrogInternalUrl { /** * @schema CfnEc2InstanceModulePropsParametersJfrogInternalUrl#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersJfrogInternalUrl' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersJfrogInternalUrl(obj: CfnEc2InstanceModulePropsParametersJfrogInternalUrl | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersVolumeSize */ export interface CfnEc2InstanceModulePropsParametersVolumeSize { /** * @schema CfnEc2InstanceModulePropsParametersVolumeSize#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersVolumeSize' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersVolumeSize(obj: CfnEc2InstanceModulePropsParametersVolumeSize | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabaseUser */ export interface CfnEc2InstanceModulePropsParametersXrayDatabaseUser { /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabaseUser#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayDatabaseUser' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayDatabaseUser(obj: CfnEc2InstanceModulePropsParametersXrayDatabaseUser | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabasePassword */ export interface CfnEc2InstanceModulePropsParametersXrayDatabasePassword { /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabasePassword#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayDatabasePassword' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayDatabasePassword(obj: CfnEc2InstanceModulePropsParametersXrayDatabasePassword | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl */ export interface CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl { /** * @schema CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl(obj: CfnEc2InstanceModulePropsParametersXrayMasterDatabaseUrl | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabaseUrl */ export interface CfnEc2InstanceModulePropsParametersXrayDatabaseUrl { /** * @schema CfnEc2InstanceModulePropsParametersXrayDatabaseUrl#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayDatabaseUrl' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayDatabaseUrl(obj: CfnEc2InstanceModulePropsParametersXrayDatabaseUrl | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsParametersXrayVersion */ export interface CfnEc2InstanceModulePropsParametersXrayVersion { /** * @schema CfnEc2InstanceModulePropsParametersXrayVersion#Type */ readonly type: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersXrayVersion' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersXrayVersion(obj: CfnEc2InstanceModulePropsParametersXrayVersion | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * Directory to store Artifactory data. 
Can be used to store data (via symlink) in detachable volume * * @schema CfnEc2InstanceModulePropsParametersUserDataDirectory */ export interface CfnEc2InstanceModulePropsParametersUserDataDirectory { /** * @schema CfnEc2InstanceModulePropsParametersUserDataDirectory#Type */ readonly type: string; /** * @schema CfnEc2InstanceModulePropsParametersUserDataDirectory#Description */ readonly description: string; } /** * Converts an object of type 'CfnEc2InstanceModulePropsParametersUserDataDirectory' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsParametersUserDataDirectory(obj: CfnEc2InstanceModulePropsParametersUserDataDirectory | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, 'Description': obj.description, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsResourcesXrayScalingGroup */ export interface CfnEc2InstanceModulePropsResourcesXrayScalingGroup { /** * @schema CfnEc2InstanceModulePropsResourcesXrayScalingGroup#Type */ readonly type?: string; /** * @schema CfnEc2InstanceModulePropsResourcesXrayScalingGroup#Properties */ readonly properties?: any; } /** * Converts an object of type 'CfnEc2InstanceModulePropsResourcesXrayScalingGroup' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsResourcesXrayScalingGroup(obj: CfnEc2InstanceModulePropsResourcesXrayScalingGroup | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, 'Properties': obj.properties, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? 
r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * @schema CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration */ export interface CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration { /** * @schema CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration#Type */ readonly type?: string; /** * @schema CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration#Properties */ readonly properties?: any; } /** * Converts an object of type 'CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration' to JSON representation. */ /* eslint-disable max-len, quote-props */ export function toJson_CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration(obj: CfnEc2InstanceModulePropsResourcesXrayLaunchConfiguration | undefined): Record<string, any> | undefined { if (obj === undefined) { return undefined; } const result = { 'Type': obj.type, 'Properties': obj.properties, }; // filter undefined values return Object.entries(result).reduce((r, i) => (i[1] === undefined) ? r : ({ ...r, [i[0]]: i[1] }), {}); } /* eslint-enable max-len, quote-props */ /** * A CloudFormation `JFrog::Xray::EC2Instance::MODULE` * * @cloudformationResource JFrog::Xray::EC2Instance::MODULE * @stability external * @link http://unknown-url */ export class CfnEc2InstanceModule extends cdk.CfnResource { /** * The CloudFormation resource type name for this resource class. */ public static readonly CFN_RESOURCE_TYPE_NAME = "JFrog::Xray::EC2Instance::MODULE"; /** * Resource props. */ public readonly props: CfnEc2InstanceModuleProps; /** * Create a new `JFrog::Xray::EC2Instance::MODULE`. * * @param scope - scope in which this resource is defined * @param id - scoped id of the resource * @param props - resource properties */ constructor(scope: constructs.Construct, id: string, props: CfnEc2InstanceModuleProps) { super(scope, id, { type: CfnEc2InstanceModule.CFN_RESOURCE_TYPE_NAME, properties: toJson_CfnEc2InstanceModuleProps(props)! }); this.props = props; } }
import React from 'react' import styles from './styles.module.scss' // ___________ // type BlockContainerProps = { gap?: number | string } // ___________ // const BlockContainer: React.FC<BlockContainerProps> = ({ gap, children }) => { return ( <div className={styles.blockContainer} style={{ gap: gap || '12px' }}> {children} </div> ) } export default BlockContainer
/**
 * Converts a PDE (sketch) line number to the corresponding line number in
 * the preprocessed Java source. The Java file is the concatenation of all
 * sketch tabs, preceded by generated import statements, so the offset is
 * the import count plus the lengths of all tabs before the current one.
 *
 * @param pdeLineNum - pde line number (relative to the current tab)
 * @return line number in the combined Java source
 */
protected int pdeLineNumToJavaLineNum(int pdeLineNum) {
  // Account for the import lines the preprocessor prepends.
  int javaLineNumber = pdeLineNum + errorCheckerService.getPdeImportsCount();

  // Add the line counts of every tab that precedes the current one.
  // (Loop body simply never runs when the current tab is the first one.)
  int codeIndex = editor.getSketch().getCodeIndex(editor.getCurrentTab());
  for (int i = 0; i < codeIndex; i++) {
    SketchCode sc = editor.getSketch().getCode(i);
    // +1 — presumably for the newline inserted between concatenated tabs;
    // TODO(review): confirm against the preprocessor's concatenation logic.
    javaLineNumber += Util.countLines(sc.getProgram()) + 1;
  }
  return javaLineNumber;
}
from collections import Counter

# Count how many 100g and 200g apples were given. The first input line
# (the apple count) is consumed but not needed for the decision.
_ = int(input())
weights = Counter(input().split())

hundreds = weights['100']
two_hundreds = weights['200']

# The total weight can be split evenly iff the number of 100g apples is
# even AND either the 200g apples also pair up, or a pair of 100g apples
# is available to balance the odd 200g one.
if hundreds % 2 == 0 and (two_hundreds % 2 == 0 or hundreds > 0):
    print("YES")
else:
    print("NO")
package olie;

/**
 * @author niexianglin you can mail to <EMAIL>
 * @Date: 2018/7/11 11:04
 * @Description: Notes on the IDE "Select In" action.
 */
public class _SelectIn {
    /**
     * Select In: choose where to reveal the file currently being edited. Alt + F1
     * Most common use: reveal the file in the project structure view (Alt + F1 -> 1)
     */
    /**
     * Menu path: Navigate -> Select In
     */
    /**
     * Impression: reasonably practical, reasonably interesting
     */
}
package main

import (
	"context"
	"encoding/json"
	"time"

	"github.com/go-redis/redis/v8"
)

// ClientMarshalerInterface describes a cache client that stores strings
// verbatim and transparently (un)marshals every other value as JSON.
type ClientMarshalerInterface interface {
	Get(key string, returnObj interface{}) (interface{}, error)
	Set(key string, object interface{}, expiration time.Duration) error
}

// Compile-time check that ClientMarshaler satisfies the interface.
var _ ClientMarshalerInterface = (*ClientMarshaler)(nil)

// ClientMarshaler wraps a redis client with JSON (un)marshaling of values.
type ClientMarshaler struct {
	client  redis.Cmdable
	context context.Context
}

// NewMarshaller builds a ClientMarshaler around the given redis client and
// the context used for every subsequent redis call.
func NewMarshaller(cache redis.Cmdable, ctx context.Context) *ClientMarshaler {
	return &ClientMarshaler{
		client:  cache,
		context: ctx,
	}
}

// Get fetches key from redis. If returnObj is a string the raw payload is
// returned as-is; otherwise the payload is JSON-decoded into returnObj
// (which must therefore be a pointer) and returnObj is returned.
func (c *ClientMarshaler) Get(key string, returnObj interface{}) (interface{}, error) {
	result, err := c.client.Get(c.context, key).Result()
	if err != nil {
		return nil, err
	}

	// Strings were stored verbatim by Set, so hand back the raw value.
	if _, ok := returnObj.(string); ok {
		return result, nil
	}

	// Everything else was stored as JSON; decode into the caller's value.
	if err := json.Unmarshal([]byte(result), returnObj); err != nil {
		return nil, err
	}
	return returnObj, nil
}

// Set stores object under key with the given expiration. Strings are
// written verbatim; any other value is JSON-encoded first.
func (c *ClientMarshaler) Set(key string, object interface{}, expiration time.Duration) error {
	var value interface{}
	switch v := object.(type) {
	case string:
		value = v
	default:
		marshaled, err := json.Marshal(object)
		if err != nil {
			return err
		}
		value = marshaled
	}
	return c.client.Set(c.context, key, value, expiration).Err()
}
#pragma once #include <catboost/idl/pool/proto/quantization_schema.pb.h> #include <catboost/idl/pool/proto/metainfo.pb.h> #include <catboost/libs/helpers/exception.h> #include <catboost/private/libs/options/enums.h> #include <util/generic/yexception.h> #include <util/system/types.h> namespace NCB { namespace NQuantizationSchemaDetail { inline ENanMode NanModeFromProto(const NIdl::ENanMode proto) { switch (proto) { case NIdl::NM_MIN: return ENanMode::Min; case NIdl::NM_MAX: return ENanMode::Max; case NIdl::NM_FORBIDDEN: return ENanMode::Forbidden; case NIdl::NM_UNKNOWN: // Native `ENanMode` doesn't have `Unknown` member break; } ythrow TCatBoostException() << "got unexpected enum " << static_cast<int>(proto); } inline NIdl::ENanMode NanModeToProto(const ENanMode native) { switch (native) { case ENanMode::Min: return NIdl::NM_MIN; case ENanMode::Max: return NIdl::NM_MAX; case ENanMode::Forbidden: return NIdl::NM_FORBIDDEN; } ythrow TCatBoostException() << "got unexpected enum " << static_cast<int>(native); } bool IsFakeIndex(ui32 index, const NIdl::TPoolMetainfo& metaInfo); } }
import Joi from 'joi';

// Validation schema for a stored entry: a 24-char hex id, two epoch
// timestamps, and a list of control-character-free tag strings. Each of
// guid/cdate/mdate is required but may be explicitly null.
export default {
  guid: Joi.alternatives()
    .try(
      Joi.any().only().allow(null),
      Joi.string().trim(false).length(24).hex()
    )
    .required(),
  cdate: Joi.alternatives()
    .try(Joi.any().only().allow(null), Joi.number())
    .required(),
  mdate: Joi.alternatives()
    .try(Joi.any().only().allow(null), Joi.number())
    .required(),
  tags: Joi.array()
    .items(
      // Reject any tag containing an ASCII control character
      // (0x01-0x1F or DEL). `invert: true` makes a match a failure.
      // BUG FIX: the previous pattern /[[\x01-\x1F\x7F]]/ had doubled
      // brackets, so it only matched a class character followed by a
      // literal ']' and let lone control characters through.
      Joi.string().pattern(/[\x01-\x1F\x7F]/, {
        name: 'control characters',
        invert: true,
      })
    )
    .required(),
};
package order

import (
	"encoding/json"
	"fmt"
	"strings"
)

// SuppressionListField client suppression list order field.
type SuppressionListField int8

const (
	// BySuppressedEmailAddress email address.
	BySuppressedEmailAddress SuppressionListField = iota
	// BySuppressionDate suppression date.
	BySuppressionDate
)

var (
	// suppressionFieldToString maps each field to its wire name.
	suppressionFieldToString = map[SuppressionListField]string{
		BySuppressedEmailAddress: "email",
		BySuppressionDate:        "date",
	}

	// stringToSuppressionField is the (lower-cased) inverse mapping.
	stringToSuppressionField = map[string]SuppressionListField{
		"email": BySuppressedEmailAddress,
		"date":  BySuppressionDate,
	}
)

// UnmarshalJSON parses the json bytes into a SuppressionListField value.
// Matching is case-insensitive; unknown names produce an error.
func (f *SuppressionListField) UnmarshalJSON(bytes []byte) error {
	var value string
	if err := json.Unmarshal(bytes, &value); err != nil {
		return fmt.Errorf("order-by field should be a string, got %s", bytes)
	}

	field, ok := stringToSuppressionField[strings.ToLower(value)]
	if !ok {
		return fmt.Errorf("invalid order-by field %q", value)
	}

	*f = field
	return nil
}

// String implements fmt.Stringer, returning the field's wire name.
func (f SuppressionListField) String() string {
	return suppressionFieldToString[f]
}
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Maximum supported string length, including the terminator. */
#define MAX_LEN 200005

/*
 * Reads strings a and b, then reports two counts:
 *   1) letters of a matched by an identical-case letter of b, and
 *   2) remaining letters of a matched by an opposite-case letter of b.
 * Each letter of b can be used at most once across both passes.
 */
int main(void)
{
    char *a = malloc(MAX_LEN);
    char *b = malloc(MAX_LEN);
    if (a == NULL || b == NULL) {
        free(a);
        free(b);
        return 1;
    }
    /* Width-limited reads prevent buffer overflow on oversized input. */
    if (scanf("%200004s %200004s", a, b) != 2) {
        free(a);
        free(b);
        return 1;
    }

    /* avail[letter][0]: unused uppercase copies in b;
     * avail[letter][1]: unused lowercase copies in b. */
    long avail[26][2] = {{0}};
    size_t lenA = strlen(a);
    size_t lenB = strlen(b);
    size_t i;

    for (i = 0; i < lenB; i++) {
        /* Explicit letter ranges: the original `<= 'Z'`-style test let
         * non-letter characters produce negative array indices (UB). */
        if (b[i] >= 'A' && b[i] <= 'Z')
            avail[b[i] - 'A'][0]++;
        else if (b[i] >= 'a' && b[i] <= 'z')
            avail[b[i] - 'a'][1]++;
    }

    long sameCase = 0;
    long diffCase = 0;

    /* Pass 1: greedy identical-case matches. Matched positions in a are
     * zeroed so the second pass skips them. */
    for (i = 0; i < lenA; i++) {
        if (a[i] >= 'A' && a[i] <= 'Z' && avail[a[i] - 'A'][0] > 0) {
            avail[a[i] - 'A'][0]--;
            a[i] = 0;
            sameCase++;
        } else if (a[i] >= 'a' && a[i] <= 'z' && avail[a[i] - 'a'][1] > 0) {
            avail[a[i] - 'a'][1]--;
            a[i] = 0;
            sameCase++;
        }
    }

    /* Pass 2: match leftovers against the opposite case. Zeroed slots
     * fall outside the letter ranges, so they are skipped naturally. */
    for (i = 0; i < lenA; i++) {
        if (a[i] >= 'A' && a[i] <= 'Z' && avail[a[i] - 'A'][1] > 0) {
            avail[a[i] - 'A'][1]--;
            diffCase++;
        } else if (a[i] >= 'a' && a[i] <= 'z' && avail[a[i] - 'a'][0] > 0) {
            avail[a[i] - 'a'][0]--;
            diffCase++;
        }
    }

    /* Output format preserved from the original (no trailing newline). */
    printf("%ld %ld", sameCase, diffCase);

    free(a);
    free(b);
    return 0;
}
-- Chapter 4 exercises: standard list functions rebuilt from folds.

-- | Flatten one level of nesting; foldr appends each inner list in order.
myConcat :: [[a]] -> [a]
myConcat = foldr (++) []

-- | takeWhile via explicit recursion: stop at the first failing element.
recursiveTakeWhile :: (a -> Bool) -> [a] -> [a]
recursiveTakeWhile _ [] = []
recursiveTakeWhile keep (x:rest)
  | keep x    = x : recursiveTakeWhile keep rest
  | otherwise = []

-- | takeWhile via foldr: laziness cuts the fold short at the first failure.
myTakeWhile :: (a -> Bool) -> [a] -> [a]
myTakeWhile keep = foldr (\x acc -> if keep x then x : acc else []) []

-- | groupBy: each element either joins the current leading group (when it
-- relates to that group's head) or starts a fresh group in front.
myGroupBy :: (a -> a -> Bool) -> [a] -> [[a]]
myGroupBy related = foldr step []
  where
    step x []                    = [[x]]
    step x groups@((y:ys):rest)
      | related x y              = (x : y : ys) : rest
      | otherwise                = [x] : groups

-- | any via foldr; (||) short-circuits exactly like the original if/then.
myAny :: (a -> Bool) -> [a] -> Bool
myAny test = foldr (\x acc -> test x || acc) False

-- | Infinite repetition via a self-referential binding (knot-tying).
myCycle :: [a] -> [a]
myCycle xs = looped
  where looped = xs ++ looped

-- | Split on spaces and newlines by folding from the right: a separator at
-- the head of the current group signals that the next character (to the
-- left) must open a new group; the separator itself is dropped then.
myWords :: String -> [String]
myWords = foldr step [""]
  where
    step c [""]                = [[c]]
    step c (('\n':grp):rest)   = [c] : grp : rest
    step c ((' ':grp):rest)    = [c] : grp : rest
    step c (grp:rest)          = (c : grp) : rest

-- | Join lines, appending a newline after each string (left fold).
myUnlines :: [String] -> String
myUnlines = foldl (\acc s -> acc ++ s ++ "\n") ""
  where
    -- Unused local helper retained from the original exercise file.
    myTail :: [a] -> [a]
    myTail []        = []
    myTail (_:rest)  = rest
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;

const ll M = 1000000007;

int main() {
    ll p, k;
    cin >> p >> k;

    // Computes base^e mod M by repeated multiplication (e is small here).
    auto powmod = [](ll base, ll e) {
        ll result = 1;
        for (ll i = 0; i < e; ++i) {
            result = (result * base) % M;
        }
        return result;
    };

    ll answer;
    if (k % p == 1) {
        // Every value of f is unconstrained: p independent choices for
        // each of the p inputs, so the answer is p^p.
        answer = powmod(p, p);
    } else {
        // f(0) = 0 is forced. The remaining freedom is one choice per
        // orbit of (Z/pZ)* under multiplication by k, so the answer is
        // p^(number of orbits). First find the orbit length of k.
        ll cur = k * k % p;
        int orbitLen = 1;
        while (cur != k) {
            cur = (cur * k) % p;
            ++orbitLen;
        }
        // (p-1)/orbitLen orbits partition the nonzero residues.
        answer = powmod(p, (p - 1) / orbitLen);
    }

    cout << answer << endl;
    return 0;
}
/**
 * Request a permission and optionally register a callback for the current activity
 *
 * @param requestedPermissions Array of permissions as defined in Manifest
 * @param permissionCallback function called with result of permission prompt
 * @param requestCode - 8 Bit value to associate callback with request - if none is provided, a system generated one is used
 * @return true in case of valid request, false if requested permission is not a valid one
 */
@Kroll.method
public boolean requestPermissions(@Kroll.argument String[] requestedPerms,
        @Kroll.argument(optional = true) KrollFunction permissionCallback,
        @Kroll.argument(optional = true) Integer requestCode)
{
    // Validate every requested permission up front; reject the whole
    // batch if any single one is unknown.
    for (String perm : requestedPerms) {
        Log.d(LCAT, "Requesting permission: " + perm);
        if (!isValidPermissionString(perm)) {
            Log.e(LCAT, "Requested permission is not supported :" + perm);
            return false;
        }
    }
    // All permissions are valid; hand the batch to the shared handler.
    return handleRequest(requestedPerms, requestCode, permissionCallback);
}
/**
 * Verifies that a shadow query returns one shadow per commit of an entity.
 *
 * @author Gleb Bondarchuk
 * Created on 04, 22 2018.
 */
public class ShadowQueryTest {

    @Test
    public void shadowQueryTest() {
        // Random commit ids avoid ordering assumptions between commits.
        Javers javers = JaversBuilder.javers()
                .withCommitIdGenerator(CommitIdGenerator.RANDOM)
                .build();

        Employee employee = new Employee("Ryan", 1000);

        // First commit: initial state of the entity.
        Map<String, String> firstCommitProps = new HashMap<>();
        firstCommitProps.put("1", "1");
        javers.commit("author", employee, firstCommitProps);

        // Second commit: salary change.
        employee.setSalary(2000);
        Map<String, String> secondCommitProps = new HashMap<>();
        secondCommitProps.put("2", "2");
        javers.commit("author", employee, secondCommitProps);

        JqlQuery query = QueryBuilder.byInstanceId("Ryan", Employee.class)
                .withChildValueObjects()
                .withNewObjectChanges()
                .skip(0)
                .limit(6)
                .withScopeDeepPlus(1000)
                .build();

        // Two commits should yield exactly two shadows.
        List<Shadow<Employee>> shadows = javers.findShadows(query);
        assertEquals(2, shadows.size());
    }
}
<reponame>sousa-andre/ipvcEI-Bot import fetch from 'node-fetch' export interface Stock { symbol: string displayName: string regularMarketPrice: string } export const getStock = async (stockTicker: string) => { let req = await fetch( `https://stock-data-yahoo-finance-alternative.p.rapidapi.com/v6/finance/quote?symbols=${stockTicker}`, { method: 'GET', headers: { 'x-rapidapi-host': 'stock-data-yahoo-finance-alternative.p.rapidapi.com', 'x-rapidapi-key': '<KEY>', }, }, ) let response = await req.json() let reData = await response.quoteResponse.result[0] return reData }
<reponame>lukasz-okuniewicz/custom-pixi-particles export default { ANGULAR_BEHAVIOUR: 'AngularVelocityBehaviour', LIFE_BEHAVIOUR: 'LifeBehaviour', COLOR_BEHAVIOUR: 'ColorBehaviour', POSITION_BEHAVIOUR: 'PositionBehaviour', SIZE_BEHAVIOUR: 'SizeBehaviour', EMIT_DIRECTION: 'EmitDirectionBehaviour', ROTATION_BEHAVIOUR: 'RotationBehaviour', TURBULENCE_BEHAVIOUR: 'TurbulenceBehaviour', }
import { Component, Input, Output, EventEmitter, OnInit, OnChanges, ElementRef } from '@angular/core';

/**
 * Dual-label on/off switch rendered with the Switchery widget.
 * Supports two-way binding via [value] / (valueChange).
 */
@Component({
    selector: 'my-checkbox-switchery-double',
    template: `
        <div class="checkbox checkbox-switchery switchery-{{ size }} switchery-double" style="margin-left: 15px;">
            <label>
                {{ label }}
                <input [ngModel]="value" (ngModelChange)="valueChange.emit($event)" [checked]="value" type="checkbox" class="switchery">
                {{ label_2 }}
            </label>
        </div>
    `
})
export class MyCheckboxSwitcheryDouble implements OnInit, OnChanges {
    // Current checked state (two-way bindable together with valueChange).
    @Input() public value: boolean = false;
    // Text rendered before the switch.
    @Input() public label: string = "";
    // Text rendered after the switch.
    @Input() public label_2: string = "";
    // Switchery size class suffix (e.g. "sm").
    @Input() public size: string = "sm";

    // Switchery widget instance; created in ngOnInit, so it is still
    // undefined during the very first ngOnChanges call.
    private switcher;

    @Output() public valueChange: EventEmitter<boolean> = new EventEmitter<boolean>();

    public previousValue = false;

    public constructor(private eRef: ElementRef) {}

    ngOnInit() {
        // Switchery is a page-global constructor decorating the raw checkbox.
        this.switcher = new Switchery(this.eRef.nativeElement.querySelector('input.switchery'));
    }

    ngOnChanges(changes) {
        // BUGFIX: changes.value is undefined when only another input
        // (label, size, ...) changed; the old code dereferenced it before
        // the guard and threw a TypeError. Bail out early instead.
        if (changes.value == undefined) {
            return;
        }
        this.previousValue = (typeof changes.value.previousValue == 'boolean') ? changes.value.previousValue : this.previousValue;
        // Angular fires ngOnChanges before ngOnInit, so the switcher may
        // not exist yet on the first change; guard before calling into it.
        if (this.switcher && this.previousValue != changes.value.currentValue) {
            this.switcher.setPosition(true);
        }
    }
}
package pgxschema

import (
	"context"
	"fmt"
	"math/rand"
	"sync"
	"testing"
	"time"

	"github.com/jackc/pgx/v4/pgxpool"
)

// TestCreateMigrationsTable ensures that each test database can
// successfully create the schema_migrations table.
func TestCreateMigrationsTable(t *testing.T) {
	withEachDB(t, func(db *pgxpool.Pool) {
		migrator := makeTestMigrator()
		err := migrator.createMigrationsTable(db)
		if err != nil {
			t.Errorf("Error occurred when creating migrations table: %s", err)
		}

		// Test that we can re-run it safely (the operation is idempotent).
		err = migrator.createMigrationsTable(db)
		if err != nil {
			t.Errorf("Calling createMigrationsTable a second time failed: %s", err)
		}
	})
}

// TestLockAndUnlock tests the Lock and Unlock mechanisms of each
// test database in isolation from any migrations actually being run.
func TestLockAndUnlock(t *testing.T) {
	withEachDB(t, func(db *pgxpool.Pool) {
		m := makeTestMigrator()
		err := m.lock(db)
		if err != nil {
			t.Error(err)
		}
		err = m.unlock(db)
		if err != nil {
			t.Error(err)
		}
	})
}

// TestApplyInLexicalOrder ensures that each test database runs migrations in
// lexical order rather than the order they were provided in the slice. This is
// also the primary test to assert that the data in the tracking table is
// all correct.
func TestApplyInLexicalOrder(t *testing.T) {
	withEachDB(t, func(db *pgxpool.Pool) {
		start := time.Now().Truncate(time.Second) // MySQL has only second accuracy, so we need start/end to span 1 second

		tableName := "lexical_order_migrations"
		migrator := NewMigrator(WithTableName(tableName))
		err := migrator.Apply(db, unorderedMigrations())
		if err != nil {
			t.Error(err)
		}

		end := time.Now().Add(time.Second).Truncate(time.Second) // MySQL has only second accuracy, so we need start/end to span 1 second

		applied, err := migrator.GetAppliedMigrations(db)
		if err != nil {
			t.Error(err)
		}
		if len(applied) != 3 {
			t.Errorf("Expected exactly 3 applied migrations. Got %d", len(applied))
		}

		firstMigration := applied["2021-01-01 001"]
		if firstMigration == nil {
			t.Fatal("Missing first migration")
		}
		if firstMigration.Checksum == "" {
			t.Error("Expected non-blank Checksum value after successful migration")
		}
		if firstMigration.ExecutionTimeInMillis < 1 {
			t.Errorf("Expected ExecutionTimeInMillis of %s to be tracked. Got %d", firstMigration.ID, firstMigration.ExecutionTimeInMillis)
		}

		// Put value in consistent timezone to aid error message readability
		appliedAt := firstMigration.AppliedAt.Round(time.Second)
		if appliedAt.IsZero() || appliedAt.Before(start) || appliedAt.After(end) {
			t.Errorf("Expected AppliedAt between %s and %s, got %s", start, end, appliedAt)
		}
		assertZonesMatch(t, start, appliedAt)

		secondMigration := applied["2021-01-01 002"]
		if secondMigration == nil {
			t.Fatal("Missing second migration")
		} else if secondMigration.Checksum == "" {
			t.Fatal("Expected checksum to get populated when migration ran")
		}

		if firstMigration.AppliedAt.After(secondMigration.AppliedAt) {
			t.Errorf("Expected migrations to run in lexical order, but first migration ran at %s and second one ran at %s", firstMigration.AppliedAt, secondMigration.AppliedAt)
		}
	})
}

// TestFailedMigration ensures that a migration with a syntax error triggers
// an expected error when Apply() is run. This test is run on every test database
func TestFailedMigration(t *testing.T) {
	withEachDB(t, func(db *pgxpool.Pool) {
		tableName := time.Now().Format(time.RFC3339Nano)
		migrator := NewMigrator(WithTableName(tableName))

		migrations := []*Migration{
			{
				ID:     "2019-01-01 Bad Migration",
				Script: "CREATE TIBBLE bad_table_name (id INTEGER NOT NULL PRIMARY KEY)",
			},
		}
		err := migrator.Apply(db, migrations)
		expectErrorContains(t, err, "TIBBLE")

		query := "SELECT * FROM " + migrator.QuotedTableName()
		rows, _ := db.Query(context.Background(), query)

		// We expect either an error (because the transaction was rolled back
		// and the table no longer exists)... or a query with no results.
		// Close is deferred only when rows is non-nil to avoid a nil
		// dereference in the deferred call.
		if rows != nil {
			defer rows.Close()
			if rows.Next() {
				t.Error("Record was inserted in tracking table even though the migration failed")
			}
		}
	})
}

// TestSimultaneousApply creates multiple Migrators and multiple distinct
// connections to each test database and attempts to call .Apply() on them all
// concurrently. The migrations include an INSERT statement, which allows us
// to count to ensure that each unique migration was only run once.
func TestSimultaneousApply(t *testing.T) {
	concurrency := 4
	dataTable := fmt.Sprintf("data%d", rand.Int()) // #nosec don't need a strong RNG here
	migrationsTable := fmt.Sprintf("Migrations %s", time.Now().Format(time.RFC3339Nano))
	sharedMigrations := []*Migration{
		{
			ID:     "2020-05-01 Sleep",
			Script: "SELECT pg_sleep(1)",
		},
		{
			ID: "2020-05-02 Create Data Table",
			Script: fmt.Sprintf(`CREATE TABLE %s (
				id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
				created_at TIMESTAMP WITH TIME ZONE NOT NULL
			)`, dataTable),
		},
		{
			ID:     "2020-05-03 Add Initial Record",
			Script: fmt.Sprintf(`INSERT INTO %s (created_at) VALUES (NOW())`, dataTable),
		},
	}

	var wg sync.WaitGroup
	for i := 0; i < concurrency; i++ {
		wg.Add(1)
		go func(i int) {
			db := connectDB(t, "postgres:latest")
			migrator := NewMigrator(WithTableName(migrationsTable))
			err := migrator.Apply(db, sharedMigrations)
			if err != nil {
				t.Error(err)
			}
			_, err = db.Exec(context.Background(), fmt.Sprintf("INSERT INTO %s (created_at) VALUES (NOW())", dataTable))
			if err != nil {
				t.Error(err)
			}
			wg.Done()
		}(i)
	}
	wg.Wait()

	// We expect concurrency + 1 rows in the data table
	// (1 from the migration, and one each for the
	// goroutines which ran Apply and then did an
	// insert afterwards)
	db := connectDB(t, "postgres:latest")
	count := 0
	row := db.QueryRow(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM %s", dataTable))
	err := row.Scan(&count)
	if err != nil {
		t.Error(err)
	}
	if count != concurrency+1 {
		t.Errorf("Expected to get %d rows in %s table. Instead got %d", concurrency+1, dataTable, count)
	}
}

// TestApplyMultistatementMigrations ensures a single migration script may
// contain several SQL statements, and that re-applying a set is a no-op.
func TestApplyMultistatementMigrations(t *testing.T) {
	withEachDB(t, func(db *pgxpool.Pool) {
		migrator := makeTestMigrator()
		migrationSet1 := []*Migration{
			{
				ID: "2019-09-23 Create Artists and Albums",
				Script: `
		CREATE TABLE artists (
			id SERIAL PRIMARY KEY,
			name CHARACTER VARYING (255) NOT NULL DEFAULT ''
		);
		CREATE UNIQUE INDEX idx_artists_name ON artists (name);
		CREATE TABLE albums (
			id SERIAL PRIMARY KEY,
			title CHARACTER VARYING (255) NOT NULL DEFAULT '',
			artist_id INTEGER NOT NULL REFERENCES artists(id)
		);
		`,
			},
		}
		err := migrator.Apply(db, migrationSet1)
		if err != nil {
			t.Error(err)
		}
		// Applying the same set again must be safe.
		err = migrator.Apply(db, migrationSet1)
		if err != nil {
			t.Error(err)
		}

		migrationSet2 := []*Migration{
			{
				ID: "2019-09-24 Create Tracks",
				Script: `
		CREATE TABLE tracks (
			id SERIAL PRIMARY KEY,
			name CHARACTER VARYING (255) NOT NULL DEFAULT '',
			artist_id INTEGER NOT NULL REFERENCES artists(id),
			album_id INTEGER NOT NULL REFERENCES albums(id)
		);`,
			},
		}
		err = migrator.Apply(db, migrationSet2)
		if err != nil {
			t.Error(err)
		}
	})
}

// makeTestMigrator is a utility function which produces a migrator with an
// isolated environment (isolated due to a unique name for the migration
// tracking table).
func makeTestMigrator() *Migrator {
	tableName := time.Now().Format(time.RFC3339Nano)
	return NewMigrator(WithTableName(tableName))
}

// testMigrations loads the migrations stored in the named subdirectory of
// test-migrations, failing the test immediately if they cannot be read.
func testMigrations(t *testing.T, dirName string) []*Migration {
	path := fmt.Sprintf("test-migrations/%s", dirName)
	migrations, err := MigrationsFromDirectoryPath(path)
	if err != nil {
		t.Fatalf("Failed to load test migrations from '%s'", path)
	}
	return migrations
}

// assertZonesMatch accepts two Times and fails the test if their time zones
// don't match.
func assertZonesMatch(t *testing.T, expected, actual time.Time) {
	t.Helper()
	expectedName, expectedOffset := expected.Zone()
	actualName, actualOffset := actual.Zone()
	if expectedOffset != actualOffset {
		t.Errorf("Expected Zone '%s' with offset %d. Got Zone '%s' with offset %d", expectedName, expectedOffset, actualName, actualOffset)
	}
}
Former US secretary of state stokes speculation of presidential campaign on New Hampshire visit with Democratic Senate candidate Jeanne Shaheen Hillary Clinton defended Democrats’ focus on women’s rights on Sunday, as she returned to her old redoubt of New Hampshire to help the party keep hold of a crucial US Senate seat and maintain the state’s all-female congressional delegation. In her first visit to the key presidential primary state since her 2008 campaign for the White House, the former US secretary of state also stoked speculation that she is readying another run in 2016, meeting voters at unannounced campaign stops. Appearing at a rally in Nashua alongside Senator Jeanne Shaheen, Clinton attacked Shaheen’s opponent, Scott Brown, for past votes opposing legislation that would guarantee women equal pay and coverage for contraception in their health insurance. “It is just astonishing to me that we’re having a debate about whether or not our country believes in equal pay for equal work,” said Clinton, noting that “Jeanne’s opponent is sort of on record dismissing this issue.” Asked last month by Fox News to respond to Shaheen’s persistent criticism of his record on “women’s reproductive rights and economic security”, Brown said: “Unfortunately I’m talking about issues that people care about.” Women working full-time in New Hampshire have median earnings of $41,542 a year, according to the American Association of University Women. This is 78% of the $52,954-a-year median earnings of men in the state. A similar ratio exists nationwide. Clinton went on to tell the crowd that they should make sure fellow New Hampshire voters understand that “at stake in this election is whether or not women have the right to make our own reproductive healthcare decisions”. 
New Hampshire Democrats have aggressively drawn attention to Brown’s co-sponsoring legislation, while previously representing Massachusetts in the Senate, to allow employers to decline on moral grounds to provide insurance covering contraception, as well as his vote against the Paycheck Fairness Act. Criticising opponents “who raise questions about ‘why do these Democrats go around talking about women’”, Clinton told a capacity crowd of 700 at a community college: “Women’s rights are on the frontier of freedom everywhere in the world.” Shaheen leads Brown in polls by 2.5 percentage points, according to a RealClearPolitics average. Defeat for her or congresswoman Carol Shea-Porter on Tuesday would end New Hampshire’s two-year-old all-women delegation in Washington. The state’s governor, Maggie Hassan, is expected to see off a challenge from the Republican Walt Havenstein. “I’m really proud to be part of the first all-female delegation to Congress,” said Shaheen, before introducing Clinton. “It shows how smart New Hampshire voters are. And there is nobody better to come here at this time, for our final push, than a woman who has been a role model for so many women across this country.” Clinton, who has been campaigning in several key congressional races for this year’s mid-terms, followed the rally in Nashua with unexpected get-out-the-vote stops at a bar in Manchester and then in Dover. Support in New Hampshire, renowned for being the “first in the nation” primary state, would be critical to a second presidential campaign. 
Noting that the birth in September of her first granddaughter, Charlotte, had made her “focused on the future,” she said: “You get this new life in your family, and you start thinking OK: what are we gonna do for her or for him, and what are we going to do to make sure that our country and the world is the place that it should be for our children and our grandchildren?” Clinton received a rapturous reception from supporters in the state where she rebounded from defeat in the Iowa caucuses in the 2008 Democratic presidential primary campaign, and where her husband, Bill, declared himself the “comeback kid” after a second-place finish in the 1992 primary on his way to the party’s nomination. Asked by Shaheen whether they were “ready for Hillary” – using the name of a campaign group that is raising money in preparation for an expected 2016 White House campaign – the crowd roared before chanting Clinton’s name. Clinton thanked supporters in the state for welcoming her and her husband since 1991. And in a rare reference to one of the lowest points of her first White House bid, she said: “In 2008, during the darkest days of my campaign, you lifted me up, you gave me my voice back, you taught me so much about grit and determination, and I will never forget that.” Despite winning the primary, Clinton went on to lose the nomination to then senator Barack Obama. Accusing Brown of being a “showpony” compared with Shaheen, whom she called a “workhorse”, Clinton appeared to join in Democratic criticism of Brown running to represent a state that has only latterly been his home. Clinton faced similar criticism from Republicans when running successfully for US senator from New York in 2000. Referencing a dispute in a televised debate last week in which Brown was accused of being uncertain about the location of a New Hampshire county, Clinton said: “Jeanne knows this state like the back of her hand. 
I’m not just talking about geography, although that does help.” Showing off her own apparent knowledge of the state, Clinton reeled off political issues related to specific cities, praising Shaheen’s record as she went. Brown, who lost his Massachusetts seat in 2012 to Senator Elizabeth Warren, would if victorious on Tuesday become the first person in 135 years to represent two different states in the US Senate. Pledging to make Brown a “historic figure”, Shaheen told the crowd: “We will make him the first person defeated running for the Senate in two different states by two different women”.