Highly Stable Lasing from Solution‐Epitaxially Grown Formamidinium‐Lead‐Bromide Micro‐Resonators
High‐quality epitaxial growth of oriented microcrystallites on a semiconductor substrate is demonstrated here for formamidinium lead bromide perovskite, by drop casting of precursor solutions in air. The microcrystallites exhibit green photoluminescence at room temperature, as well as lasing with low thresholds. Lasing is observed even though the substrate is fully opaque at the lasing wavelengths, and even though it has a higher refractive index than the perovskite active material. Moreover, the lasing is stable for more than 10⁹ excitation pulses, which exceeds what has previously been achieved for devices kept in air. Such highly stable lasing under pulsed excitation represents an important step towards continuous‐wave operation or even electrical excitation in future perovskite‐based devices.
import os

def assert_paths():
    """Return a checker that asserts every path in a list exists."""
    def f(paths: list):
        for file in paths:
            assert os.path.exists(file)
    return f
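For context, a minimal usage sketch of the checker above; the example paths are placeholders:

# Hypothetical usage: build the checker once, then validate path lists.
check = assert_paths()
check(["/tmp", os.getcwd()])   # passes silently when every path exists
check(["/no/such/path"])       # raises AssertionError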
/**
 * Helper method to check and update the cache for exchange rates.
 *
 * @param fromCurrency conversion from currency
 * @param toCurrency   conversion to currency
 * @throws CurrencyNotSupportedException
 * @throws StorageException
 * @throws JSONException
 * @throws EndpointException
 * @throws ServiceException
 */
public void updateResource(Currency fromCurrency, Currency toCurrency)
        throws CurrencyNotSupportedException, StorageException, JSONException,
        EndpointException, ServiceException {
    if (!endpointFactory.checkRatesUsable(fromCurrency)
            || !endpointFactory.checkRatesUsable(toCurrency)) {
        // Cached rates are stale or missing: fetch fresh rates, persist them,
        // and load them into the endpoint factory.
        JSONObject response = endpointFactory.sendLiveRequest();
        diskStore.saveRates(response);
        endpointFactory.setExchangeRates(response);
    } else {
        // Cached rates are still usable: restore them from disk.
        endpointFactory.setExchangeRates(diskStore.loadRates());
    }
}
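A hedged call-site sketch; the enclosing service object and the Currency values are assumptions, not shown in the original:

// Hypothetical call site: ensure the cache is fresh before reading rates.
// `rateService` is whatever object exposes updateResource(); names are illustrative.
rateService.updateResource(Currency.USD, Currency.EUR);
// After the call, endpointFactory holds usable rates for both currencies.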
#include <bits/stdc++.h>
using namespace std;

// Reads two times in "hh:mm" format and prints the midpoint between them.
int main() {
    string a, b;
    cin >> a >> b;
    // Parse hours and minutes out of the "hh:mm" strings.
    int h = stoi(a.substr(0, 2));
    int m = stoi(a.substr(3, 2));
    int hh = stoi(b.substr(0, 2));
    int mm = stoi(b.substr(3, 2));
    // Half the difference in minutes, split back into hours and minutes.
    int diff = ((hh - h) * 60 + (mm - m)) / 2;
    int q = diff / 60;
    int t = diff % 60;
    int htime = h + q;
    int mtime = m + t;
    if (mtime >= 60) { // carry overflowing minutes into the hour
        htime++;
        mtime -= 60;
    }
    // Print zero-padded "hh:mm".
    cout << setw(2) << setfill('0') << htime << ":"
         << setw(2) << setfill('0') << mtime << endl;
}
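A small self-contained sketch of the same midpoint arithmetic, useful for spot-checking; the function name is mine, not from the original:

#include <cassert>
#include <string>
using namespace std;

// Midpoint of two "hh:mm" times, returned as minutes since midnight.
int midpointMinutes(const string& a, const string& b) {
    int s = stoi(a.substr(0, 2)) * 60 + stoi(a.substr(3, 2));
    int e = stoi(b.substr(0, 2)) * 60 + stoi(b.substr(3, 2));
    return s + (e - s) / 2;
}

int main() {
    assert(midpointMinutes("10:00", "11:00") == 630);  // 10:30
    assert(midpointMinutes("01:02", "03:02") == 122);  // 02:02
}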
/**
 * Start operation request was accepted.
 */
@Immutable
public class StartOperationRequestAuditEvent extends AbstractUserAuditEvent {

  public static class StartOperationAuditEventBuilder
      extends AbstractUserAuditEventBuilder<StartOperationRequestAuditEvent, StartOperationAuditEventBuilder> {

    /**
     * Request id
     */
    private String requestId;

    /**
     * Reason of failure; if it is set, the request is considered failed
     */
    private String reasonOfFailure;

    /**
     * Description of the request
     */
    private String operation;

    /**
     * Target host of the request
     */
    private String hostname;

    private StartOperationAuditEventBuilder() {
      super(StartOperationAuditEventBuilder.class);
    }

    /**
     * Appends to the audit event the identifier of the
     * operation through which the operation progress can be tracked.
     *
     * @param builder builder for the audit event details.
     */
    @Override
    protected void buildAuditMessage(StringBuilder builder) {
      super.buildAuditMessage(builder);

      builder
        .append(", Operation(")
        .append(operation);

      if (hostname != null) {
        builder.append("), Host name(").append(hostname);
      }

      builder.append("), RequestId(")
        .append(requestId)
        .append("), Status(")
        .append(reasonOfFailure == null ? "Successfully queued" : "Failed to queue");

      if (reasonOfFailure != null) {
        builder.append("), Reason(")
          .append(reasonOfFailure);
      }

      builder.append(")");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected StartOperationRequestAuditEvent newAuditEvent() {
      return new StartOperationRequestAuditEvent(this);
    }

    /**
     * Sets the identifier of the operation through which the operation progress can be tracked.
     *
     * @param requestId the identifier of the operation through which the operation progress can be tracked.
     * @return this builder
     */
    public StartOperationAuditEventBuilder withRequestId(String requestId) {
      this.requestId = requestId;
      return this;
    }

    public StartOperationAuditEventBuilder withReasonOfFailure(String reasonOfFailure) {
      this.reasonOfFailure = reasonOfFailure;
      return this;
    }

    public StartOperationAuditEventBuilder withOperation(String operation) {
      this.operation = operation;
      return this;
    }

    public StartOperationAuditEventBuilder withHostname(String hostname) {
      this.hostname = hostname;
      return this;
    }
  }

  private StartOperationRequestAuditEvent() {
  }

  /**
   * {@inheritDoc}
   */
  private StartOperationRequestAuditEvent(StartOperationAuditEventBuilder builder) {
    super(builder);
  }

  /**
   * Returns a builder for {@link StartOperationRequestAuditEvent}
   *
   * @return a builder instance
   */
  public static StartOperationAuditEventBuilder builder() {
    return new StartOperationAuditEventBuilder();
  }
}
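A hedged usage sketch; the terminal build() call is assumed to come from the abstract base builder, which is not shown here:

// Hypothetical usage: assemble and build an audit event.
StartOperationRequestAuditEvent event = StartOperationRequestAuditEvent.builder()
    .withOperation("START service")        // sample operation description
    .withHostname("host1.example.com")     // optional; omitted from output when null
    .withRequestId("42")
    .build();                              // assumed inherited from the base builder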
High-Temperature Nodal Ring Semimetal in 2D Honeycomb-Kagome Mn2N3 Lattice
The search for two-dimensional (2D) nodal ring semimetallic (NRSM) materials is a current research hotspot in spintronics, and designing a 2D nodal ring (NR) material with a high Curie temperature (T_C) and strong robustness to spin-orbit coupling (SOC) is an even greater challenge. Here, based on first-principles calculations and symmetry analysis, we predict that 2D Mn2N3 is an NRSM with three energy bands near the Fermi level consisting of electrons in the same spin channel. An electron-like energy band and two hole-like energy bands near the Fermi level cross to form two NRs centered at the Γ point. Symmetry analysis shows that the spin-polarized NR semimetal is robust to SOC due to the conservation of horizontal mirror symmetry. Monte Carlo simulations further demonstrate that the T_C of 2D Mn2N3 reaches 530 K, well above room temperature. Notably, 2D Mn2N3 remains an NRSM on an h-BN substrate. Our results not only reveal a general framework for designing 2D NR materials, but also promote further research in the direction of multifunctional quantum devices for spintronics.
Networks of phenotypic variation
Evolution
Evolution depends on phenotypic variation. Gene regulatory networks are theoretically expected to be likely sources of variation. Schaerli et al. explored how the structure of gene regulatory circuits contributes to the range of mutant phenotypes that are produced. Synthetic networks in the bacterium Escherichia coli were built to test this idea experimentally. In colonies responding to a gradient of a stimulatory chemical, the networks produced a striped pattern of cells. Ultimately, the structure of the networks was critical in specifying the phenotypes that could be obtained by mutation. Mol. Syst. Biol. 14, e8102 (2018).
/**
 * This class provides a fixed-size immutable array with an update operation. Setting an element
 * to a value returns a new array.
 */
public final class ImmutableArray<E> extends ImmutableArrayBase<E> implements Array<E> {

    private ImmutableArray(long sz) {
        super(sz);
    }

    @SuppressWarnings("rawtypes")
    private static final ImmutableArray emptyTuple = new ImmutableArray(0);

    /**
     * Construct a new tuple of the given size.
     */
    @SuppressWarnings("unchecked")
    public static <E> ImmutableArray<E> create(long size) {
        if (size == 0)
            return emptyTuple;
        else
            return new ImmutableArray<E>(size);
    }

    private ImmutableArray(ImmutableArray<E> x, long i, E e) {
        super(x, i, e);
    }

    public E at(long i) {
        return super.get(i);
    }

    @Override
    public E get(long i) {
        return super.get(i);
    }

    public E get(Long i) {
        return at(i);
    }

    /**
     * Returns a tuple with the <i>i</i>th element replaced.
     */
    public ImmutableArray<E> set(long i, E e) {
        return new ImmutableArray<E>(this, i, e);
    }

    public Seq<E> seq() {
        return SeqFromIterator.create(iterator());
    }

    public ForwardIterator<E> iterator() {
        return new Collections.RandomAccessIterator<E>(this);
    }

    @Override
    public Object[] asMutableArray() {
        return super.asMutableArray();
    }

    public boolean contains(E e) {
        return Collections.containsViaIterator(this, e);
    }

    public E find(E e) {
        return (Collections.findViaIterator(this, e));
    }

    public long size() {
        return super.adressableSize();
    }

    @Override
    public int hashCode() {
        return Collections.hashCodeForLists(this);
    }

    @Override
    public boolean equals(Object obj) {
        return Collections.equalsForList(this, obj);
    }

    @Override
    public String toString() {
        return Collections.toStringIterationOrder(this);
    }

    /**
     * TODO: Optimize in order to achieve linear instead of n·log(n) time.
     */
    public static <E> ImmutableArray<E> fromCollection(ImmutableCollection<? extends E> C) {
        ImmutableArray<E> tuple = create(C.size());
        ForwardIterator<? extends E> it = C.iterator();
        long i = 0;
        while (it.hasNext()) {
            tuple = tuple.set(i, it.next());
            i = i + 1;
        }
        return tuple;
    }

    public Int elementCount() {
        return Int.create(size());
    }

    @SuppressWarnings("unchecked")
    public ImmutableArray<E> clear() {
        return emptyTuple;
    }

    @Override
    public E first() {
        return seq().first();
    }

    @Override
    public Seq<E> rest() {
        return seq().rest();
    }

    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    public ImmutableArray<E> addAll(ImmutableCollection<E> rest) {
        ExtendibleArray<E> all = new ExtendibleArray<E>();
        all.addAll(this);
        all.addAll(rest);
        return fromCollection(all.asConstant());
    }

    @Override
    public Stream<E> stream() {
        return Collections.stream(this);
    }
}
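A brief, hedged usage sketch of the persistent-update semantics described in the class comment; the values are illustrative:

// Each set() returns a new array; the receiver is left untouched.
ImmutableArray<String> a = ImmutableArray.create(2);
ImmutableArray<String> b = a.set(0, "x").set(1, "y");
// b.get(0).equals("x") && b.get(1).equals("y"); a itself is unchanged.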
Work to turn a historic Liverpool library into a community centre will start within weeks after the project won £3.9m in Lottery cash. The Heritage Lottery Fund today confirmed the grant to the Andrew Carnegie Library in Tuebrook that will see the derelict building converted into a multi-purpose hub. The Lister Drive building will be called the Old Library and will include a café, a “healthy takeaway” and an events space. It will offer childcare services and volunteering opportunities to people in Tuebrook, West Derby, Stoneycroft and Old Swan. Today is an appropriate day for the announcement as it marks Andrew Carnegie’s 181st birthday. The Scottish-American became one of the richest men in the world through his success in the steel industry – and used some of his wealth to build thousands of libraries around the world. Some work has already been carried out to clear the building of mould and rot but this funding will allow the full rebuilding to start. Childcare charity Lister Steps is leading the restoration plan. Its boss Gaynor Williams said: “This is a bright new chapter for the area and one that we’re really excited to be involved in. “When Andrew Carnegie opened the library in 1905, he envisioned that it would be something that absolutely everyone in the area could benefit from and be proud of. Now, thanks to the Heritage Lottery Fund, Lister Steps are able to continue carrying out that vision. “It’s no secret that the area is one that has more than its fair share of hardship, with plenty of people who feel isolated and are struggling to make ends meet. The Old Library is going to create a sense of renewed community by offering people a chance to access honest, affordable services and get vital information about how to manage their money. “It will also be a place to help local entrepreneurs get their big ideas off the ground, as well as a community space for weddings, celebrations, concerts, theatre, conferences and local events. It truly is something for everyone.” The library was one of the buildings highlighted in the ECHO’s Stop the Rot campaign to save our city’s at-risk historic buildings. Ms Williams says the building will be called the Old Library because that’s what local people still call it. The new development has been designed by OMI Architects. Work will start next month and the new-look building will open its doors in November 2018. Nathan Lee, head of the Heritage Lottery Fund in the North West, said: “What a great way to mark Andrew Carnegie’s 181st birthday with our £3.9m investment – I think he would have approved. “We loved Lister Steps’ plans to restore this historic landmark into a living, breathing place designed around the needs of the local community. “This is a great example of National Lottery players’ money being used to maximum effect by taking a much-loved but dilapidated building and turning it into a space fit for 21st-century living.”
#-- GAUDI jobOptions generated on Sun Feb 7 02:07:34 2016 #-- Contains event types : #-- 15104201 - 151 files - 3002173 events - 906.80 GBytes #-- Extra information about the data processing phases: #-- Processing Pass Step-124834 #-- StepId : 124834 #-- StepName : Reco14a for MC #-- ApplicationName : Brunel #-- ApplicationVersion : v43r2p7 #-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r164 #-- Visible : Y #-- Processing Pass Step-125836 #-- StepId : 125836 #-- StepName : Stripping20-NoPrescalingFlagged for Sim08 - Implicit merging. #-- ApplicationName : DaVinci #-- ApplicationVersion : v32r2p1 #-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r164 #-- Visible : Y from Gaudi.Configuration import * from GaudiConf import IOHelper IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000002_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000003_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000004_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000005_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000007_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000008_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000009_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000010_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000011_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000012_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000013_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000014_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000015_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000016_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000017_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000018_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000019_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000020_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000021_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000022_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000023_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000024_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000025_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000026_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000027_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000028_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000029_2.AllStreams.dst', 
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000030_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000031_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000032_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000033_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000034_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000035_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000036_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000037_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000039_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000040_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000041_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000042_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000043_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000045_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000046_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000047_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000048_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000049_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000050_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000051_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000052_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000053_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000055_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000056_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000058_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000059_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000060_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000061_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000062_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000063_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000064_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000065_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000066_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000067_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000068_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000069_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000071_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000072_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000073_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000074_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000075_2.AllStreams.dst', 
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000076_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000077_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000078_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000079_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000080_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000081_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000082_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000083_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000084_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000085_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000087_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000088_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000090_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000091_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000092_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000093_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000094_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000095_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000096_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000097_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000098_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000099_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000100_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000101_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000102_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000103_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000105_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000106_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000107_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000108_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000109_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000110_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000111_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000112_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000113_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000114_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000115_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000116_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000117_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000118_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000119_2.AllStreams.dst', 
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000120_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000121_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000122_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000123_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000124_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000125_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000126_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000127_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000128_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000129_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000130_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000131_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000132_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000133_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000134_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000135_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000136_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000137_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000138_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000139_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000141_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000142_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000143_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000144_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000145_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000146_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000147_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000148_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000149_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000150_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000151_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000152_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000153_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000154_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000155_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000156_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000158_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000159_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000160_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000161_2.AllStreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000162_2.AllStreams.dst', 
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/00038895_00000163_2.AllStreams.dst' ], clear=True)
use std::cell::RefCell; // Not too familiar with Rust yet; copying reference solutions for now.
use std::rc::Rc;

fn main() {
    println!("Hello, world!");
}

// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

impl TreeNode {
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode { val, left: None, right: None }
    }
}

type Node = Rc<RefCell<TreeNode>>;

// LeetCode-style placeholder type for the solution methods.
struct Solution;

impl Solution {
    // Version 1: iterative preorder traversal (works on any binary tree).
    fn preorder(node: Option<Node>, target: i32) -> Option<Node> {
        let mut stack = Vec::new();
        stack.push(node);
        while let Some(node) = stack.pop() {
            if let Some(node) = node {
                if node.borrow().val == target {
                    return Some(node.clone());
                }
                stack.push(node.borrow().right.clone());
                stack.push(node.borrow().left.clone());
            }
        }
        None
    }

    pub fn search_bst(root: Option<Node>, val: i32) -> Option<Node> {
        Self::preorder(root, val)
    }

    // Version 2: recursion that follows the BST ordering.
    pub fn search_bst_recursive(root: Option<Node>, val: i32) -> Option<Node> {
        if let Some(node) = &root {
            if node.borrow().val == val {
                return root;
            } else if node.borrow().val < val {
                return Self::search_bst_recursive(node.borrow().right.clone(), val);
            }
            return Self::search_bst_recursive(node.borrow().left.clone(), val);
        }
        None
    }

    // Version 3: iterative descent along the BST.
    pub fn search_bst_iterative(mut root: Option<Node>, val: i32) -> Option<Node> {
        while let Some(node) = root {
            let v = node.borrow().val;
            if v < val {
                root = node.borrow().right.clone();
            } else if v > val {
                root = node.borrow().left.clone();
            } else {
                return Some(node);
            }
        }
        None
    }

    // Version 4: recursion that falls through to return the root on a match.
    pub fn search_bst_v4(root: Option<Node>, val: i32) -> Option<Node> {
        if let Some(node) = &root {
            if node.borrow().val < val {
                return Self::search_bst_v4(node.borrow().right.clone(), val);
            } else if node.borrow().val > val {
                return Self::search_bst_v4(node.borrow().left.clone(), val);
            }
        }
        root
    }
}
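A quick hedged harness for the variants above; the tree shape and the demo function are mine, not part of the original solutions:

// Build the BST {2 <- 1, 3} and search it with the iterative variant.
fn demo() {
    let root = Rc::new(RefCell::new(TreeNode::new(2)));
    root.borrow_mut().left = Some(Rc::new(RefCell::new(TreeNode::new(1))));
    root.borrow_mut().right = Some(Rc::new(RefCell::new(TreeNode::new(3))));
    let found = Solution::search_bst_iterative(Some(root), 3);
    assert_eq!(found.unwrap().borrow().val, 3);
}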
// repo: webdevelukas/klubwebsite
import styled from "styled-components";
import NextImage from "next/image";
import useMediaQuery from "hooks/useMediaQuery";

type LightboxProps = {
  setShowLightbox: (showLightbox: boolean) => void;
  images: { url: string; alt: string; width: number; height: number }[];
};

function Lightbox({ setShowLightbox, images }: LightboxProps) {
  const [fitsMediaQuery] = useMediaQuery("(min-width: 900px)");

  return (
    <Container>
      <Wrapper>
        <ImageCount>{images.length} Bilder</ImageCount>
        <button onClick={() => setShowLightbox(false)}>X Schließen</button>
      </Wrapper>
      <ImageGallery>
        {images.map((image, index) => (
          <LightboxPicture key={index}>
            {!fitsMediaQuery && (
              <NextImage src={image.url} alt={image.alt} layout="fill" objectFit="cover" />
            )}
            {fitsMediaQuery && (
              <NextImage src={image.url} alt={image.alt} layout="fill" objectFit="contain" />
            )}
          </LightboxPicture>
        ))}
      </ImageGallery>
    </Container>
  );
}

export default Lightbox;

const ImageCount = styled.p`
  color: white;
`;

const Wrapper = styled.div`
  display: grid;
  grid-template-columns: 1fr auto;
  margin-bottom: 1rem;
`;

const ImageGallery = styled.div`
  display: grid;
  grid-auto-rows: auto;
  grid-auto-flow: row;
  grid-row-gap: 1rem;
  align-content: start;
`;

const LightboxPicture = styled.picture`
  position: relative;
  height: 60vmin;

  @media screen and (min-width: 900px) {
    height: 80vh;
  }
`;

const Container = styled.div`
  background: rgba(0, 0, 0, 0.9);
  z-index: 4000;
  position: fixed;
  top: 0;
  bottom: 0;
  left: 0;
  right: 0;
  padding: 2rem 1rem;
  overflow-y: scroll;
`;
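For context, a hedged sketch of how the component might be mounted from a parent; the state wiring, import path, and image data are assumptions, not taken from the repo:

// Hypothetical parent: owns the visibility flag and passes it down.
import { useState } from "react";
import Lightbox from "components/Lightbox"; // path is an assumption

function Gallery({ images }: { images: { url: string; alt: string; width: number; height: number }[] }) {
  const [showLightbox, setShowLightbox] = useState(false);
  return (
    <>
      <button onClick={() => setShowLightbox(true)}>Bilder anzeigen</button>
      {showLightbox && <Lightbox setShowLightbox={setShowLightbox} images={images} />}
    </>
  );
}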
White House counselor Kellyanne Conway on Monday morning confirmed that President Donald Trump’s personal attorney John Dowd wrote the President’s Saturday tweet saying that he fired Michael Flynn as national security adviser because he lied to Vice President Mike Pence and the FBI. “I was with the President on Saturday all day, frankly, and I know that what Mr. Dowd says is correct. What he says is that he put it together and sent it to our director of social media,” Conway said on “Fox and Friends.” She said that it’s common for Trump’s lawyers to craft his tweets. “The lawyers are the ones that understand how to put those tweets together,” she said. Trump appeared to reveal that he knew Flynn lied to the FBI when he tweeted on Saturday morning that he fired Flynn “because he lied to the Vice President and the FBI.” Dowd then told the Washington Post on Sunday that he wrote the tweet, but said it was poorly worded. Dowd claimed that then-Acting Attorney General Sally Yates suggested to White House Counsel Don McGahn in late January that Flynn made comments to the FBI that were similar to his incorrect comments made to Pence about calls with the Russian ambassador.
# filename: test/contrib/test_pyopenssl.py
# -*- coding: utf-8 -*-
import os

import mock
import pytest

try:
    from cryptography import x509
    from OpenSSL.crypto import FILETYPE_PEM, load_certificate

    from urllib3.contrib.pyopenssl import _dnsname_to_stdlib, get_subj_alt_name
except ImportError:
    pass


def setup_module():
    try:
        from urllib3.contrib.pyopenssl import inject_into_urllib3

        inject_into_urllib3()
    except ImportError as e:
        pytest.skip("Could not import PyOpenSSL: %r" % e)


def teardown_module():
    try:
        from urllib3.contrib.pyopenssl import extract_from_urllib3

        extract_from_urllib3()
    except ImportError:
        pass


from ..test_util import TestUtilSSL  # noqa: E402, F401

from ..with_dummyserver.test_https import (  # noqa: E402, F401
    TestHTTPS,
    TestHTTPS_IPV4SAN,
    TestHTTPS_IPv6Addr,
    TestHTTPS_IPV6SAN,
    TestHTTPS_NoSAN,
    TestHTTPS_TLSv1,
    TestHTTPS_TLSv1_1,
    TestHTTPS_TLSv1_2,
    TestHTTPS_TLSv1_3,
)
from ..with_dummyserver.test_socketlevel import (  # noqa: E402, F401
    TestClientCerts,
    TestSNI,
    TestSocketClosing,
    TestSSL,
)


class TestPyOpenSSLHelpers(object):
    """
    Tests for PyOpenSSL helper functions.
    """

    def test_dnsname_to_stdlib_simple(self):
        """
        We can convert a dnsname to a native string when the domain is simple.
        """
        name = u"उदाहरण.परीक"
        expected_result = "xn--p1b6ci4b4b3a.xn--11b5bs8d"

        assert _dnsname_to_stdlib(name) == expected_result

    def test_dnsname_to_stdlib_leading_period(self):
        """
        If there is a . in front of the domain name we correctly encode it.
        """
        name = u".उदाहरण.परीक"
        expected_result = ".xn--p1b6ci4b4b3a.xn--11b5bs8d"

        assert _dnsname_to_stdlib(name) == expected_result

    def test_dnsname_to_stdlib_leading_splat(self):
        """
        If there's a wildcard character in the front of the string we handle it
        appropriately.
        """
        name = u"*.उदाहरण.परीक"
        expected_result = "*.xn--p1b6ci4b4b3a.xn--11b5bs8d"

        assert _dnsname_to_stdlib(name) == expected_result

    @mock.patch("urllib3.contrib.pyopenssl.log.warning")
    def test_get_subj_alt_name(self, mock_warning):
        """
        If a certificate has two subject alternative names, cryptography raises
        an x509.DuplicateExtension exception.
        """
        path = os.path.join(os.path.dirname(__file__), "duplicate_san.pem")
        with open(path, "r") as fp:
            cert = load_certificate(FILETYPE_PEM, fp.read())

        assert get_subj_alt_name(cert) == []

        assert mock_warning.call_count == 1
        assert isinstance(mock_warning.call_args[0][1], x509.DuplicateExtension)
Object languages in a type-theoretic meta-framework
This paper concerns techniques for providing a convenient syntax for object languages implemented via a type-theoretic Logical Framework, and reports on work in progress. We first motivate the need for a type-theoretic logical framework. Firstly, we take the logical framework seriously as a metalanguage for implementing object languages (including object type theories). Another reason is the goal of building domain-specific reasoning tools which are implemented using type theory technology but do not require great expertise in type theory to use productively. We then present several examples of bi-directional translations between an encoding in the framework language and a more convenient syntax. The paper ends by discussing several techniques for implementing the translations and properties that we may require for the translation. Coercive subtyping is shown to help in the translation.
About How many times have you gone out with your geeky friends to a local bar, only to be surrounded by sports on every screen? Ever try to ask someone to put on an episode of Deep Space 9 or Dark Matter? They'd look at you like you had two heads! The looks you get when you break out your Magic cards in those places... Well, we decided to do something about that. My name is Daniel Jackson, and my wife, Bri, and I want to BUILD that place for geeks to come and gather! Comic & Sci-fi conventions are way too few and far between. We've always wished there was a place to go -- a year-round convention, so to speak -- with geeky activities always afoot. A place to meet others like us or a common place to meet up with internet friends. Really, how cool would it be to test my obscure, encyclopedic knowledge of the 'Evil Dead' trilogy and win my bar tab? Or watch Bri recite 'Return of the Jedi' in its entirety on-stage, Artoo's beeps and all? (It's actually pretty amazing). So... Coming in early 2018, Midway Station will proudly serve as the geek hub of Richmond, Virginia. Located just off of 6th and Cary Street, Midway Station will offer a highly realized and immersive 3,000 sq ft science fiction environment for geeks of all ages to socialize, eat, drink, play games and shop! And that's just the beginning! Constructed as a space station serving intergalactic travelers, the design of Midway Station is completely original, with familiar science fiction/fantasy elements. The environment will be fully immersive...in fact, guests will step through a Milky Way Stargate to enter the station, and turn around to see that they've actually traveled to the Pegasus Galaxy! Everything from the lighting, industrial paneling, ambient background ship noises (Ten Forward anyone?), to the main view screen showing star fields slowly passing by, is designed to make guests feel as though they really have been transported light years away. Concept Artwork In order to accommodate travelers of all ages, the atmosphere of the station will be dynamic, slowly transitioning from an all-ages cafe throughout the day, to an 18 & up lounge later in the evening. Guests can enjoy geeky cocktails while playing console games or watching sci-fi or fantasy movies and series. There will also be themed non-alcoholic beverages (How long have you wanted to try a glass of Blue Milk?), as well as a professional bakery filled with fun and creative geeky pastries and snacks. We are proud to announce that we will be serving exclusive themed pizzas from Pepicelli's in the neighboring town of Ashland, VA! "Pizza the Hutt" is the first exclusive: a delicious mountain of pizza toppings, sauce, and crust, featuring the likeness of everyone's favorite vile, stomach turning gangster from 'Spaceballs'! Serves six! Midway Merchants is a shop where guests can buy toys, comics, board and table top gaming supplies, geeky apparel, replica movie props and MUCH more! We will also be hosting merchandise from different local vendors from time to time. While visiting Midway Station, guests will be able to explore our museum of screen-used props (Currently taking over our living room...) from popular sci-fi franchises which will be on display throughout the station. Also, there will be a number of geeky easter eggs all around the station (and kids can pick up a scavenger hunt sheet to try to earn a free treat!). A SMALL portion of the prop collection... In fact, there will ALWAYS be something fun and exciting going on at Midway Station...
-Science Fiction & Fantasy films and tv series playing ALL day, EVERY day! -A library of novels, comic books, & graphic novels to both borrow and purchase! Grab some reading material, order a beverage, have a seat, and enjoy! -Tabletop and board games (again, to borrow OR purchase!) -Console gaming, including Playstation, Xbox & Nintendo Switch -Trivia nights (with PRIZES!!) -Costume Contests -Open Mic Stand-Up Comedy -Celebrity Guest Appearances And MUCH MORE... Midway Station will offer a private room with a HUGE gaming/conference table, a minibar and more! This room is available for rent (private parties and events). We will also offer birthday party packages for kids. CAPT. DANIEL JACKSON [ME] I’ve been a HUGE Sci-Fi fan all of my life and have always had a knack for making art and building things. For the past 5 years, I have been working as a professional action figure customizer and owner of Kawoosh Customs. I have made custom toys for fans from all over the world, and even a few of my favorite sci-fi actors. You can check out more of my custom work here. Some of you may remember my wife, Bri, and I from our wedding at the Creation Entertainment Official Stargate Convention (https://www.creationent.com/cal/sgchi.htm) in 2016. Eric Avari reprised his role of Kasuf from Stargate, surprising us with a blessing in ABYDONIAN! Now that I've established my geek and artistic cred... In addition, I have over a decade of experience in operations/business management and visual merchandising, as well as three years of restaurant experience. DEVERICK STRAND - GENERAL MANAGER Dev, working with his other passion, helping children. Deverick developed his love of science fiction at an early age through Star Trek: The Next Generation. This ignited a passion for all things science fiction, including all iterations of Star Trek, Star Wars & Stargate. He is also a PC & Console Gamer and is a fan of the steampunk aesthetic. Besides having prior experience in both the retail and restaurant industries, he has spent the last 17 years of his career as a classroom teacher. If he can manage 150+ preteens, he can handle anything! We are so proud to bring Deverick aboard as a part of the Midway Station crew. BRIANNE “BRI” JACKSON - "MASTER OF COIN" Bri as Kahlan Amnell, the Mother Confessor from "Legend of the Seeker" Bri's love of science fiction began when she was just a baby. As a young, fussy child, it wasn't a pacifier that soothed her crying, but Star Wars. Since then, she has made her love of science fiction and fantasy part of her entire life. Bri is a fan of all things geek: The "Stars": Wars, Trek & Gate, Harry Potter, Lord of the Rings, The Dark Crystal, The Labyrinth, The Beastmaster, True Blood, the list goes on and on. Bri is also an avid reader and writer, having written a 1,000,000+ word Stargate fan fiction. You can check it out here. Currently, Brianne is an Instructional Designer at Virginia Commonwealth University, where she is completing her Ph.D. in education. As a researcher, Bri is a statistician, with additional experience running her own tutoring company, so we are very happy that she will be keeping our books! NAIM - MASTER OF TASTY TREATS Our baker, Naim, geeking it up! Naim is the super-talented owner of "Oh My! Cheesecake", a bakery business also in Richmond, Va. Naim is a huge geek, being a fan of everything from Marvel Comics, to Spaceballs, to Star Wars. Naim has been working to develop a number of recipes that will best represent our geeky fandoms at Midway Station.
To preview some of his amazing work, please check out his website here. RONNIE BAILEY - SHOPKEEPER/GUEST RELATIONS Ronnie - The man with the stuffs I've had the pleasure of working with Ronnie for the past few years and I am so excited to have him aboard the station. Not only does Ronnie have a number of years in customer service, he is one of the nicest geeks that you will meet! Ronnie loves console gaming, Mass Effect, as well as the Matrix series. Maybe we should call him Neo? PEPPER Because what self-respecting space station would be complete without droids? Pepper will be the hostess of Midway Station. She will greet guests when they arrive, entertain them, and serve as an interactive menu and tour guide throughout the station. Pepper is super-friendly and loves dancing and taking photos with humans. To see Pepper in action, please visit her maker's page Softbank Robotics: (https://www.ald.softbankrobotics.com/en/press/gallery/pepper). JIBO Jibo, our 2nd droid (unless my wife steals him for our house...) Jibo will serve as the Captain's personal assistant, and will be seen all around the station. Jibo will help control the atmosphere of the station (lighting, music, etc.) and will entertain guests by playing games, telling jokes, or sharing fun and interesting facts. To see Jibo in action, visit his website (https://www.jibo.com/). Da, da, da, da, da, da, da, da, da.... The 8th Kingdom Concept Art The one kingdom the Dragon Queen and Cersei are NOT fighting over, the 8th Kingdom is a medieval fantasy themed "Holosuite Program" on Midway Station, with its own themed games, food and drink menu. Come share a bottle of mead, enjoy rustic pastries, and play your favorite tabletop RPGs and console games in a mystical, otherworldly setting. The crew at Midway Station wishes to celebrate the achievements of local students who excel in geekery. We're looking for spelling bee champs, science fair winners, Odyssey of the Mind teams, etc. We will be accepting all examples of scientific and geek awesomeness to proudly display on our website and on the wall of the cafe. Also, each geek of the week will receive a free treat! We chose the Kickstarter platform because science fiction is a family. Rather than solely rely on bank loans and the investors that we are working with, we wanted to make each and every one of you a part of the Midway Station family. The money outlined in this Kickstarter, from Bri's careful budgeting, will cover about half of what we need to make Midway Station a reality (the other half will be covered by investors). This includes paying the crew to build and set up the station, start-up inventory and supplies, smart tech, droids, and all necessary licensing. However, the more support you give us, the bigger and more captivating we can make the experience at Midway Station for our guests. Outside of the money we are asking for here, our Stretch Goals will help us add more entertainment and depth to the station. Some of our goals include: - Purchasing more screen-used props & costumes for the museum. We are working with the owners of Antiquities, an amazing costume & prop shop in Las Vegas, to secure some of the most iconic artifacts in cinematic history for our sci-fi museum. -Having VR systems available to play at our cafe -Expanding our space to include an arcade And that's just the beginning! We are also planning a stretch goal party if we meet those goals! Although all pledges are greatly appreciated, you don't have to contribute financially to help.
You can help by sharing this everywhere: Facebook, Instagram, Twitter, Snapchat, Email and word of mouth! EVERY little bit helps! Exclusive Founders/Backers ONLY Decal Exclusive Founders/Backers ONLY Patch Exclusive Founders/Backers ONLY T-Shirt Is 100K enough to cover these seemingly intricate construction expenses? Kickstarter is not the only source of income, as we have investors, as well as a number of construction items already obtained prior to launching the Kickstarter. Is it truly feasible to launch in January with this level of customization? Our goal is to open in January of 2018. We have created a number of contingency plans to account for any possible delays in licensing and construction and will keep ALL backers abreast of the timeline. If you don't meet your goal - is the project dead? Do you have a plan B? The project will move forward, even if it is delayed from not being fully funded through Kickstarter. In fact, we do have a plan B. We chose the Kickstarter platform to generate buzz within the science fiction community outside of the Richmond area. In the event we do not meet our Kickstarter goal, we will revisit our plans with the investors and the building owner. Are the only sources of income from sales of drinks, snacks, etc.? No. As you can see, we will have a number of special ticketed events that will also generate income for the cafe (celebrity photo/autograph events, movie premieres, contests, etc.). Further, the 8th Kingdom and the private party room will be available for rentals (a HIGH need in the growing Richmond, VA community). We have comprehensive estimates of both income as well as expenses; however, due to privacy concerns we are not sharing this information on the public platform. However, if an individual would like to discuss being an investor in the project, we can provide those figures. How many employees will be needed? We are beginning with the crew, as you have seen listed, as well as 3 part-time employees. What is the general level of interest in such a venture? Richmond, VA is a Fortune 500 city, with a growing market for restaurants and party spaces. In working with the RVA Small Business administration, we have identified such a venture to be one of high interest in our area. As mentioned in the Kickstarter, we will be working with other local businesses to make our cafe part of the growing Richmond community by cross-promoting. What is your marketing strategy? As Richmond IS such a growing community, the Richmond Small Business Association, along with local magazines and radio, offers support to new small businesses in the area. Also, as an employee of one of the largest universities on the East Coast, Bri will be working closely to partner with the student community to ensure continued success. But I can only afford $10 or $20. How does that help? Science fiction is a family. We want EVERYONE to be a part of this. If every SciFi fan that saw this Kickstarter backed us for only $5 or $10, we would reach our goal in a day. NO contribution is too small. And we will work hard to ensure a fantastic return on investment, at any scale. Good journey my friends... --Daniel Jackson
#include <stdlib.h>

// Singly linked list node; the shape is assumed here, since the original
// snippet presupposes an existing `node` type.
typedef struct node {
    int data;
    struct node* next;
} node;

// Adds two linked lists of the same size, represented by head1 and head2, and
// returns the head of the resultant linked list. The carry is propagated while
// returning from the recursion.
node* addSameSize(node* head1, node* head2, int* carry)
{
    if (head1 == NULL)
        return NULL;

    node* result = (node*)malloc(sizeof(node));

    // Recurse to the least significant digits first; the carry then flows
    // back up through the returns.
    result->next = addSameSize(head1->next, head2->next, carry);

    int sum = head1->data + head2->data + *carry;
    *carry = sum / 10;
    result->data = sum % 10;

    return result;
}
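A hedged driver for the function above; the list-building helper and the handling of a leftover carry are mine, illustrating how the returned carry becomes a new leading digit:

#include <stdio.h>

static node* cons(int data, node* next) {
    node* n = (node*)malloc(sizeof(node));
    n->data = data;
    n->next = next;
    return n;
}

int main(void) {
    // 345 as 3->4->5 and 999 as 9->9->9 (most significant digit first).
    node* a = cons(3, cons(4, cons(5, NULL)));
    node* b = cons(9, cons(9, cons(9, NULL)));
    int carry = 0;
    node* sum = addSameSize(a, b, &carry);
    if (carry)                        // leftover carry becomes a leading digit
        sum = cons(carry, sum);
    for (node* p = sum; p; p = p->next)
        printf("%d", p->data);        // prints 1344
    printf("\n");
    return 0;
}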
import base64
import time

import requests

# Configuration Variables
server_url = "http://localhost:8194"  # the URL the Slackbot server is running on

# Test low_battery without an image
print("Testing /low_battery without an image...", end='')
dict_to_send = {'battery_pct': 25}
res = requests.post(server_url + '/low_battery', json=dict_to_send)
print("Got response JSON from /low_battery: %s" % res.json())

# Load an image
with open("../imgs/test_image_0.jpg", "rb") as f:
    image_content = f.read()
image_to_send = base64.encodebytes(image_content).decode('ascii')

# Test low_battery with an image
print("Testing /low_battery with an image...", end='')
dict_to_send = {'battery_pct': 15, 'image': image_to_send}
res = requests.post(server_url + '/low_battery', json=dict_to_send)
print("Got response JSON from /low_battery: %s" % res.json())

# Load another image
with open("../imgs/test_image_1.jpg", "rb") as f:
    image_content = f.read()
image_to_send = base64.encodebytes(image_content).decode('ascii')

# Test where_am_i
print("Testing /where_am_i...", end='')
dict_to_send = {'image': image_to_send,
                'options': ['Lounge', "Office#252", "200 Corridoor", "Atrium"]}
res = requests.post(server_url + '/where_am_i', json=dict_to_send)
print("Got response JSON from /where_am_i: %s" % res.json())
where_am_i_message_id = res.json()['message_id']

# Sleep to enable the user to respond
print("Sleeping for 15 seconds. In this time, respond to the Slack message.")
sleep_secs = 15
time.sleep(sleep_secs)

# Test get_updates without message_ids
print("Testing get_updates without message_ids...", end='')
dict_to_send = {}
res = requests.post(server_url + '/get_updates', json=dict_to_send)
print("Got response JSON from /get_updates: %s" % res.json())

# Test get_updates with message_ids and a timestamp of 0
print("Testing get_updates with a message_id and timestamp of 0...", end='')
dict_to_send = {'message_ids_and_action_ts': {where_am_i_message_id: 0}}
res = requests.post(server_url + '/get_updates', json=dict_to_send)
print("Got response JSON from /get_updates: %s" % res.json())
# Default the timestamp so the script still runs if the user never responded.
where_am_i_latest_action_ts = 0.0
if where_am_i_message_id in res.json()['message_id_to_responses']:
    where_am_i_latest_action_ts = float(
        res.json()['message_id_to_responses'][where_am_i_message_id][-1][0])

# Sleep to enable the user to respond
print("Sleeping for 5 seconds. In this time, respond to the Slack message "
      "if applicable (e.g., you clicked 'Other').")
sleep_secs = 5
time.sleep(sleep_secs)

# Test get_updates with message_ids and a timestamp of where_am_i_latest_action_ts
print("Testing get_updates with a message_id and timestamp of %f..." % where_am_i_latest_action_ts, end='')
dict_to_send = {'message_ids_and_action_ts': {where_am_i_message_id: where_am_i_latest_action_ts}}
res = requests.post(server_url + '/get_updates', json=dict_to_send)
print("Got response JSON from /get_updates: %s" % res.json())
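A hedged helper that generalizes the fixed sleeps above into a poll loop; it only uses the /get_updates contract already exercised by this script, and the function itself is mine:

def wait_for_response(message_id, timeout_secs=30, poll_secs=2):
    """Poll /get_updates until `message_id` gets a response or we time out."""
    deadline = time.time() + timeout_secs
    last_ts = 0
    while time.time() < deadline:
        payload = {'message_ids_and_action_ts': {message_id: last_ts}}
        res = requests.post(server_url + '/get_updates', json=payload)
        responses = res.json().get('message_id_to_responses', {})
        if message_id in responses:
            return responses[message_id]
        time.sleep(poll_secs)
    return None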
/**
 * The forked computation.
 * <p>
 * The resulting index always includes the position of the end-of-file (EOF).
 * </p>
 *
 * @return a sorted set of positions, each representing the start of a line.
 */
@Override
protected SortedSet<Long> compute() {
    SortedSet<Long> index = new TreeSet<Long>();
    try {
        if (length < threshold) {
            // Small enough chunk: scan it sequentially, recording the file
            // position after each line as the start of the next line.
            BufferedRandomAccessFile raf = null;
            try {
                raf = new BufferedRandomAccessFile(file, "r");
                raf.seek(start);
                if (raf.getFilePointer() == 0L) {
                    index.add(Long.valueOf(raf.getFilePointer()));
                }
                while (raf.getFilePointer() < end) {
                    raf.getNextLine();
                    index.add(Long.valueOf(raf.getFilePointer()));
                }
            } finally {
                if (raf != null) {
                    raf.close();
                }
            }
        } else {
            // Too large: split the range in half, fork one half and compute
            // the other in the current thread, then merge the results.
            long start1 = start;
            long end1 = start + (length / 2);
            long start2 = end1;
            long end2 = end;
            IndexingTask task1 = new IndexingTask(file, start1, end1, threshold);
            task1.fork();
            IndexingTask task2 = new IndexingTask(file, start2, end2, threshold);
            index.addAll(task2.compute());
            index.addAll(task1.join());
        }
    } catch (IOException ex) {
        throw new FileIndexingException(file, ex);
    }
    return index;
}
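A hedged sketch of driving the task: the IndexingTask constructor signature is inferred from the recursive calls above, while the pool setup, file name, and threshold value are assumptions:

import java.io.File;
import java.util.SortedSet;
import java.util.concurrent.ForkJoinPool;

// Hypothetical driver: index every line start of a file on the common pool.
File file = new File("big.log");
IndexingTask root = new IndexingTask(file, 0L, file.length(), 1_048_576L);
SortedSet<Long> lineStarts = ForkJoinPool.commonPool().invoke(root);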
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Custom auxiliary modules for SETA_gophish app

import sys
import csv
import configparser
import time
import os
from datetime import datetime

from gophish import Gophish
from gophish.models import (Campaign, Group, Page, SMTP, Template, User)


def initialize_connector():
    '''
    Gophish API connector
    Get SETAphish.cfg file and read parameters.
    Check file in /config for details and examples.
    '''
    global api
    config = configparser.ConfigParser()
    thisfolder = os.path.dirname(os.path.abspath(__file__))
    cfgfile = os.path.join(thisfolder, 'config', 'SETAphish.cfg')
    try:
        config.read(cfgfile)
    except Exception as e:
        return('ERROR: Config file not found')
    api_key = config.get('MAIN', 'api_key')
    host_url = config.get('MAIN', 'host_url')
    verify = config.get('MAIN', 'verify')
    api = Gophish(api_key, host_url, verify=eval(verify))
    return


def create_group_(args):
    '''
    Phishing group of users creation
    Create a new group of users to be phished or trained
    input file format: CSV delimited by comma, no headers, one line per user,
    four fields (First Name, Last Name, Email, Position)
    Input: list with Group name, CSV File
    Return: object reference (Gophish Group) if applies or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    name, file = args
    users = []
    fields = ['First Name', 'Last Name', 'Email', 'Position']
    try:
        reader = csv.DictReader(file, fields)
        for row in reader:
            if row:
                users.append(create_user(row))
        group = Group(name=name, targets=users)
    except(FileNotFoundError):
        msg = ('ERROR: no such file: {}'.format(file))
        return(None, msg)
    try:
        group = api.groups.post(group)
        return(group, 'OK')
    except:
        return(None, 'ERROR: failed to create target group (possible duplicate or file format is wrong)')


def create_user(row):
    '''
    Gophish User creation
    Convert list item into gophish user type.
    Input: CSV line
    Return: reference to new gophish User object.
    '''
    return (User(first_name=row['First Name'], last_name=row['Last Name'],
                 email=row['Email'], position=row['Position']))


def list_groups_():
    '''
    List Gophish user groups
    Print target groups and high level details
    Input: none
    Return: list of Groups if applies or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        groups = api.groups.get()
        return(groups, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve target groups')


def get_detail_group_(id):
    '''
    Get Gophish user group detail
    Retrieve group details
    Input: GroupId in Gophish database
    Return: object reference if applies or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        group = api.groups.get(group_id=id)
        return(group, 'OK')
    except:
        return(None, 'Error: failed to retrieve target group {}. Generic error or non existent'.format(id))


def remove_group_(id):
    '''
    Remove Gophish user group detail
    Input: GroupId in Gophish database
    Return: message indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(msg)
    try:
        group = api.groups.delete(group_id=id)
        return('OK')
    except:
        return('ERROR: failed to remove the target group {}. Generic error or non existent'.format(id))


def create_email_template_(args):
    '''
    Gophish email template creation
    Create a new template email for phishing campaigns
    Input: list with template name (string), email subject (string), file
    template (html)
    Return: reference to template object if applies or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    template_name, email_subject, file = args
    try:
        html = file.read()
        template = Template(name=template_name, subject=email_subject, html=html)
    except(FileNotFoundError):
        return(None, 'ERROR: no such file {}'.format(file))
    try:
        template = api.templates.post(template)
    except:  # possible source code bug
        return(None, 'ERROR: failed to create email template (possible duplicate)\n')
    return(template, 'OK')


def list_email_templates_():
    '''
    Gophish list email templates
    List email templates for phishing campaigns
    Input: None
    Return: list of Email template objects or None, and message indicating OK
    or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        templates = api.templates.get()
        return(templates, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve templates')


def get_detail_email_template_(id):
    '''
    Get Gophish email template detail
    Retrieve email template properties
    Input: TemplateId in Gophish database
    Return: object reference if applies or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        template = api.templates.get(template_id=id)
        return(template, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve email template {}. Generic error or non existent\n'.format(id))


def remove_email_template_(id):
    '''
    Remove Gophish email template
    Input: TemplateId in Gophish database
    Return: message indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(msg)
    try:
        template = api.templates.delete(template_id=id)
        return('OK')
    except:
        return('ERROR: failed to remove the email template ID {}. Generic error or non existent\n'.format(id))


def create_landing_page_template_(args):
    '''
    Gophish landing page template creation
    Create a new landing page for phishing campaigns
    Input: list with template name (string), file template (html), capturing
    credentials [0 (no capture), 1 (yes) or 2 (also capture pass)],
    redirect_URL (string url)
    Return: reference to template object if applies or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    template_name, file, capturing, redirect_url = args
    capture_credentials = False
    capture_passwords = False
    if capturing in ["1", "2"]:
        capture_credentials = True
    if capturing == "2":
        capture_passwords = True
    try:
        html = file.read()
        landing_page = Page(name=template_name, html=html,
                            capture_credentials=capture_credentials,
                            capture_passwords=capture_passwords,
                            redirect_url=redirect_url)
    except(FileNotFoundError):
        return(None, 'ERROR: no such file {}\n'.format(file))
    try:
        page = api.pages.post(landing_page)
        return(page, 'OK')
    except:
        return(None, 'ERROR: failed to create landing page template (possible name duplicate)\n')


def list_landing_page_templates_():
    '''
    Gophish list landing pages
    List landing pages for phishing campaigns
    Input: None
    Return: list of landing page template objects or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        landing_pages = api.pages.get()
        return(landing_pages, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve templates')


def get_landing_page_template_(id):
    '''
    Get Gophish landing page detail
    Retrieve landing page properties
    Input: Landing page Id in Gophish database
    Return: object reference if applies or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        page = api.pages.get(page_id=id)
        return(page, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve landing page template {}. Generic error or non existent\n'.format(id))


def remove_landing_page_template_(id):
    '''
    Remove Gophish landing page template
    Input: landing page template Id in Gophish database
    Return: template Id (string) and message indicating OK or specific error
    (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        page = api.pages.delete(page_id=id)
        return(id, 'OK')
    except:
        return(None, 'ERROR: failed to remove the landing page template ID {}. Generic error or non existent\n'.format(id))


def create_sending_profile_(args):
    '''
    Gophish SMTP sending profile creation
    Create a new email profile for phishing campaigns
    Input: list with
        <PROFILE_NAME>: string with a given name for profile
        <HOST> example: smtp.gmail.com
        <SMTP_USERNAME> example: <EMAIL>
        <SMTP_PASS> self-explanatory
        <FROM_USERNAME> example: <NAME>
        <FROM_EMAIL_ADDRESS> example: <EMAIL>
        <IGNORE CERT ERRORS (0 or 1)>: ignore SSL errors (1) or not (0)
    Return: reference to template object if applies or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    profile_name, host, smtp_username, smtp_pass, from_username, from_email_address, ignore_errs = args
    # by default assume SSL self-signed certificate
    if ignore_errs == '0':
        ignore_errs = False
    else:
        ignore_errs = True
    smtp = SMTP(name=profile_name, host=host,
                from_address=from_username + ' <' + from_email_address + '>',
                username=smtp_username, password=smtp_pass,
                ignore_cert_errors=ignore_errs)
    try:
        smtp = api.smtp.post(smtp)
        return(smtp, 'OK')
    except:
        return(None, 'ERROR: failed to create sender profile (possible duplicate name or invalid email)\n')


def list_sending_profile_templates_():
    '''
    Gophish list sending SMTP profiles
    List SMTP profiles for phishing campaigns
    Input: None
    Return: list of SMTP profile objects or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        sending_profiles = api.smtp.get()
        return(sending_profiles, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve SMTP profiles\n')


def get_smtp_sending_profile_(id):
    '''
    Get Gophish SMTP sending profile detail
    Retrieve sending profile properties
    Input: Profile Id in Gophish database
    Return: object reference if applies or None, and message indicating OK or
    specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        profile = api.smtp.get(smtp_id=id)
        return(profile, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve SMTP sending profile {}. Generic error or non existent\n'.format(id))


def remove_smtp_sending_profile_(id):
    '''
    Remove Gophish SMTP sending profile template
    Input: profile Id in Gophish database
    Return: profile Id (string) or None, and message indicating OK or specific
    error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        profile = api.smtp.delete(smtp_id=id)
        return(id, 'OK')
    except:
        return(None, 'ERROR: failed to remove the SMTP sending profile ID {}. Generic error or non existent\n'.format(id))


def create_campaign_(args):
    '''
    Gophish phishing campaign creation
    Create a new phishing campaign
    Input: list with
        <CAMPAIGN_NAME> name for the new object campaign
        <GROUP_NAME> user group name selected for the campaign
        <EMAIL_NAME> email template name selected for the campaign
        <LANDING_PAGE_NAME> landing page name selected for the campaign
        <SMTP_PROFILE_NAME> sending profile name selected
        <PHISH_URL> URL to send phished users for typing credentials
        <LAUNCH_TIME> optional. Format: dd/mm/yyyy@hh:mm for scheduled
        launching or blank for immediate
    Return: reference to campaign object if applies or None, and message
    indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    launch_time = 'now'
    if len(args) == 7:
        campaign_name, group_name, email_name, page_name, profile_name, phishing_url, launch_time = args
    else:
        campaign_name, group_name, email_name, page_name, profile_name, phishing_url = args
    groups = [Group(name=group_name)]
    page = Page(name=page_name)
    template = Template(name=email_name)
    smtp = SMTP(name=profile_name)
    # checking date format and validity
    if launch_time == 'now':
        campaign = Campaign(
            name=campaign_name, groups=groups, page=page,
            template=template, smtp=smtp, url=phishing_url,
        )
    else:
        try:
            start_timestamp = datetime.strptime(launch_time, '%d/%m/%Y@%H:%M')
        except ValueError:
            return(None, 'ERROR: failed to convert timestamp (possibly misformatted?)\n')
        if start_timestamp > datetime.now():
            launch_date = start_timestamp.isoformat() + 'Z'
        else:
            return(None, 'ERROR: Launching campaign time provided is in the past!\n')
        campaign = Campaign(
            name=campaign_name, groups=groups, page=page,
            template=template, smtp=smtp, url=phishing_url,
            launch_date=launch_date
        )
    try:
        # duplicated campaign names allowed in here
        new_campaign = api.campaigns.post(campaign)
        return(new_campaign, 'OK')
    except:
        return(None, 'ERROR: failed to create Campaign (possible non-existent objects?)\n')


def list_campaigns_():
    '''
    Gophish list phishing campaigns
    List existing phishing campaigns
    Input: None
    Return: list of phishing campaign objects or None, and message indicating
    OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, msg)
    try:
        campaigns = api.campaigns.summary()
        for campaign in campaigns.campaigns:
            pass
        return(campaigns, 'OK')
    except:
        return(None, 'ERROR: failed to retrieve phishing campaigns\n')


def get_campaign_details_(id):
    '''
    Get Gophish phishing campaign details
    Retrieve phishing campaign properties
    Input: Campaign Id in Gophish database
    Return: object reference if applies (summary and details) or None, None,
    and message indicating OK or specific error (string).
    '''
    msg = initialize_connector()
    if msg is not None:
        return(None, None, msg)
    try:
        summary = api.campaigns.summary(campaign_id=id)
        details = api.campaigns.get(campaign_id=id)
        return(summary, details, 'OK')
    except:
        return(None, None, 'ERROR: failed to retrieve phishing campaign details or ID non-existent\n')


def end_campaign_(id):
    '''
    End Gophish campaign
    Set the campaign to COMPLETED without removing it from database
    Input: Campaign Id in Gophish database
    Return: Campaign summary or None, and message indicating OK or specific
    error (string).
''' msg = initialize_connector() if msg is not None: return(None, msg) #cancelled campaigns are not accesible. Cancellable status are all but Completed. try: summary = api.campaigns.summary(campaign_id=id) if summary.status == 'Completed': return(None, 'ERROR: selected campaign status ({}) can not be completed\n'.format(summary.status)) else: api.campaigns.complete(campaign_id=id) return(summary, 'OK') except: return(None, 'ERROR: failed to retrieve phishing campaign details or ID non-existent\n') def remove_campaign_(id): ''' Remove Gophish campaign Input: Campaign Id in Gophish database Return: Campaign sumary or None, and message indicating OK or specific error (string). ''' msg = initialize_connector() if msg is not None: return(None, msg) try: summary = api.campaigns.summary(campaign_id=id) api.campaigns.delete(campaign_id=id) # print('Success: phishing campaign {} ({}) removed'.format(summary.id, summary.name)) return(summary, 'OK') except: return(None, 'ERROR: failed to remove phishing campaign or non-existent\n')
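All of the helpers above follow the same calling convention: they initialize the connector, then return an (object, message) tuple. The minimal usage sketch below shows that pattern end to end; the module name, CSV path, and group name are hypothetical, and it assumes a reachable Gophish server configured in config/SETAphish.cfg.

# Minimal usage sketch for the helpers above (hypothetical names/paths).
from seta_gophish_aux import create_group_, list_groups_  # hypothetical module name

with open('targets.csv', newline='') as csvfile:           # hypothetical CSV file
    group, msg = create_group_(['Staff 2024', csvfile])    # hypothetical group name
    if group is None:
        print(msg)

groups, msg = list_groups_()
if groups is not None:
    for g in groups:
        print(g.id, g.name, len(g.targets))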
An investigation of the field-induced ferrielectric subphases in antiferroelectric liquid crystals
We report results of detailed investigations of the dielectric response of two antiferroelectric liquid crystal materials over a wide range of frequencies as a function of bias voltage and of temperature. On the basis of extensive measurements of the tilt angle, the spontaneous polarization, and the optical transmittance as a function of voltage, we conclude that there appears to exist a previously undetected, new field-induced phase (referred to as the X phase) with uniform monodomain structure. This new phase seems to be stable over a wide range of applied voltages and possesses effective values of spontaneous polarization and tilt angle within 70-80% of their maximum values. Although the results appear to rule out the presence of the SmCα* phase, nevertheless, the new phase has some characteristics similar to those of the SmCα* phase. The differences between the characteristics of the two phases are detailed.
import { SkyraCommand, SkyraCommandOptions } from '@lib/structures/SkyraCommand';
import { CLIENT_ID } from '@root/config';
import { ApplyOptions } from '@skyra/decorators';
import { assetsFolder } from '@utils/constants';
import { fetchAvatar, radians } from '@utils/util';
import { Canvas } from 'canvas-constructor';
import { readFile } from 'fs-nextra';
import { KlasaMessage, KlasaUser } from 'klasa';
import { join } from 'path';

@ApplyOptions<SkyraCommandOptions>({
	bucket: 2,
	cooldown: 30,
	description: language => language.tget('COMMAND_CHASE_DESCRIPTION'),
	extendedHelp: language => language.tget('COMMAND_CHASE_EXTENDED'),
	requiredPermissions: ['ATTACH_FILES'],
	runIn: ['text'],
	spam: true,
	usage: '<user:username>'
})
export default class extends SkyraCommand {

	private KTemplate: Buffer | null = null;

	public async run(message: KlasaMessage, [user]: [KlasaUser]) {
		const attachment = await this.generate(message, user);
		return message.channel.send({ files: [{ attachment, name: 'chase.png' }] });
	}

	public async generate(message: KlasaMessage, user: KlasaUser) {
		let chased: KlasaUser;
		let chaser: KlasaUser;
		if (user.id === message.author.id && this.client.options.owners.includes(message.author.id)) throw '💥';

		if (user === message.author) [chased, chaser] = [message.author, this.client.user!];
		else if (this.client.options.owners.concat(CLIENT_ID).includes(user.id)) [chased, chaser] = [message.author, user];
		else [chased, chaser] = [user, message.author];

		const [chasedAvatar, chaserAvatar] = await Promise.all([
			fetchAvatar(chased, 128),
			fetchAvatar(chaser, 128)
		]);

		return new Canvas(569, 327)
			.addImage(this.KTemplate!, 0, 0, 569, 327)
			.setTransform(-1, 0, 0, 1, 0, 0)
			// Draw chased avatar
			.save()
			.translate(-144, 51)
			.rotate(radians(16.12))
			.addCircularImage(chasedAvatar, 0, 0, 26)
			.restore()
			// Draw chaser avatar
			.translate(-391, 62)
			.rotate(radians(12.26))
			.addCircularImage(chaserAvatar, 0, 0, 25)
			// Draw the buffer
			.toBufferAsync();
	}

	public async init() {
		this.KTemplate = await readFile(join(assetsFolder, './images/memes/chase.png'));
	}

}
/*
 * Copyright (C) 2016 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package flow

import (
	"net/http"
	"time"

	"github.com/skydive-project/skydive/common"
)

// TableAllocator aims to create/allocate a new flow table
type TableAllocator struct {
	common.RWMutex
	update time.Duration
	expire time.Duration
	tables map[*Table]bool
}

// Expire returns the expire parameter used by allocated tables
func (a *TableAllocator) Expire() time.Duration {
	return a.expire
}

// Update returns the update parameter used by allocated tables
func (a *TableAllocator) Update() time.Duration {
	return a.update
}

// QueryTable searches/queries within the flow tables
func (a *TableAllocator) QueryTable(tq *TableQuery) *TableReply {
	a.RLock()
	defer a.RUnlock()

	reply := &TableReply{
		Status: int32(http.StatusOK),
	}

	for table := range a.tables {
		if b := table.Query(tq); b != nil {
			reply.FlowSetBytes = append(reply.FlowSetBytes, b)
		}
	}

	return reply
}

// Alloc instantiates/allocates a new table
func (a *TableAllocator) Alloc(flowCallBack ExpireUpdateFunc, nodeTID string, opts TableOpts) *Table {
	a.Lock()
	defer a.Unlock()

	updateHandler := NewFlowHandler(flowCallBack, a.update)
	expireHandler := NewFlowHandler(flowCallBack, a.expire)
	t := NewTable(updateHandler, expireHandler, nodeTID, opts)
	a.tables[t] = true

	return t
}

// Release releases/destroys a flow table
func (a *TableAllocator) Release(t *Table) {
	a.Lock()
	delete(a.tables, t)
	a.Unlock()
}

// NewTableAllocator creates a new flow table allocator
func NewTableAllocator(update, expire time.Duration) *TableAllocator {
	return &TableAllocator{
		update: update,
		expire: expire,
		tables: make(map[*Table]bool),
	}
}
#include <stdio.h>
//#define scanf scanf_s

/* Read t test cases; for each, read n integers, sort them in
 * descending order with selection sort, and print the result. */
int main()
{
    int t, n, max, i, a[100], temp, j, k;

    scanf("%d", &t);
    for (i = 1; i <= t; i++) {
        scanf("%d", &n);
        for (j = 0; j < n; j++)
            scanf("%d", &a[j]);

        /* Selection sort: find the largest remaining element
         * and swap it into position j. */
        for (j = 0; j < n - 1; j++) {
            for (max = j, k = j + 1; k < n; k++) {
                if (a[k] > a[max])
                    max = k;
            }
            temp = a[j];
            a[j] = a[max];
            a[max] = temp;
        }

        for (j = 0; j < n; j++)
            printf("%d ", a[j]);
        printf("\n");
    }
    return 0;
}
package com.evostar.netty.handler;

import com.evostar.VO.UserVO;
import com.evostar.model.ChatRecord;
import com.evostar.model.User;
import com.evostar.netty.request.LoginRequestPacket;
import com.evostar.netty.request.MessageRequestPacket;
import com.evostar.netty.response.MessageResponsePacket;
import com.evostar.netty.utils.Session;
import com.evostar.netty.utils.SessionUtil;
import com.evostar.netty.utils.SpringUtil;
import com.evostar.service.ChatRecordService;
import com.evostar.service.UserService;
import com.evostar.utils.RedisUtils;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

@ChannelHandler.Sharable
public class MessageRequestHandler extends SimpleChannelInboundHandler<MessageRequestPacket> {

    private static UserService userService;
    private static ChatRecordService chatRecordService;
    private static RedisUtils redisUtils;

    static {
        userService = SpringUtil.getBean(UserService.class);
        chatRecordService = SpringUtil.getBean(ChatRecordService.class);
        redisUtils = SpringUtil.getBean(RedisUtils.class);
    }

    public final static MessageRequestHandler INSTANCE = new MessageRequestHandler();

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, MessageRequestPacket msg) throws Exception {
        MessageResponsePacket responsePacket = new MessageResponsePacket();
        // Get the current (sending) user
        Session session = SessionUtil.getSession(ctx.channel());
        UserVO fromUser = session.getUserVO();
        responsePacket.setFromUser(fromUser);
        responsePacket.setContent(msg.getContent());
        responsePacket.setTime(msg.getTime());
        int toUserId = msg.getToUserId();
        User toUser = userService.selectById(toUserId);
        if (toUser != null) {
            UserVO toUserVO = userService.getUserVO(toUser);
            responsePacket.setToUser(toUserVO);
            this.unreadNumInc(fromUser.getId(), toUserId);
            int unreadNum = this.getUnreadNum(fromUser.getId(), toUserId);
            responsePacket.setUnreadNum(unreadNum);
            this.save_record(responsePacket);
            Channel toUserChannel = SessionUtil.getChannel(String.valueOf(msg.getToUserId()));
            if (toUserChannel != null) {
                toUserChannel.writeAndFlush(responsePacket);
            }
            ctx.channel().writeAndFlush(responsePacket);
        } else {
            System.out.println("User does not exist");
        }
    }

    public void save_record(MessageResponsePacket messageResponsePacket) {
        // Save the chat record
        ChatRecord chatRecord = new ChatRecord();
        chatRecord.setFromUserId(messageResponsePacket.getFromUser().getId());
        chatRecord.setToUserId(messageResponsePacket.getToUser().getId());
        chatRecord.setContent(messageResponsePacket.getContent());
        chatRecord.setTime(messageResponsePacket.getTime());
        chatRecordService.addRecord(chatRecord);
    }

    // Get the unread message count
    public int getUnreadNum(int fromUserId, int toUserId) {
        String key = fromUserId + "_UnreadNum_" + toUserId;
        if (!redisUtils.hasKey(key)) {
            return 1;
        } else {
            return Integer.parseInt((String) redisUtils.getValue(key));
        }
    }

    // Increment the unread count by 1
    public void unreadNumInc(int fromUserId, int toUserId) {
        String key = fromUserId + "_UnreadNum_" + toUserId;
        redisUtils.increment(key, 1);
    }
}
export enum Width {
  lg = "full",
  md = "3xl",
  sm = "sm",
}

export enum Privilege {
  EDIT_WORKSPACE = "EDIT_WORKSPACE",
  CREATE_AGENT = "CREATE_AGENT",
  READ_ANY_AGENT = "READ_ANY_AGENT",
  EDIT_ANY_AGENT = "EDIT_ANY_AGENT",
  DELETE_ANY_AGENT = "DELETE_AGENT",
  CREATE_SAGA = "CREATE_SAGA",
  READ_ANY_SAGA = "READ_ANY_SAGA",
  EDIT_ANY_SAGA = "EDIT_ANY_SAGA",
  DELETE_ANY_SAGA = "DELETE_ANY_SAGA",
  CREATE_TASK = "CREATE_TASK",
  READ_ANY_TASK = "READ_ANY_TASK",
  EDIT_ANY_TASK = "EDIT_ANY_TASK",
  DELETE_ANY_TASK = "DELETE_TASK",
}

export enum BaseStage {
  TODO = "TODO",
  IN_PROGRESS = "IN_PROGRESS",
  DONE = "DONE",
}

export enum BaseLabel {
  MINOR = "MINOR",
  ROUTINE = "ROUTINE",
  CRITICAL = "CRITICAL",
}

/** UI */

export interface ViewOptions<T> {
  variant?: "list" | "grid";
  "sort.field"?: keyof T;
  "sort.order"?: "ASC" | "DESC";
  limit?: 10 | 50 | 100;
}

export type ITaskViewOptions = ViewOptions<
  Omit<ITask, "id" | "description" | "userId">
>;

/* DTO */

export interface IPaginated<T> {
  hasMore: boolean;
  total: number;
  items: T[];
}

export type ValidationError = {
  field: string;
  message: string;
};

export interface IResponse<T = unknown> {
  status: number;
  errors: ValidationError[] | null;
  data: T;
}

/* ENTITIES */

export interface IUser {
  id: number;
  name: string;
  email: string;
  image?: string;
  isAdmin: boolean;
  createdAt: string;
  updatedAt: string;
}

export type IAuth = Partial<IUser> & {
  vapidPublicKey?: string;
};

export interface IPushSubscription {
  endpoint: string;
  expirationTime?: number;
  keys: {
    auth: string;
    p256dh: string;
  };
}

export interface IRole {
  name: string;
  privileges: Privilege[];
}

export interface IWorkspace {
  id: string;
  name: string;
  description?: string;
  stages: string[];
  labels: string[];
  roles: IRole[];
  creatorId: number;
  createdAt: string;
  updatedAt: string;
  // agent?: IAgent;
}

export interface IUpdateRecord {
  field: string;
  prev: unknown;
  next: unknown;
}

export interface ITaskUpdate {
  records: IUpdateRecord[];
  agent: IAgent;
  createdAt: string;
}

export interface ITask {
  id: string;
  name: string;
  description?: string;
  stage?: string;
  label?: string;
  createdAt: string;
  expiresAt?: string;
  updates: ITaskUpdate[];
  creator: IAgent;
  assignee?: IAgent;
  sagas: Pick<ISaga, "id" | "name">[];
}

export interface ISaga {
  id: string;
  name: string;
  description?: string;
  createdAt: string;
  expiresAt?: string;
  creator: IAgent;
}

export interface IAgent {
  id: string;
  userId: number;
  name: string;
  image?: string;
  createdAt: string;
  role?: string;
}
#!/usr/bin/python
# Application to follow the snot log and refresh the snotrocket
# cache as necessary.
# Doesn't return; must be run as a daemon.

from elasticsearch import Elasticsearch
import tailer

from snotrocket_populate import import_ticket
import snotparser.snotparser as sp

snot_log = '/u/snot/test/logs/log'
#snot_log = 'test_log'  # for testing

es_index = 'snotrocket'
es = Elasticsearch()

# Follow the snot log file; the tenth whitespace-separated field of
# each log line holds the ticket number.
for line in tailer.follow(open(snot_log)):
    print(line)
    ticket_number = int(line.split()[9])
    print("processing updates to ticket {0}".format(ticket_number))
    import_ticket(ticket_number, es_index)
// # Modules
import { getPublic } from '@config/env'
import http from "http"
import handler from "serve-handler"

// # Interfaces
import { Configuration } from '@main/Program';

export default class Server {
    config: Configuration

    constructor(config: Configuration) {
        this.config = config
    }

    public exec() {
        let port: number = this.config.port
        http.createServer(async (req, res) => {
            return await handler(req, res, {
                cleanUrls: true,
                public: getPublic()
            })
        }).listen(port)
    }
}
import maya.cmds


def translate_selected_nodes_shape_cvs(translate_list):
    """Translate the shape CVs of all currently selected nodes."""
    selected_nodes = maya.cmds.ls(sl=True, long=True)
    if not selected_nodes:
        return
    return translate_node_shape_cvs(selected_nodes, translate_list)
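The function above delegates to translate_node_shape_cvs, which is not shown in this snippet. A plausible minimal sketch of such a helper is given below; it is an assumption rather than the original implementation, and it uses maya.cmds.listRelatives and maya.cmds.move on each shape's CV components.

def translate_node_shape_cvs(nodes, translate_list):
    """Hypothetical helper: relatively move all shape CVs of the given nodes.

    Sketch of what the undefined helper above might look like; the
    project's actual implementation may differ.
    """
    tx, ty, tz = translate_list
    for node in nodes:
        shapes = maya.cmds.listRelatives(node, shapes=True, fullPath=True) or []
        for shape in shapes:
            # '.cv[*]' addresses every control vertex on the shape.
            maya.cmds.move(tx, ty, tz, shape + '.cv[*]', relative=True)
    return nodes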
export { default as Button } from './button';
export { default as Card } from './card';
export { default as CardStack } from './card-stack';
export { default as Divider } from './divider';
export { default as Header } from './header';
export { default as Icon } from './icon';
export { default as Section } from './section';
export { default as Dropdown } from './dropdown';
export { default as Item } from './item';
export { default as Message } from './message';
export { default as Collapse } from './collapse';
export { default as Panel } from './collapse/panel';
export { default as Modal } from './modal';
export { default as Dialog } from './dialog';
export { default as Tabs } from './tabs';
export { default as Tab } from './tabs/tab';
export { default as Input } from './input';
export { default as Radio } from './radio';
export { default as RadioGroup } from './radio/radioGroup';
export { default as Checkbox } from './checkbox';
export { default as CheckboxGroup } from './checkbox/checkboxGroup';
export { default as Switch } from './switch';
export { default as Select } from './select';
export { default as Option } from './select/option';
export { default as Textarea } from './textarea';
export { default as Alert } from './alert';
export { default as InfiniteScroll } from './infinite-scroll';
export { default as Loading } from './loading';
export { default as Tabbar } from './tabbar';
export { default as Sidebar } from './sidebar';
export { default as Avatar } from './avatar';
export { default as useAnchor } from './useAnchor';
Is There Any Age Cutoff to Treat Elderly Patients with Head and Neck Cancer? Comparing with Septuagenarians and Octogenarians
With the increase in life expectancy, age is no longer considered a limitation for treatment. Nevertheless, the treatment of elderly patients with head and neck cancer (HNC) remains controversial. Here, we aimed to review our experience with the treatment of elderly patients, while particularly focusing on the differences among older old patients (septuagenarians vs. octogenarians). We retrospectively reviewed the records of 260 elderly patients who were assigned to 3 groups according to age: 70 years old ≤ group 1 < 75 years old, 75 years old ≤ group 2 < 80 years old, and group 3 ≥ 80 years old. The patients were assessed for comorbidities using the Adult Comorbidity Evaluation (ACE)-27, and the American Society of Anesthesia (ASA) physical status was also compared. Groups 1, 2, and 3 consisted of 97, 102, and 61 patients, respectively. No significant difference in demographic data was noted among the groups. However, group 3 showed more comorbidities than groups 1 and 2. With regard to the initial treatment for HNC, radiation therapy (RT) was more frequently performed in group 3 than in groups 1 and 2. Among the 7 patients in group 3 who were non-compliant with treatment, 6 had received RT. In group 3, a total of 18 patients underwent surgery, including microvascular free flap reconstruction, and no significant difference in postoperative complications was observed compared with groups 1 and 2. Moreover, no significant difference was noted in overall survival between the groups, regardless of the treatment modality chosen. In conclusion, octogenarians with HNC should be more carefully managed than septuagenarians with HNC. Surgical treatment can be considered in octogenarians with HNC, if it can be tolerated.
INTRODUCTION
Due to lower birth rates and better healthcare, the average age of the world's population has increased. In Korea in particular, rapid increases in life expectancy have been observed over the past few decades, with the life expectancy of females increasing from 79.6 years in 2000 to 85.1 years in 2013 (1). This increase in life expectancy is closely related to the incidence of various cancers, since about 60% of all tumors arise in patients who are older than 65 years. Furthermore, 70% of all deaths due to cancer occur in elderly patients (2)(3)(4). Although the majority of cases of head and neck cancer (HNC) occur between the fifth and sixth decade of life, the onset of the disease in patients older than 60 years is common (4), with up to 24% of HNC cases diagnosed in patients older than 70 years (2,3). Due to the increase in life expectancies as well as the improvements in global cancer care and survival rates, the traditional age limit of 65 years used by the European Organization for Research and Treatment of Cancer (EORTC) has been challenged in recent years. Moreover, sub-categories of "younger old" (65-70 years old) and "older old" (> 80 years old) have been introduced to allow for the allocation of elderly patients with cancer to homogeneous patient groups (5). Because of the complex anatomy and function of the head and neck region and the aggressive features of tumors arising in this area, the treatment of HNC is often associated with high morbidity and mortality rates. In general, it is common for elderly patients to display poor physical function and poor social support.
Also, elderly patients with HNC tend to display a number of comorbidities. As the number of elderly patients with HNC increases, head and neck surgeons are increasingly faced with a therapeutic dilemma. In fact, several studies indicate that older patients with HNC are less likely to receive curative treatment compared to their younger counterparts (6)(7)(8)(9). However, a number of recent studies have shown that radical surgical or radiotherapy treatment can be performed safely in elderly patients without an increase in overall complication rates, provided that the patients do not have severe comorbidities; these findings have led to a number of debates (6)(7)(8). In a study evaluating HNC patients who were 80 years of age or older, Italiano et al. (10) reported that, despite their age, overall survival was similar to the actuarial survival for general octogenarian populations, and that there was no significant difference in the frequency of preoperative and postoperative complications compared to younger patients (aged 65 years or younger). Indeed, as has previously been suggested (4,11,12), chronological age alone should not be a contraindication to an aggressive surgical approach, which should be attempted whenever the risk assessment ratio is favorable. It is difficult to set an age cutoff for radical treatment in HNC patients. In this study, we aimed to determine whether such a chronological age cutoff exists by comparing septuagenarians and octogenarians and reviewing our treatment strategies for elderly patients with HNC. We also analyzed the effect of age on the choice of initial treatment and compliance with treatment in HNC patients, and reviewed our experience with older old patients (octogenarians) with HNC.
MATERIALS AND METHODS
We performed a retrospective analysis of 260 HNC patients aged 70 years or older at the first diagnosis between 2000 and 2012. Patients who had thyroid cancer and lymphoma were excluded. Patients were divided into 3 groups according to age: group 1 consisted of patients of ages between 70 and 74 years; group 2 consisted of those aged between 75 and 79 years; and group 3 consisted of those who were 80 years of age or older. The follow-up period ranged from 0 to 133.5 months, with a median follow-up period of 29.4 months. Demographic data such as age, sex, location of primary tumor, pathology, and TNM staging were reviewed, in addition to data regarding treatment modalities, treatment compliance, and outcome of treatment (overall survival). In order to assess the effect of comorbidities on elderly patients, we examined various indices for the general condition of elderly patients using the Adult Comorbidity Evaluation (ACE)-27 (13) and the American Society of Anesthesiologists' (ASA) risk classification system. ACE-27 provides a comprehensive review of the condition of the cardiovascular, gastro-intestinal, renal, endocrine, neurological, psychiatric, rheumatologic, and immunological systems, as well as body weight and any reported malignancies or substance abuse. Each category contains 3 grades (1, mild; 2, moderate; and 3, severe), with the overall comorbidity score defined according to the highest ranked single disease. Two or more grade 2 ailments occurring in different organ systems indicate a classification of grade 3. ACE-27 grades were allotted as 0, 1, 2, or 3 in the hospital information system. A complete ACE-27 data form is available on http://oto.wustl.edu/clinepi/calc.html (Clinical Outcomes Research Office's Website) (14).
ACE-27 has been widely validated for HNC (13). The ASA class of the patient was obtained from the original anesthesia form assigned by the attending anesthesiologist. The latter is an index for perioperative risk; however, it can also be used to evaluate comorbidity as it describes the patient's physical status prior to surgery (15). ASA classifications are as follows: class 1, a normal healthy patient; class 2, a patient with mild systemic disease; class 3, a patient with severe systemic disease; class 4, a patient with disease that is a constant threat to life; class 5, a moribund patient who is not expected to survive without surgery; and class 6, a brain-dead patient (14). Demographic data were subjected to univariate analysis using Fisher's exact or χ2 tests, and the Mann-Whitney rank sum test. For statistical analysis, a P value less than 0.05 was considered significant. Kaplan-Meier estimates of the cumulative probability of overall survival were obtained. All statistical analyses were conducted using the SPSS software, version 18.0 (SPSS Inc., Chicago, IL, USA).
Ethics statement
The institutional review board at the Seoul National University College of Medicine, Seoul, Korea reviewed and approved the study protocol and waived informed consent for this study (IRB No. H-1406-120-591). We performed all procedures in accordance with the tenets of the World Medical Association's Declaration of Helsinki.
Demographic data
Demographic data of elderly patients with HNC are shown in Table 1. All patients had biopsy-proven HNC and were staged according to the TNM staging of the American Joint Committee (Table 1). In all groups, the majority of patients had squamous cell carcinoma (SCC) according to their pathological reports. The primary cancer distribution appeared to be similar in all three groups. However, there was a trend towards a greater number of patients with cancer of the hypopharynx, larynx, and sinus in the oldest patient group (group 3). Interestingly, there was no patient with cancer of the nasopharynx or salivary gland in group 3. The distribution of T staging (T1,2/T3,4) was similar in the 3 groups, whereas an advanced N stage at the first diagnosis was more frequently observed in group 3. The data associated with comorbidities among the elderly HNC patients are presented in Table 2. A few patients in groups 1 and 2, but none in group 3, were classified as grade 0 according to the ACE-27. Group 3 consisted of patients with higher ACE-27 and ASA classification indices, and it displayed significantly more comorbidities such as diabetes, hypertension, or cerebrovascular accidents, compared to the younger groups (groups 1 and 2) (P < 0.005). In group 3, 15 patients (24.6%) were classified as ACE-27 grade 3 and 5 patients (8.2%) were classified as ASA class 4. There was no significant difference in the indices of comorbidity between group 1 and group 2.
Treatment
In total, 180 of 260 patients (69.2%) completed standard treatment for HNC, including 70 (72.2%) in group 1, 69 (67.6%) in group 2, and 41 (67.2%) in group 3 (Fig. 1). Fifty-one patients discontinued treatment for various reasons. In group 3, 13 patients (21.3%) refused to receive any treatment at all, and the reluctance rate to receive treatment was statistically significantly different between groups 1 and 2 (septuagenarians) and group 3 (octogenarians) (P = 0.006). On the other hand, there was no significant difference in treatment compliance between group 1 and group 2.
With regard to the initial treatment modality for HNC, group 3 displayed a greater preference for radiation therapy (RT) than groups 1 and 2 (Fig. 2). On the other hand, the preference for chemotherapy was relatively low in group 3. Among the 51 patients who ceased treatment after it had started, 44 patients belonged to groups 1 and 2. Over half of these (30/51, 58.8%) did not complete RT, followed by 12 patients (23.5%) who did not complete concurrent chemo-radiation therapy (CCRT), and 9 patients (17.6%) who did not complete chemotherapy. Among the 7 patients who discontinued treatment in group 3, 6 patients did not complete RT and 1 patient did not complete CCRT. Among octogenarian patients, 18 (41.9%) chose surgery as their initial treatment for HNC. Most of these patients had one or more diseases that would affect their general medical condition, and thus influence the decision on whether to undergo surgery. With the exception of 2 patients, all patients displayed indices associated with ACE-27 grades 1, 2, and 3. In addition, 10 patients were classified as ASA class 3. Table 3 lists the types of surgeries performed on elderly HNC patients according to age groups. Compared with the relatively younger groups (groups 1 and 2), octogenarians also underwent radical surgery, including neck dissection, maxillectomy, mandibulectomy, and laryngectomy. Furthermore, 4 of them underwent microvascular free tissue transfer for reconstruction, including the use of anterolateral thigh flap, scapular flap, latissimus dorsi flap, and rectus abdominis flap. All patients underwent general anesthesia lasting from 75 to 795 minutes, with a mean anesthesia time of 300 minutes for octogenarians. Eight of the latter patients required a transfusion of 1 or 2 packed red blood cells during operation, and 5 patients were admitted to the surgical intensive care unit (SICU) for recovery for a maximum of 2 days. The hospitalization time ranged from 3 days to 30 days, with a mean of 13 days. Only 1 patient had an intraoperative complication due to a heart rhythm problem (PAC) after the induction of general anesthesia. Postoperative complications occurred in 9 patients, including 4 patients (22.2%) with major complications in group 3 (Fig. 3). Major postoperative complications included pneumonia, arrhythmia, wound infection, and development of a fistula. Genitourinary and pulmonary complications frequently occurred in group 3. During the 22.5 months of median follow-up, the 2-year disease-free survival of octogenarians was 52.5% and disease-specific survival was 62.5%. Among the patients who completed the treatment for HNC, there were no significant differences in overall survival between the age groups (Fig. 4). When considering only patients who underwent surgery for HNC, we did not find any difference in survival between septuagenarians and octogenarians. The average 2-year survival of group 3 was 65.7%, which was not significantly different from that in the general octogenarian population in Korea.
DISCUSSION
It has been reported that HNC often displays aggressive progression and poor prognosis. Treatment decisions for HNC patients are complicated by the fact that the head and neck area is anatomically complex and plays vital physiological roles. From our review of the case history of elderly patients with HNC, we found that the majority of these patients presented with one or more comorbidities that affected the choice of the treatment modality.
In general, surgery, RT, and CCRT have long been the major treatment approaches for advanced HNC. However, the management of HNC in a geriatric group is far more complex due to the high toxicity of loco-regional treatments and the high risk of functional deterioration in elderly HNC patients with a high comorbidity burden and impaired functional status (17). For these reasons, the treatment of HNC in elderly patients represents a global health challenge. Several studies have reported on the treatment of the geriatric population, with variable age limits. First, it is important to clarify what defines a person as "elderly." This definition has been modified in recent years due to the increasing global life expectancy. In 2014, the life expectancy of Korean females was over 80 years at birth, and the mean age of our study group was 76.9 years. The classical definition of an elderly person originally referred to individuals who were at least 65 years of age in 1998, and this definition was used by the EORTC in clinical trials of radical RT to treat HNC (18). However, at present, an age of 65 years is no longer considered to describe an individual as "elderly". There have been many studies on geriatrics, and the definition of "elderly" has been variable. Because differences in treatment efficacy, such as altered pharmacokinetics and pharmacodynamics, are observed after that age (19), age 70 is a reference point commonly used in clinical trials in oncology (20). We defined "elderly" patients as those aged ≥ 70 years at the time of diagnosis, and we sub-divided elderly patients into 3 groups to find any clinical differences between them; ages ranged from 70 to 74, 75 to 79, and ≥ 80 years. In our study groups, the most prevalent primary tumor sites were the oral cavity and the larynx, followed by the oropharynx and hypopharynx. This was concordant with a study of HNC patients who were at least 80 years old (10). In our review, there were significant differences in general condition, as assessed by the ACE-27 and ASA scales, between patients in their seventies and those aged 80 years or over. In patients aged 80 years or older, treatment compliance was also significantly decreased. Among the patients who started treatment for their HNC, the preference for surgery was dramatically decreased in group 3, which was consistent with previous studies of older HNC patients treated with either surgery alone (21) or surgery in combination with RT or CCRT (11). Instead, RT was the most preferred modality in group 3. RT was thought to show lower toxicity than chemotherapy, and thus RT would be considered an attractive curative option (alone or with systemic therapy) for elderly patients who refused to undergo surgery due to their frailty. This is supported by several studies that have described RT as a safe and effective treatment for HNC even in elderly patients (6,22,23). In addition to the difficulties in terms of the choice of treatment modality for HNC, treatment compliance can also be problematic in elderly patients, due to several factors including progressive loss of stress tolerance, decline in functional reserve of multiple organ systems, high prevalence of comorbid conditions, limited socioeconomic support, reduced cognition, and higher prevalence of depression (24). Although the preference for RT as an initial treatment for HNC was high in elderly patients, compliance with RT was poor in our study groups; indeed, over half of the patients who agreed to receive RT eventually discontinued treatment.
This finding is consistent with a recent study, which reported a treatment compliance of approximately 60% for both radical and palliative purposes in elderly patients (24). Since elderly patients had to receive RT 5 times per week for several weeks in an outpatient clinic in Korea, such prolonged treatment can be an important cause of low compliance with RT. Meanwhile, many studies have suggested that chronological age alone should not be a contraindication to an aggressive surgical approach, and that it should be attempted whenever the risk-assessment ratio is favorable (4,11,12). In addition to aggressive radical surgery, microvascular reconstruction in the elderly can be performed with high success rates even in the octogenarian group (25). In our study, radical surgeries such as maxillectomy, mandibulectomy, or laryngectomy, and even free microvascular reconstruction, were successfully performed in octogenarian patients. Despite the fact that the latter patients displayed at least 2 comorbidities, they showed no significant postoperative complications associated with these radical surgeries. The limitation of this study lies in the fact that the analysis was retrospective and represented the case histories of a relatively small number of patients. In addition, our study groups were too heterogeneous to reveal survival differences, as all types of HNC were included. However, the incidence of HNC is relatively lower than that of other malignancies, and elderly patients with HNC are even rarer. For those reasons, our study is meaningful in that it provides an overview of treatment outcomes and treatment compliance in elderly HNC patients. Moreover, our study focused on an Asian population, which has shown rapid increases in life expectancy over the past several decades, in contrast to previous large-scale studies in Western countries. Previous studies have shown various limitations in the treatment of elderly HNC patients. In our study, we divided a cohort of elderly patients into several groups, and analyzed the differences between elderly patients and older old patients. No significant demographic differences were observed between the age groups; however, the tumor stage at the first diagnosis became more advanced as age increased. There were no significant differences in comorbidities between patients aged between 70 and 75 years and those aged between 76 and 80 years. However, octogenarians had significantly more comorbidities than septuagenarians. With regard to care for patients with HNC, octogenarians should receive special attention because they have more comorbidities than septuagenarians. Despite being octogenarians and having several comorbidities, most of these patients were treated with surgical procedures without significant complications. RT is preferred to various other treatment modalities, and yields several advantages for elderly patients. However, considering the low compliance of patients with RT and its prolonged treatment duration, surgery for octogenarians would be an excellent treatment option whenever the risk assessment ratio is determined to be favorable.
/**
 * Public key reconstruction. (Sec 4, 3.5).
 *
 * @param idInfo - user identification data.
 * @param reconstructionPoint - public key reconstruction data.
 * @return reconstructed user public key.
 */
public ECDSA extractPublic( final Binary idInfo, final Binary reconstructionPoint )
{
    ECCurve ecCurve = issuerKey.getCurve();
    BigInteger n = ecCurve.getOrder();

    ECPoint Pu = ecCurve.decodePoint( reconstructionPoint );
    ECPoint Qca = ecCurve.decodePoint( issuerKey.getPublic() );

    BigInteger e = calculateE( n, Bin( idInfo, reconstructionPoint ) );
    Binary Qu = Pu.multiply( e ).add( Qca ).getEncoded( false );

    return new ECDSA( ecCurve ).setPublic( Qu );
}
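Written out in the variable names of the method above, the reconstruction relation the code computes is the following (a sketch; it assumes, per the cited spec section, that calculateE hashes its input and reduces it modulo the curve order n, since calculateE is not shown here):

\[
  e = H(\mathit{idInfo} \parallel P_u) \bmod n, \qquad
  Q_u = e \cdot P_u + Q_{ca}
\]

where \(P_u\) is the decoded reconstruction point, \(Q_{ca}\) is the issuer's public key, and \(Q_u\) is the reconstructed user public key, which is exactly what the line Pu.multiply( e ).add( Qca ) evaluates.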
// SPDX-License-Identifier: GPL-2.0-only /* * Copyright (c) 2012-2020 Synaptics Incorporated */ #include <linux/kernel.h> #include <linux/rmi.h> #include <linux/input.h> #include <linux/slab.h> #include "rmi_driver.h" #define RMI_F3A_MAX_GPIO_COUNT 128 #define RMI_F3A_MAX_REG_SIZE DIV_ROUND_UP(RMI_F3A_MAX_GPIO_COUNT, 8) /* Defs for Query 0 */ #define RMI_F3A_GPIO_COUNT 0x7F #define RMI_F3A_DATA_REGS_MAX_SIZE RMI_F3A_MAX_REG_SIZE #define TRACKSTICK_RANGE_START 3 #define TRACKSTICK_RANGE_END 6 struct f3a_data { /* Query Data */ u8 gpio_count; u8 register_count; u8 data_regs[RMI_F3A_DATA_REGS_MAX_SIZE]; u16 *gpio_key_map; struct input_dev *input; struct rmi_function *f03; bool trackstick_buttons; }; static void rmi_f3a_report_button(struct rmi_function *fn, struct f3a_data *f3a, unsigned int button) { u16 key_code = f3a->gpio_key_map[button]; bool key_down = !(f3a->data_regs[0] & BIT(button)); if (f3a->trackstick_buttons && button >= TRACKSTICK_RANGE_START && button <= TRACKSTICK_RANGE_END) { rmi_f03_overwrite_button(f3a->f03, key_code, key_down); } else { rmi_dbg(RMI_DEBUG_FN, &fn->dev, "%s: call input report key (0x%04x) value (0x%02x)", __func__, key_code, key_down); input_report_key(f3a->input, key_code, key_down); } } static irqreturn_t rmi_f3a_attention(int irq, void *ctx) { struct rmi_function *fn = ctx; struct f3a_data *f3a = dev_get_drvdata(&fn->dev); struct rmi_driver_data *drvdata = dev_get_drvdata(&fn->rmi_dev->dev); int error; int i; if (drvdata->attn_data.data) { if (drvdata->attn_data.size < f3a->register_count) { dev_warn(&fn->dev, "F3A interrupted, but data is missing\n"); return IRQ_HANDLED; } memcpy(f3a->data_regs, drvdata->attn_data.data, f3a->register_count); drvdata->attn_data.data += f3a->register_count; drvdata->attn_data.size -= f3a->register_count; } else { error = rmi_read_block(fn->rmi_dev, fn->fd.data_base_addr, f3a->data_regs, f3a->register_count); if (error) { dev_err(&fn->dev, "%s: Failed to read F3a data registers: %d\n", __func__, error); return IRQ_RETVAL(error); } } for (i = 0; i < f3a->gpio_count; i++) if (f3a->gpio_key_map[i] != KEY_RESERVED) rmi_f3a_report_button(fn, f3a, i); if (f3a->trackstick_buttons) rmi_f03_commit_buttons(f3a->f03); return IRQ_HANDLED; } static int rmi_f3a_config(struct rmi_function *fn) { struct f3a_data *f3a = dev_get_drvdata(&fn->dev); struct rmi_driver *drv = fn->rmi_dev->driver; const struct rmi_device_platform_data *pdata = rmi_get_platform_data(fn->rmi_dev); if (!f3a) return 0; if (pdata->gpio_data.trackstick_buttons) { /* Try [re-]establish link to F03. 
*/ f3a->f03 = rmi_find_function(fn->rmi_dev, 0x03); f3a->trackstick_buttons = f3a->f03 != NULL; } drv->set_irq_bits(fn->rmi_dev, fn->irq_mask); return 0; } static bool rmi_f3a_is_valid_button(int button, struct f3a_data *f3a, u8 *query1_regs, u8 *ctrl1_regs) { /* gpio exist && direction input */ return (query1_regs[0] & BIT(button)) && !(ctrl1_regs[0] & BIT(button)); } static int rmi_f3a_map_gpios(struct rmi_function *fn, struct f3a_data *f3a, u8 *query1_regs, u8 *ctrl1_regs) { const struct rmi_device_platform_data *pdata = rmi_get_platform_data(fn->rmi_dev); struct input_dev *input = f3a->input; unsigned int button = BTN_LEFT; unsigned int trackstick_button = BTN_LEFT; bool button_mapped = false; int i; int button_count = min_t(u8, f3a->gpio_count, TRACKSTICK_RANGE_END); f3a->gpio_key_map = devm_kcalloc(&fn->dev, button_count, sizeof(f3a->gpio_key_map[0]), GFP_KERNEL); if (!f3a->gpio_key_map) { dev_err(&fn->dev, "Failed to allocate gpio map memory.\n"); return -ENOMEM; } for (i = 0; i < button_count; i++) { if (!rmi_f3a_is_valid_button(i, f3a, query1_regs, ctrl1_regs)) continue; if (pdata->gpio_data.trackstick_buttons && i >= TRACKSTICK_RANGE_START && i < TRACKSTICK_RANGE_END) { f3a->gpio_key_map[i] = trackstick_button++; } else if (!pdata->gpio_data.buttonpad || !button_mapped) { f3a->gpio_key_map[i] = button; input_set_capability(input, EV_KEY, button++); button_mapped = true; } } input->keycode = f3a->gpio_key_map; input->keycodesize = sizeof(f3a->gpio_key_map[0]); input->keycodemax = f3a->gpio_count; if (pdata->gpio_data.buttonpad || (button - BTN_LEFT == 1)) __set_bit(INPUT_PROP_BUTTONPAD, input->propbit); return 0; } static int rmi_f3a_initialize(struct rmi_function *fn, struct f3a_data *f3a) { u8 query1[RMI_F3A_MAX_REG_SIZE]; u8 ctrl1[RMI_F3A_MAX_REG_SIZE]; u8 buf; int error; error = rmi_read(fn->rmi_dev, fn->fd.query_base_addr, &buf); if (error < 0) { dev_err(&fn->dev, "Failed to read general info register: %d\n", error); return -ENODEV; } f3a->gpio_count = buf & RMI_F3A_GPIO_COUNT; f3a->register_count = DIV_ROUND_UP(f3a->gpio_count, 8); /* Query1 -> gpio exist */ error = rmi_read_block(fn->rmi_dev, fn->fd.query_base_addr + 1, query1, f3a->register_count); if (error) { dev_err(&fn->dev, "Failed to read query1 register\n"); return error; } /* Ctrl1 -> gpio direction */ error = rmi_read_block(fn->rmi_dev, fn->fd.control_base_addr + 1, ctrl1, f3a->register_count); if (error) { dev_err(&fn->dev, "Failed to read control1 register\n"); return error; } error = rmi_f3a_map_gpios(fn, f3a, query1, ctrl1); if (error) return error; return 0; } static int rmi_f3a_probe(struct rmi_function *fn) { struct rmi_device *rmi_dev = fn->rmi_dev; struct rmi_driver_data *drv_data = dev_get_drvdata(&rmi_dev->dev); struct f3a_data *f3a; int error; if (!drv_data->input) { dev_info(&fn->dev, "F3A: no input device found, ignoring\n"); return -ENXIO; } f3a = devm_kzalloc(&fn->dev, sizeof(*f3a), GFP_KERNEL); if (!f3a) return -ENOMEM; f3a->input = drv_data->input; error = rmi_f3a_initialize(fn, f3a); if (error) return error; dev_set_drvdata(&fn->dev, f3a); return 0; } struct rmi_function_handler rmi_f3a_handler = { .driver = { .name = "rmi4_f3a", }, .func = 0x3a, .probe = rmi_f3a_probe, .config = rmi_f3a_config, .attention = rmi_f3a_attention, };
from __future__ import annotations
from typing import List, Tuple


class Tile:
    # This class may extend a PyGame Button class in the future, depending on
    # the implementation
    """
    A Tile class. This is used to represent a tile inside the grid in a
    Minesweeper game.

    This is an abstract class that should not be instantiated directly.
    This class is extended by the BombTile, EmptyTile and NumberTile classes.

    #TODO Reorganize attributes, functions, and documentation as needed

    === Public Attributes ===

    === Private Attributes ===
    _board:
        Stores the board that the Tile is in.
    _pos:
        Stores the position of the Tile in (x, y) format.
    _flagged:
        Stores a boolean indicating whether the Tile is flagged.
    _revealed:
        Stores whether the Tile's contents are revealed on the board.
        For example, the tile may be clicked to reveal a number, bomb,
        or empty space.
    _tile_type:
        Stores the string representation of this type of tile.

    === Representation Invariants ===
    - _flagged and _revealed cannot both be True at the same time
    - _pos must contain values only within the game grid's range
    """
    _board: List[List[Tile]]
    # _icon:  #TODO decide the type of _icon
    _pos: Tuple[int, int]
    _flagged: bool
    _revealed: bool
    _tile_type: str

    def __init__(self, board: List[List[Tile]], position: Tuple[int, int]):
        """
        Initialize the tile with <board> and <position>.
        Initially, _flagged and _revealed are set to False, as the player
        has not clicked or flagged any of the Tiles.

        Precondition: The constructor is called only when #TODO complete doc

        :param: board - the Board that this Tile is stored in.
        :param: position - the location of the Tile in the Board
        """
        self._board = board
        self._icon = None
        self._pos = position
        self._flagged = self._revealed = False
        self._tile_type = "Tile"  # TODO: Modify if necessary

    def reveal_tile(self) -> bool:
        """
        Update the Board state based on the type of tile clicked.
        This is an abstract method.

        Precondition: Method is called only when the player left-clicks the
        tile at _pos in the game grid. The tile cannot be flagged.
        Post condition: return True if the game doesn't end after this
        click; otherwise, return False.
        """
        if self._revealed is True or self._flagged is True:
            # Do nothing if revealed or flagged
            return True
        else:
            self._reveal()
            return self._calculate_click()

    def _calculate_click(self) -> bool:
        """
        This method is only called by reveal_tile. It defines the behaviour
        of a tile when the tile is clicked and revealed. Different tiles
        have different behaviour. For example, empty tiles reveal the
        surrounding empty and numbered tiles.
        Returns False if it is a bomb tile; returns True otherwise.
        """
        raise NotImplementedError

    def flag_tile(self) -> bool:
        """
        If the tile is not revealed, then change the flagged state of this
        tile.

        Preconditions: _revealed must be False, and the function is called
        only by a right-click.
        Post conditions: return whether the flagging action is successful.
        """
        if self._revealed is False:
            self._flag()
            return True
        return False

    def _flag(self) -> None:
        """
        Changes the boolean value stored in _flagged.
        """
        self._flagged = not self._flagged

    def is_flagged(self) -> bool:
        """
        Return whether the tile is flagged.

        :return: The value stored in _flagged.
        """
        return self._flagged

    def get_position(self) -> Tuple[int, int]:
        """
        Gets the position of the tile.

        :return: The position of the Tile in the form of (row, col)
        """
        return self._pos

    def get_tile_type(self) -> str:
        """
        Return "flag" if the tile is flagged. Otherwise, return "closed" if
        the tile has not been clicked. If it has been clicked, return the
        type of this tile.

        :return: A string representation of the tile's state.
        This is either "flag", "closed", or the stored _tile_type.
        """
        if self.is_flagged():
            return "flag"
        elif not self.is_revealed():
            return "closed"
        else:
            return self._tile_type

    def is_same_type(self, other: Tile) -> bool:
        """
        :param other: A Tile object
        :return: Whether this object and the other tile object are of the
        same type.
        """
        return self.get_tile_type() == other.get_tile_type()

    def _reveal(self) -> None:
        """
        Reveals the tile.
        """
        self._revealed = True

    def is_revealed(self) -> bool:
        """
        :return: A boolean. True if this tile is revealed, otherwise False.
        """
        return self._revealed

    def to_string(self) -> str:
        """
        :return: A string representation of this Tile.
        """
        if not self.is_revealed():
            if self.is_flagged():
                return "[F]"
            else:
                return "[|]"
        return "[" + self.get_symbol() + "]"

    def get_symbol(self) -> str:
        """
        :return: A string representation of the contents of this tile.
        """
        raise NotImplementedError
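The docstring above names BombTile, EmptyTile and NumberTile as the concrete subclasses, but none of them appear in this file. Below is a minimal sketch of what a NumberTile might look like; it is an assumption built only from the abstract hooks defined above (_calculate_click and get_symbol), not the project's actual implementation, and the adjacent_bombs parameter is hypothetical.

class NumberTile(Tile):
    """Hypothetical concrete subclass: a tile showing the count of
    adjacent bombs. Sketched from the abstract hooks above; the real
    NumberTile in this project may differ."""

    def __init__(self, board: List[List[Tile]], position: Tuple[int, int],
                 adjacent_bombs: int):
        super().__init__(board, position)
        self._adjacent_bombs = adjacent_bombs
        self._tile_type = "number"

    def _calculate_click(self) -> bool:
        # Revealing a numbered tile never ends the game.
        return True

    def get_symbol(self) -> str:
        return str(self._adjacent_bombs)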
/**
 * Cause the interrupt hook to be called when the next
 * JavaScript instruction starts to execute.
 * <p>
 * The interrupt is self clearing.
 * @see netscape.jsdebug.DebugController#setInterruptHook
 */
public void sendInterrupt()
    throws ForbiddenTargetException
{
    PrivilegeManager.checkPrivilegeEnabled("Debugger");
    sendInterrupt0();
}
/**
 * Goindex page script
 * @author VenDream<<EMAIL>>
 * @since 2020-4-20
 */
alert('hello');
def query_album(self, album, bonus):
    data = None
    by_word = ' {} '.format(self.translate('by'))
    if len(album.split(by_word)) > 1:
        album, artist = album.split(by_word)
        album_search = album
    else:
        album_search = album

    albums = self.client.list('album')
    if len(albums) > 0:
        key, confidence = match_one(album.lower(), self.albums)
        self.log.info("MPD Album: " + album + " matched to " + key +
                      " with conf " + str(confidence))
        data = {'data': self.client.search('album', key)[0],
                'name': key,
                'type': 'album'}
        return confidence, data
    else:
        return NOTHING_FOUND
Satirical Texas Bill Would Fine Men's Masturbation, Set Viagra Waiting Period A bill introduced in the Texas House of Representatives on Friday would fine men for masturbating, allow doctors to refuse to prescribe Viagra and require men to undergo a medically unnecessary rectal exam before any elective vasectomy. State Rep. Jessica Farrar, who introduced the bill, tells The Texas Tribune she knows the satirical legislation will never be passed. But she hopes it will start a conversation about abortion restrictions. The measure turns the language of abortion laws against men. One section would protect doctors who refuse to perform vasectomies, prescribe Viagra or perform colonoscopies. It mirrors a Texas law passed earlier this year that bans "wrongful birth" lawsuits — when parents sue a doctor for failing to inform them of a fetus's disability or their abortion options. That kind of suit was already virtually nonexistent, the Dallas Morning News reports. Another section would require men seeking health services to review a pamphlet called "A Man's Right to Know" with their doctor. The bill specifies that the pamphlet must include "medically accurate, objective and complete" information. Texas' legally mandated "A Woman's Right to Know" pamphlet has been sharply criticized as including inaccurate and misleading information. The bill would require a 24-hour waiting period for men's vasectomies or Viagra prescriptions, just like Texas' 24-hour waiting period for abortions. It would also mandate a "medically-unnecessary digital rectal exam and magnetic resonance [imaging] of the rectum" before any elective vasectomy, colonoscopy or Viagra prescription. Texas, like several other states, requires that an ultrasound be performed before any abortion, even if it's not considered medically necessary by a doctor. The suggestion of a digital rectal exam brings to mind transvaginal ultrasounds, in particular — a procedure that was required by one Virginia anti-abortion bill, though not specifically mandated in the law's final form. And the proposal that men's masturbation be regulated, with fines for any emissions that are "outside of a woman's vagina" or not saved for future conception? It's justified on the grounds of preserving the "sanctity of life." Farrar, a Democrat from Houston serving her 11th term in the state House of Representatives, has been an outspoken critic of a number of Texas laws limiting abortions, the Tribune reports. This session, she has spoken against bills that would require hospitals to bury or cremate fetal remains (similar to a state rule that was recently blocked) and that "would charge both abortion providers and women who receive an abortion with murder," the newspaper writes. Farrar spoke to the Tribune about what she hopes to accomplish with her Men's Right to Know bill: " 'What I would like to see is this make people stop and think,' Farrar told The Texas Tribune. 'Maybe my colleagues aren't capable of that, but the people who voted for them, or the people that didn't vote at all, I hope that it changes their mind and helps them to decide what the priorities are.' ... "In proposing a fine for masturbation, Farrar says that if a man's semen is not used to create a pregnancy, 'then it's a waste ... because that semen can be used — and is to be used — for creating more human life.' " "Men have to answer for their actions," Farrar told the paper. Rep. 
Tony Tinderholt — who introduced the bill that would cause both women and their doctors to be charged with murder for an abortion — told the Tribune he was "embarrassed" for Farrar and that her attempt to compare men's reproductive health care to abortion "shows a lack of a basic understanding of human biology."
<filename>software/perception/image_io_utils/src/image-warp/ImageWarper.cpp #include <memory> #include <string> #include <lcm/lcm-cpp.hpp> #include <drc_utils/LcmWrapper.hpp> #include <drc_utils/BotWrapper.hpp> #include <bot_param/param_client.h> #include <bot_param/param_util.h> #include <bot_core/camtrans.h> #include <lcmtypes/bot_core/image_t.hpp> #include <opencv2/opencv.hpp> #include <ConciseArgs> struct ImageWarper { std::shared_ptr<drc::LcmWrapper> mLcmWrapper; std::shared_ptr<lcm::LCM> mLcm; std::shared_ptr<drc::BotWrapper> mBotWrapper; std::string mInputChannel; std::string mOutputChannel; BotParam* mBotParam; lcm::Subscription* mSubscription; std::vector<int> mWarpFieldIntX; std::vector<int> mWarpFieldIntY; std::vector<float> mWarpFieldFrac00; std::vector<float> mWarpFieldFrac01; std::vector<float> mWarpFieldFrac10; std::vector<float> mWarpFieldFrac11; int mOutputWidth; int mOutputHeight; int mJpegQuality; ImageWarper() { mLcmWrapper.reset(new drc::LcmWrapper()); mLcm = mLcmWrapper->get(); mBotWrapper.reset(new drc::BotWrapper(mLcm)); mSubscription = NULL; } void interpolateGray(const cv::Mat& iImage, const int iIndex, std::vector<uint8_t>& oBytes) { int xInt(mWarpFieldIntX[iIndex]), yInt(mWarpFieldIntY[iIndex]); if (xInt < 0) return; float c00(mWarpFieldFrac00[iIndex]), c01(mWarpFieldFrac01[iIndex]); float c10(mWarpFieldFrac10[iIndex]), c11(mWarpFieldFrac11[iIndex]); int pos = yInt*iImage.step+xInt; uint8_t* data = iImage.data; float val00(data[pos]), val10(data[pos+1]); float val01(data[pos+iImage.step]), val11(data[pos+iImage.step+1]); oBytes[iIndex] = (uint8_t)(c00*val00 + c01*val01 + c10*val10 + c11*val11); } void interpolateColor(const cv::Mat& iImage, const int iIndex, std::vector<uint8_t>& oBytes) { int xInt(mWarpFieldIntX[iIndex]), yInt(mWarpFieldIntY[iIndex]); if (xInt < 0) return; float c00(mWarpFieldFrac00[iIndex]), c01(mWarpFieldFrac01[iIndex]); float c10(mWarpFieldFrac10[iIndex]), c11(mWarpFieldFrac11[iIndex]); uint8_t* data = iImage.data; int pos = yInt*iImage.step+xInt*3; for (int k = 0; k < 3; ++k, ++pos) { float val00(data[pos]), val10(data[pos+3]); float val01(data[pos+iImage.step]), val11(data[pos+iImage.step+3]); oBytes[iIndex*3+k] = (uint8_t)(c00*val00 + c01*val01 + c10*val10 + c11*val11); } } void onImage(const lcm::ReceiveBuffer* iBuf, const std::string& iChannel, const bot_core::image_t* iMessage) { // uncompress or copy cv::Mat in; int imgType = CV_8UC3; switch (iMessage->pixelformat) { case bot_core::image_t::PIXEL_FORMAT_MJPEG: in = cv::imdecode(cv::Mat(iMessage->data), -1); if (in.channels() == 3) cv::cvtColor(in, in, CV_RGB2BGR); break; case bot_core::image_t::PIXEL_FORMAT_GRAY: imgType = CV_8UC1; in = cv::Mat(iMessage->height, iMessage->width, imgType, (void*)iMessage->data.data(), iMessage->row_stride); break; case bot_core::image_t::PIXEL_FORMAT_RGB: imgType = CV_8UC3; in = cv::Mat(iMessage->height, iMessage->width, imgType, (void*)iMessage->data.data(), iMessage->row_stride); break; default: std::cout << "error: pixel format not supported\n"; return; } // warp int rowStride = mOutputWidth*in.channels(); std::vector<uint8_t> bytes(mOutputHeight*rowStride); std::fill(bytes.begin(),bytes.end(),0); cv::Mat out(mOutputHeight, mOutputWidth, imgType, (void*)bytes.data(), rowStride); if (in.channels() == 1) { for (size_t i = 0; i < mWarpFieldIntX.size(); ++i) { interpolateGray(in, i, bytes); } } else { for (size_t i = 0; i < mWarpFieldIntX.size(); ++i) { interpolateColor(in, i, bytes); } } // compress bot_core::image_t msg; std::vector<int> params = { 
cv::IMWRITE_JPEG_QUALITY, mJpegQuality }; if (in.channels() == 3) cv::cvtColor(out, out, CV_RGB2BGR); if (!cv::imencode(".jpg", out, msg.data, params)) { std::cout << "error encoding jpeg image" << std::endl; } msg.size = msg.data.size(); // transmit msg.utime = iMessage->utime; msg.width = mOutputWidth; msg.height = mOutputHeight; msg.row_stride = rowStride; msg.pixelformat = bot_core::image_t::PIXEL_FORMAT_MJPEG; msg.nmetadata = 0; mLcm->publish(mOutputChannel, &msg); std::cout << "re-transmitted image on " << mOutputChannel << std::endl; } void setJpegQuality(const int iQuality) { mJpegQuality = iQuality; } bool setChannels(const std::string& iInputChannel, const std::string& iOutputChannel) { if (mSubscription != NULL) { mLcm->unsubscribe(mSubscription); } mInputChannel = iInputChannel; mOutputChannel = iOutputChannel; BotCamTrans* inputCamTrans = bot_param_get_new_camtrans(mBotWrapper->getBotParam(), mInputChannel.c_str()); if (inputCamTrans == NULL) { std::cout << "error: cannot find camera " << mInputChannel << std::endl; return false; } BotCamTrans* outputCamTrans = bot_param_get_new_camtrans(mBotWrapper->getBotParam(), mOutputChannel.c_str()); if (outputCamTrans == NULL) { std::cout << "error: cannot find camera " << mOutputChannel << std::endl; return false; } int inputWidth = bot_camtrans_get_width(inputCamTrans); int inputHeight = bot_camtrans_get_height(inputCamTrans); mOutputWidth = bot_camtrans_get_width(outputCamTrans); mOutputHeight = bot_camtrans_get_height(outputCamTrans); // precompute warp field mWarpFieldIntX.resize(mOutputWidth*mOutputHeight); mWarpFieldIntY.resize(mWarpFieldIntX.size()); mWarpFieldFrac00.resize(mWarpFieldIntX.size()); mWarpFieldFrac01.resize(mWarpFieldIntX.size()); mWarpFieldFrac10.resize(mWarpFieldIntX.size()); mWarpFieldFrac11.resize(mWarpFieldIntX.size()); Eigen::Isometry3d outputToInput; if (!mBotWrapper->getTransform(mOutputChannel, mInputChannel, outputToInput)) { std::cout << "error: cannot get transform from " << mOutputChannel << " to " << mInputChannel << std::endl; return false; } Eigen::Matrix3d rotation = outputToInput.rotation(); Eigen::Vector3d ray; for (int i = 0, pos = 0; i < mOutputHeight; ++i) { for (int j = 0; j < mOutputWidth; ++j, ++pos) { mWarpFieldIntX[pos] = -1; if (0 != bot_camtrans_unproject_pixel(outputCamTrans, j, i, ray.data())) { continue; } ray = rotation*ray; double pix[3]; if (0 != bot_camtrans_project_point(inputCamTrans, ray.data(), pix)) { continue; } if ((pix[2] < 0) || (pix[0] < 0) || (pix[0] >= inputWidth-1) || (pix[1] < 0) || (pix[1] >= inputHeight-1)) { continue; } mWarpFieldIntX[pos] = (int)pix[0]; mWarpFieldIntY[pos] = (int)pix[1]; double fracX = pix[0] - mWarpFieldIntX[pos]; double fracY = pix[1] - mWarpFieldIntY[pos]; mWarpFieldFrac00[pos] = (1-fracX)*(1-fracY); mWarpFieldFrac01[pos] = (1-fracX)*fracY; mWarpFieldFrac10[pos] = fracX*(1-fracY); mWarpFieldFrac11[pos] = fracX*fracY; } } mLcm->subscribe(mInputChannel, &ImageWarper::onImage, this); return true; } void start() { mLcmWrapper->startHandleThread(true); } }; int main(const int iArgc, const char** iArgv) { int jpegQuality = 90; std::string inputChannel, outputChannel; ConciseArgs opt(iArgc, (char**)iArgv); opt.add(inputChannel, "i", "input_channel", "incoming camera channel to warp"); opt.add(outputChannel, "o", "output_channel", "camera for output warp and publish"); opt.add(jpegQuality, "j", "jpeg_quality", "jpeg quality (1-100)"); opt.parse(); ImageWarper obj; obj.setJpegQuality(jpegQuality); if (!obj.setChannels(inputChannel, outputChannel)) { 
    return -1;
  }
  obj.start();
  // Return success to the shell once the handler loop exits.
  return 0;
}
h, w, n = map(int, input().split())
coord = [list(map(int, input().split())) for i in range(n)]

# For every bomb, increment a sparse counter for each 3x3-window centre
# (rows 2..h-1, columns 2..w-1) whose window covers the bomb.
num = {}
for i in range(n):
    for j, k in [[0, 0], [0, 1], [0, -1], [1, 0], [1, 1], [1, -1],
                 [-1, 0], [-1, 1], [-1, -1]]:
        if not (2 <= coord[i][0] + j <= h - 1) or not (2 <= coord[i][1] + k <= w - 1):
            continue
        if coord[i][0] + j not in num:
            num[coord[i][0] + j] = {}
        if coord[i][1] + k not in num[coord[i][0] + j]:
            num[coord[i][0] + j][coord[i][1] + k] = 1
        else:
            num[coord[i][0] + j][coord[i][1] + k] += 1

ans = [0] * 10
for i in num:
    for value in num[i].values():
        ans[value] += 1

# Centres whose window contains no bomb at all.
print((h - 2) * (w - 2) - sum(ans))
for i in range(1, 10):
    print(ans[i])
<filename>dev/app/home/home.component.ts "use strict"; import { Router } from "@angular/router"; import { Component, OnInit, Input } from "@angular/core"; import { Observable } from "rxjs/Observable"; import { Subscription } from "rxjs/Subscription"; // services import { User, UserService } from "../Services/users.service"; import { Ad, AdsService } from "../Services/ads.service"; import { AuthService } from "../Services/authentication.service"; import { StandingData, Lists } from "../Services/standing.data.service"; import { PagerService } from "../Services/pager.service"; import { SearchService } from "../Services/search.service"; @Component ({ // selector: "user-list", templateUrl: "home.component.html", styleUrls: [ "home.component.css" ], providers: [ AdsService ] }) export class HomeComponent implements OnInit { private lists: Lists; private ads: Ad[]; private adsCount: number; private loading: boolean; private active: boolean; private error: string; private search: string; private isInSearchResults: boolean; private activeCategory: string; private activeCity: string; private searchAds: Ad[]; private startIdx: number; private count: number; private pager: any = {}; private pagedItems: any[]; private currentPage: number; constructor(private router: Router, private userService: UserService, private adsService: AdsService, private authService: AuthService, private standingData: StandingData, private pagerService: PagerService, private searchService: SearchService) { this.setErrorMsg(""); this.lists = new Lists([], [], []); this.ads = <Ad[]>[]; this.adsCount = 0; this.loadStandingData(); this.searchAds = <Ad[]>[]; this.search = ""; this.isInSearchResults = false; this.activeCategory = ""; this.activeCity = ""; this.startIdx = 0; this.count = 9; this.currentPage = 0; // this.pager = this.pagerService.getPager(this.adsCount || this.count, this.currentPage); // this.getHomeAds(this.startIdx, this.count); } // constructor() public loadStandingData(): void { this.lists = this.standingData.getLists() } // loadStandingData() public getHomeAds(startIdx: number = this.startIdx, count: number = this.count): void { this.adsService.getAds(startIdx, count).subscribe( res => { this.ads = res; this.adsCount = this.adsService.count; this.isInSearchResults = false; this.pager = this.pagerService.getPager(this.adsCount || 0, this.currentPage); this.pagedItems = this.ads; }, error => { this.setErrorMsg("Could not retrieve ads!"); console.error(this.error); } ) } // getHomeAds() private previewAd(id: string): void { if (id) { this.router.navigate([ '/ads/preview', id ]); } } // previewAd() private changeActiveCategory(cat: string): void { if (cat.trim()) { if (cat.trim() !== this.activeCategory) { this.activeCategory = cat.trim(); } else { this.activeCategory = ""; } } } // changeActiveCategory() private changeActiveCity(city: string): void { if (city.trim()) { if (city.trim() !== this.activeCity) { this.activeCity = city.trim(); } else { this.activeCity = ""; } } } // changeActiveCategory() private startSearch(): void { if (this.search.trim()) { console.log(`Entering startSearch()`); this.searchService.searchInAds(this.search.trim(), this.activeCategory, this.activeCity).subscribe( res => { this.searchAds = res; this.adsCount = this.searchService.count; //this.ads = this.searchAds.slice(0, Math.min(this.count - 1, this.adsCount)); console.log(`this.searchAds: ${JSON.stringify(this.searchAds)}`); console.log(`this.adsCount: ${this.adsCount}`); this.pager = this.pagerService.getPager(this.adsCount, 1, 
        this.count);
          this.displaySearchResults(this.pager.startIndex, this.pager.endIndex);
          this.isInSearchResults = true;
        },
        error => {
          this.setErrorMsg("Could not execute search!");
          console.error(this.error);
        }
      )
    }
  } // startSearch()

  private displaySearchResults(start: number = this.pager.startIndex,
                               end: number = this.pager.endIndex): void {
    if (this.searchAds && this.adsCount) {
      this.pagedItems = this.searchAds.slice(start, end + 1);
    } else {
      this.pagedItems = [];
    }
  } // displaySearchResults()

  // private helpers
  private setErrorMsg(errMsg?: string): void {
    // Guard against the optional argument being undefined before trimming.
    let msg = (errMsg || "").trim();
    if (msg) {
      this.error = msg;
      setTimeout(() => this.error = "", 5000 /* ms */);
    } else this.error = "";
  } // setErrorMsg()

  private setPage(page: number): void {
    if (page < 1 || page > this.pager.totalPages) {
      return;
    }
    this.pager = this.pagerService.getPager(this.adsCount || this.count, page);
    // this.pagedItems = this.ads.slice(this.pager.startIndex, this.pager.endIndex + 1);
    if (this.currentPage !== page) {
      if (!this.isInSearchResults) {
        this.getHomeAds(this.pager.startIndex, this.count);
      } else {
        this.displaySearchResults(this.pager.startIndex, this.pager.endIndex);
      }
      this.currentPage = page;
    }
    // this.pager = this.pagerService.getPager(this.dummyItems.length, page);
    // this.pagedItems = this.dummyItems.slice(this.pager.startIndex, this.pager.endIndex + 1);
  } // setPage()

  ngOnInit() {
    this.loading = false;
    this.active = true;
    this.setPage(1);
  } // ngOnInit()
} // class HomeComponent
/************************************ * The BridgeSettingsTelegram class * ************************************/ export interface BridgeSettingsTelegramProperties { chatId: number; sendUsernames: boolean; relayCommands: boolean; relayJoinMessages: boolean; relayLeaveMessages: boolean; crossDeleteOnDiscord: boolean; ignoreCommands?: boolean; } /** * Holds settings for the Telegram part of a bridge */ export class BridgeSettingsTelegram { public chatId: number; public sendUsernames: boolean; public relayJoinMessages: boolean; public relayLeaveMessages: boolean; public crossDeleteOnDiscord: boolean; public relayCommands: boolean; /** * Creates a new BridgeSettingsTelegram object * * @param {Object} settings Settings for the Telegram side of the bridge * @param {Integer} settings.chatId ID of the Telegram chat to bridge * @param {Boolean} settings.relayJoinMessages Whether or not to relay join messages from Telegram to Discord * @param {Boolean} settings.relayLeaveMessages Whether or not to relay leave messages from Telegram to Discord */ constructor(settings: BridgeSettingsTelegramProperties) { // Check that the settings object is valid BridgeSettingsTelegram.validate(settings); /** * ID of the Telegram chat to bridge * * @type {Integer} */ this.chatId = Number.parseInt(settings.chatId.toString()); /** * Whether or not to relay join messages from Telegram to Discord * * @type {Boolean} */ this.relayJoinMessages = settings.relayJoinMessages; /** * Whether or not to relay join messages from Telegram to Discord * * @type {Boolean} */ this.relayLeaveMessages = settings.relayLeaveMessages; /** * Whether or not to send the user's name as part of the messages to Discord * * @type {Boolean} */ this.sendUsernames = settings.sendUsernames; /** * Whether or not to relay messages starting with "/" (commands) * * @type {Boolean} */ this.relayCommands = settings.relayCommands; /** * Whether or not to delete messages when they are edited to be a single dot * * @type {Boolean} */ this.crossDeleteOnDiscord = settings.crossDeleteOnDiscord; } /** * Validates a raw settings object, checking if it is usable for creating a BridgeSettingsTelegram object * * @param {Object} settings The object to validate * * @throws {Error} If the object is not suitable. The error message says what the problem is */ static validate(settings: BridgeSettingsTelegramProperties) { // Check that the settings are indeed in object form if (!(settings instanceof Object)) { throw new Error("`settings` must be an object"); } // Check that relayJoinMessages is a boolean if (Boolean(settings.relayJoinMessages) !== settings.relayJoinMessages) { throw new Error("`settings.relayJoinMessages` must be a boolean"); } // Check that relayLeaveMessages is a boolean if (Boolean(settings.relayLeaveMessages) !== settings.relayLeaveMessages) { throw new Error("`settings.relayLeaveMessages` must be a boolean"); } // Check that sendUsernames is a boolean if (Boolean(settings.sendUsernames) !== settings.sendUsernames) { throw new Error("`settings.sendUsernames` must be a boolean"); } // Check that relayCommands is a boolean if (Boolean(settings.relayCommands) !== settings.relayCommands) { throw new Error("`settings.relayCommands` must be a boolean"); } // Check that crossDeleteOnDiscord is a boolean if (Boolean(settings.crossDeleteOnDiscord) !== settings.crossDeleteOnDiscord) { throw new Error("`settings.crossDeleteOnDiscord` must be a boolean"); } } }
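A minimal usage sketch of the class above; the chat ID and flag values are placeholders, not taken from any real configuration.

// Hypothetical settings object; all values are illustrative.
const settings: BridgeSettingsTelegramProperties = {
    chatId: -1001234567890,
    sendUsernames: true,
    relayCommands: false,
    relayJoinMessages: true,
    relayLeaveMessages: true,
    crossDeleteOnDiscord: true
};

// validate() is called by the constructor and throws on the first bad field.
const telegramSettings = new BridgeSettingsTelegram(settings);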
def associative_scan(fn, elems):
    elems_flat, tree = tree_flatten(elems)

    def lowered_fn(a_flat, b_flat):
        # Lower `fn` to operate on flattened sequences of elems.
        a = tree_unflatten(tree, a_flat)
        b = tree_unflatten(tree, b_flat)
        c = fn(a, b)
        c_flat, _ = tree_flatten(c)
        return c_flat

    # Check that all inputs share the same leading dimension `num_elems`.
    num_elems = int(elems_flat[0].shape[0])
    if not all(int(elem.shape[0]) == num_elems for elem in elems_flat[1:]):
        raise ValueError('Input `Tensor`s must have the same first dimension.'
                         ' (saw: {})'.format([elem.shape for elem in elems_flat]))

    if num_elems < 2:
        return elems

    # Summary of the algorithm:
    #
    # Consider elements of `_scan(elems)` at odd indices. That's the same as
    # first summing successive pairs of elements of `elems` and performing a
    # scan on that half-sized tensor. We perform the latter scan by recursion.
    #
    # Now consider the even elements of `_scan(elems)`. These can be computed
    # from the odd elements of `_scan(elems)` by adding each odd element of
    # `_scan(elems)` to the matching even element in the original `elems`.
    #
    # We return the odd and even elements interleaved.
    #
    # For the base case of the recursion we return the first element of
    # `elems` followed by the sum of the first two elements computed as a
    # (small two-down-to-one) reduction step.
    def _scan(elems):
        num_elems = elems[0].shape[0]

        reduced_elems = lowered_fn([elem[0:-1:2] for elem in elems],
                                   [elem[1::2] for elem in elems])

        if reduced_elems[0].shape[0] == 1:
            # Base case has either 2 or 3 elements.
            if num_elems == 2:
                return [lax.concatenate([elem[0:1], reduced_elem], dimension=0)
                        for (reduced_elem, elem) in zip(reduced_elems, elems)]
            elif num_elems == 3:
                reduced_reduced_elems = lowered_fn(
                    reduced_elems, [elem[2:3] for elem in elems])
                return [
                    lax.concatenate([elem[0:1], reduced_elem, reduced_reduced_elem],
                                    dimension=0)
                    for (reduced_reduced_elem, reduced_elem, elem)
                    in zip(reduced_reduced_elems, reduced_elems, elems)]

        # Recursively compute scan for partially reduced tensors.
        odd_elems = _scan(reduced_elems)

        if num_elems % 2 == 0:
            results = lowered_fn([odd_elem[:-1] for odd_elem in odd_elems],
                                 [elem[2::2] for elem in elems])
        else:
            results = lowered_fn([odd_elem for odd_elem in odd_elems],
                                 [elem[2::2] for elem in elems])

        # The first element of a scan is the same as the first element of the
        # original `elems`.
        even_elems = [lax.concatenate([elem[0:1], result], dimension=0)
                      for (elem, result) in zip(elems, results)]

        return tuple(_map(_interleave, even_elems, odd_elems))

    scans = _scan(elems_flat)
    return tree_unflatten(tree, scans)
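A minimal sketch of calling the scan above with addition as the combining function, which reduces it to an inclusive cumulative sum; this assumes the surrounding module's helpers (tree_flatten, lax, and friends) are in scope.

import jax.numpy as jnp

# With fn = addition, associative_scan is an inclusive cumulative sum.
xs = jnp.array([1., 2., 3., 4., 5.])
out = associative_scan(lambda a, b: a + b, xs)
# Expected: [1., 3., 6., 10., 15.], matching jnp.cumsum(xs)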
// writeTo serializes the processing instruction to the writer. func (p *ProcInst) writeTo(w *bufio.Writer, trimmed bool) { w.WriteString("<?") w.WriteString(p.Target) w.WriteByte(' ') w.WriteString(p.Inst) w.WriteString("?>") }
import java.util.*; import java.io.*; public class A110 { public static void main(String[] args) throws IOException{ BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); long n = Long.parseLong(br.readLine()); int l = 0; while(n > 0){ if(n%10 == 4 || n%10 == 7) l++; n= n/10; } if(l ==0){ pln("NO"); return; } while(l>0){ if(l%10 != 4 && l%10 != 7){ pln("NO"); return; } l = l/10; } pln("YES"); } public static <T> void pln(T val){ System.out.println(val); } }
def _compute_binary_classification_accuracy(self, h:torch.tensor, t_binary:torch.tensor): assert h.size(0) == t_binary.size(0) >= 0 assert len(h.size()) == len(t_binary.size()) == 1 if h.size(0) == t_binary.size(0) == 0: acc = 0.0 pre = 0.0 rec = 0.0 else: condition_true = (h==t_binary) condition_false = (h!=t_binary) condition_pos = (h==torch.ones_like(h)) condition_neg = (h==torch.zeros_like(h)) true_pos = torch.where(condition_true & condition_pos, torch.ones_like(h), torch.zeros_like(h)) true_neg = torch.where(condition_true & condition_neg, torch.ones_like(h), torch.zeros_like(h)) false_pos = torch.where(condition_false & condition_pos, torch.ones_like(h), torch.zeros_like(h)) false_neg = torch.where(condition_false & condition_neg, torch.ones_like(h), torch.zeros_like(h)) tp = float(true_pos.sum()) tn = float(true_neg.sum()) fp = float(false_pos.sum()) fn = float(false_neg.sum()) acc = float((tp+tn)/(tp+tn+fp+fn+1e-12)) pre = float(tp/(tp+fp+1e-12)) rec = float(tp/(tp+fn+1e-12)) return acc, pre, rec
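A quick hedged sanity check of the metric helper above on a toy prediction/target pair; `model` stands in for an instance of the class that defines the method, and the tensor values are illustrative.

import torch

h = torch.tensor([1, 0, 1, 1])   # predictions
t = torch.tensor([1, 0, 0, 1])   # binary targets
# tp=2, tn=1, fp=1, fn=0  ->  accuracy 0.75, precision 2/3, recall 1.0
acc, pre, rec = model._compute_binary_classification_accuracy(h, t)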
Credit: DC Comics Updated October 5, 2017: Bryan Hitch has told Newsarama that he never agreed to write Justice League #32 and #33 despite DC soliciting him as writer for those November issues, and that his final issue will be Justice League #31 as he originally intended. "Justice League #31 is my last issue (Legacy part 6)," Hitch said. "I was never doing the Metal tie-ins on #32 and #33." Original Story: Robert Venditti and Joshua Williamson are stepping in to write Bryan Hitch's penultimate issue of Justice League, according to a notice DC sent out to retailers. Justice League #32, the second part of the Dark Nights: Metal "Bats Out of Hell" crossover, remains scheduled for release November 1. Hitch remains listed as the writer of Justice League #33, the title's second Dark Nights: Metal tie-in and his last solicited issue before new regular writer Christopher Priest comes onboard. Here is the updated solicitation: Credit: DC Comics JUSTICE LEAGUE #32 Written by ROBERT VENDITTI & JOSHUA WILLIAMSON Art by LIAM SHARP Connecting covers by ETHAN VAN SCIVER JUSTICE LEAGUE variant cover by HOWARD PORTER Chapter one of this story was solicited in the August Previews in THE FLASH #33 under the title “Justice Insurgence.” A METAL tie-in! “BATS OUT OF HELL” part two! The Justice League have been separated by the dreaded Dark Knights and are forced into twisted Bat-Caves designed to kill them! The Batman Who Laughs and the Murder Machine experiment on Cyborg, seeing how he reacts to the horrors of the Dark Multiverse! Can the Justice League survive their worst nightmares made real?! On sale NOVEMBER 1 • 32 pg, FC, $2.99 US • RATED T
import http from "../utils/http"; export const getHitokotoApi = () => http.get("https://v1.hitokoto.cn/"); export const getWeatherApi = () => http.get( "https://www.tianqiapi.com/api?unescape=1&version=v6&appid=33896864&appsecret=BR49xbsH&city=" + 123 );
#pragma once void test_conversion();
package rds

import (
	"strings"

	atlas "github.com/infobloxopen/atlas-db/pkg/apis/db/v1alpha1"
	"github.com/infobloxopen/atlas-db/pkg/server/mysql"
	"github.com/infobloxopen/atlas-db/pkg/server/plugin"
	"github.com/infobloxopen/atlas-db/pkg/server/postgres"
)

type RDSPlugin atlas.RDSPlugin

func Convert(a *atlas.RDSPlugin) *RDSPlugin {
	p := RDSPlugin(*a)
	return &p
}

func (p *RDSPlugin) Name() string {
	return "RDS"
}

// DatabasePlugin selects the engine-specific plugin for this RDS instance.
func (p *RDSPlugin) DatabasePlugin() plugin.DatabasePlugin {
	switch strings.ToLower(p.Engine) {
	case "mysql":
		return &mysql.MySQLPlugin{}
	case "postgres":
		return &postgres.PostgresPlugin{}
	}
	return nil
}

// Dsn delegates DSN construction to the engine-specific plugin, forwarding
// the database argument instead of dropping it in favor of nil.
func (p *RDSPlugin) Dsn(userName string, password string, db *atlas.Database, s *atlas.DatabaseServer) string {
	return p.DatabasePlugin().Dsn(userName, password, db, s)
}

func (p *RDSPlugin) SyncCloud(key string, s *atlas.DatabaseServer) error {
	return nil
}
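The conversion-plus-dispatch pattern above can be exercised as in this hedged sketch; everything other than the Engine field and the functions defined above is illustrative.

// Hypothetical usage of the rds package above; engine matching is
// case-insensitive, so "Postgres" selects PostgresPlugin.
a := &atlas.RDSPlugin{Engine: "Postgres"}
p := rds.Convert(a)
if dbPlugin := p.DatabasePlugin(); dbPlugin == nil {
	// Unknown engine: callers must handle the nil return explicitly.
}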
import VnNative3ConsoleCore from "./console"; export default class VnNative3Console extends VnNative3ConsoleCore {}
import unittest import numpy as np from os import path import pickle import tempfile from mutual_information import mf, mf_random class TestMFRandom(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() M = 10 N = 10000 phenotype_list = ['HP:' + str(i + 1) for i in np.arange(M)] self.summary = mf.SummaryXYz(X_names=phenotype_list, Y_names=phenotype_list, z_name='heart failure') np.random.seed(11) self.d = np.random.randint(0, 2, N) self.P = np.random.randint(0, 2, M * N).reshape([N, M]) self.summary.add_batch(self.P, self.P, self.d) self.heart_failure = mf.MutualInfoXYz(self.summary) def test_matrix_searchsorted(self): ordered = np.arange(24).reshape([2, 3, 4]) query = np.array([[-1, 4, 8.5], [13.5, 19, 24]]) idx = mf_random.matrix_searchsorted(ordered, query) expected = [[0, 0, 1], [2, 3, 4]] self.assertEqual(idx.tolist(), expected) def test_create_empirical_distribution(self): diag_prevalence = 0.3 phenotype_prob = np.random.uniform(0, 1, 10) sample_per_simulation = 500 simulations = 100 distribution = mf_random.create_empirical_distribution(diag_prevalence, phenotype_prob, phenotype_prob, sample_per_simulation, simulations) self.assertEqual(list(distribution['mf_XY_omit_z'].shape), [10, 10, 100]) self.assertEqual(list(distribution['mf_Xz'].shape), [10, 100]) self.assertEqual(list(distribution['mf_Yz'].shape), [10, 100]) self.assertEqual(list(distribution['mf_XY_z'].shape), [10, 10, 100]) self.assertEqual(list(distribution['mf_XY_given_z'].shape), [10, 10, 100]) self.assertEqual(list(distribution['synergy'].shape), [10, 10, 100]) def test_p_value_estimate(self): ordered = np.arange(24).reshape([2, 3, 4]) query = np.array([[-1, 4, 8.5], [13.5, 19, 24]]) idx = mf_random.p_value_estimate(query, ordered, alternative='two.sided') expected = [[0, 0.5, 0.5], [1, 0.5, 0]] self.assertEqual(idx.tolist(), expected, 'two.sided p value estimate ' 'failed') idx = mf_random.p_value_estimate(query, ordered, alternative='left') expected = [[0, 0.25, 0.25], [0.5, 1, 1]] self.assertEqual(idx.tolist(), expected, 'left sided p value estimate ' 'failed') idx = mf_random.p_value_estimate(query, ordered, alternative='right') expected = [[1, 1, 0.75], [0.5, 0.25, 0]] self.assertEqual(idx.tolist(), expected, 'right sided p value estimate ' 'failed') self.assertRaises(ValueError, lambda: mf_random.p_value_estimate(query, ordered, alternative='e')) def test_synergy_random(self): disease_prevalence = 0.4 phenotype_prob = np.random.uniform(0, 1, 10) sample_per_simulation = 5000 S = mf_random.synergy_random(disease_prevalence, phenotype_prob, phenotype_prob, sample_per_simulation)['synergy'] np.testing.assert_almost_equal(S, np.zeros(S.shape), decimal=3) def test_serializing_instance(self): cases = sum(self.d) with open(path.join(self.tempdir, 'test_serializing.obj'), 'wb') as \ serializing_file: pickle.dump(self.heart_failure, serializing_file) with open(path.join(self.tempdir, 'test_serializing.obj'), 'rb') as \ serializing_file: deserialized = pickle.load(serializing_file) self.assertEqual(deserialized.z_name, 'heart failure') self.assertEqual(deserialized.case_N, cases) self.assertEqual(deserialized.synergy_XY2z().all(), self.heart_failure.synergy_XY2z().all()) def test_SynergyRandomiserforSynergy(self): randomiser = mf_random.MutualInfoRandomizer(self.heart_failure) # print(self.heart_failure.m1['set1']) # print(self.heart_failure.m2) randomiser.simulate(simulations=100) p_matrix = randomiser.p_values()['synergy'] M = p_matrix.shape[0] # print(p_matrix) # print(np.diagonal(p_matrix)) # 
        # print(np.sum(np.triu(p_matrix < 0.05)) / (M * (M - 1) / 2))
        self.assertTrue(np.sum(np.triu(p_matrix < 0.05))
                        < 2 * 0.05 * (M * (M - 1) / 2))
        p_matrix = randomiser.p_values('Bonferroni')
        # print(p_matrix)


if __name__ == '__main__':
    unittest.main()
Dependency-related parameters in the reconstruction of a layered software architecture Software architecture reconstruction techniques may be used to understand and maintain software systems, especially in cases where architectural documentation is outdated or missing. Reconstruction of layers is interesting, since the Layers pattern is commonly used in practice. Layer reconstruction algorithms are based on dependency analysis. In this paper, we define two dependency-related parameters and explore their impact on the results of a layer reconstruction algorithm. The first parameter concerns the types of dependencies between software units that are included in the algorithm. The second parameter concerns the maximum allowed ratio of back-call dependencies between two software units in different layers. By means of experiments on a case system and of conformance-checking results, we explain and illustrate the impact of differences in parameter settings. We show that these parameters have a large impact. Consequently, the exact specification of the parameter values used is relevant in publications. Furthermore, parameter configuration options may be considered to improve tool support.
from django.urls import path from .views import * urlpatterns = [ path('category_create/', CategoryCreate.as_view(), name='category_create'), path('post_create/', post_create, name='post_create'), path('post_details/<int:pk>/', PostDetails.as_view(), name='post_details'), path('post_update/<int:pk>/', PostUpdate.as_view(), name='post_update'), path('post_delete/<int:pk>/', PostDelete.as_view(), name='post_delete'), path('post_list/', PostList.as_view(), name='post_list'), path("post_like/<slug>/", PostLike.as_view(), name='post_like'), path('post_comment/<int:pk>/', PostComment.as_view(), name='post_comment'), ]
// ****************************************************************************
//  Method: trapezoidAverage
//
//  Purpose:
//      Return the averaged node value over layers by the trapezoid method.
//
//  Arguments:
//      a_state     layered node variable with size of mesh layers
//      a_z         elevation of every layer, size of mesh layers
//      a_layerNum  number of mesh layers
//
// ****************************************************************************

float trapezoidAverage(float *a_state, float *a_z, const int &a_layerNum)
{
    float sum = 0.0;
    const float half = 0.5;

    // Integrate the state over depth with the trapezoid rule, layer by layer.
    for (int iLayer = 0; iLayer < a_layerNum - 1; iLayer++)
    {
        float state1 = a_state[iLayer];
        float state2 = a_state[iLayer + 1];
        float z1 = a_z[iLayer];
        float z2 = a_z[iLayer + 1];
        sum += (state1 + state2) * (z2 - z1) * half;
    }

    // Divide by the total column depth to get the average.
    float average = sum / (-a_z[0] + a_z[a_layerNum - 1]);
    return average;
}
/**
 * Add a new delay rule to LNet
 * There is no check for duplicated delay rule, all rules will be checked for
 * incoming message.
 */
int lnet_delay_rule_add(struct lnet_fault_attr *attr)
{
	struct lnet_delay_rule *rule;
	int rc = 0;

	/* A rule is triggered either by rate or by interval, never both. */
	if (attr->u.delay.la_rate && attr->u.delay.la_interval) {
		CDEBUG(D_NET,
		       "please provide either delay rate or delay interval, but not both at the same time %d/%d\n",
		       attr->u.delay.la_rate, attr->u.delay.la_interval);
		return -EINVAL;
	}

	if (!attr->u.delay.la_latency) {
		CDEBUG(D_NET, "delay latency cannot be zero\n");
		return -EINVAL;
	}

	if (lnet_fault_attr_validate(attr))
		return -EINVAL;

	CFS_ALLOC_PTR(rule);
	if (!rule)
		return -ENOMEM;

	mutex_lock(&delay_dd.dd_mutex);
	if (!delay_dd.dd_running) {
		struct task_struct *task;

		/* Start the daemon thread that delivers delayed messages. */
		task = kthread_run(lnet_delay_rule_daemon, NULL, "lnet_dd");
		if (IS_ERR(task)) {
			rc = PTR_ERR(task);
			goto failed;
		}
		wait_event(delay_dd.dd_ctl_waitq, delay_dd.dd_running);
	}

	setup_timer(&rule->dl_timer, delay_timer_cb, (unsigned long)rule);
	spin_lock_init(&rule->dl_lock);
	INIT_LIST_HEAD(&rule->dl_msg_list);
	INIT_LIST_HEAD(&rule->dl_sched_link);

	rule->dl_attr = *attr;
	if (attr->u.delay.la_interval) {
		rule->dl_time_base = cfs_time_shift(attr->u.delay.la_interval);
		rule->dl_delay_time = cfs_time_shift(cfs_rand() %
						     attr->u.delay.la_interval);
	} else {
		rule->dl_delay_at = cfs_rand() % attr->u.delay.la_rate;
	}
	rule->dl_msg_send = -1;

	lnet_net_lock(LNET_LOCK_EX);
	atomic_set(&rule->dl_refcount, 1);
	list_add(&rule->dl_link, &the_lnet.ln_delay_rules);
	lnet_net_unlock(LNET_LOCK_EX);

	CDEBUG(D_NET, "Added delay rule: src %s, dst %s, rate %d\n",
	       libcfs_nid2str(attr->fa_src), libcfs_nid2str(attr->fa_dst),
	       attr->u.delay.la_rate);

	mutex_unlock(&delay_dd.dd_mutex);
	return 0;
failed:
	mutex_unlock(&delay_dd.dd_mutex);
	CFS_FREE_PTR(rule);
	return rc;
}
/**************************************************************************** * netutils/webserver/httpd_dirlist.c * * Copyright 2019 Sony Home Entertainment & Sound Products Inc. * Author: <NAME> <<EMAIL>> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * 3. Neither the name NuttX nor the names of its contributors may be * used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * ****************************************************************************/ /**************************************************************************** * Included Header Files ****************************************************************************/ #include <nuttx/config.h> #include <sys/types.h> #include <sys/stat.h> #include <sys/utsname.h> #include <limits.h> #include <unistd.h> #include <fcntl.h> #include <stdio.h> #include <errno.h> #include <debug.h> #include <string.h> #include <dirent.h> #include <stdlib.h> #include "netutils/httpd.h" #include "httpd.h" /**************************************************************************** * Pre-processor Definitions ****************************************************************************/ #ifdef CONFIG_ARCH_BOARD_CUSTOM # ifndef CONFIG_ARCH_BOARD_CUSTOM_NAME # define BOARD_NAME g_unknown # else # define BOARD_NAME CONFIG_ARCH_BOARD_CUSTOM_NAME # endif #else # ifndef CONFIG_ARCH_BOARD # define BOARD_NAME g_unknown # else # define BOARD_NAME CONFIG_ARCH_BOARD # endif #endif #define BUF_SIZE 1024 #define HEADER \ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n" \ "<html>\n" \ "<head>\n" #define TITLE \ " <title>Index of /%s</title>\n" \ "</head>\n" \ "<body>\n" \ "<h2>Index of /%s</h2>\n" \ "<table>\n" #define TABLE \ " <tr><th valign=\"top\"></th><th>Name</a></th><th>Last modified</a>" \ "</th><th>Size</a></th></tr>\n" \ " <tr><th colspan=\"4\"><hr></th></tr>\n" #define PARENT \ " <tr><td valign=\"top\"></td><td><a href=\"..\">../</a></td>\n" \ " <td align=\"right\"> - </td>" \ "<td align=\"right\"> - </td><td>&nbsp; </td></tr>\n" #define ENTRY \ " <tr><td valign=\"top\"></td><td><a href=\"%s%s\">%s%s</a></td>\n" \ " <td align=\"right\">%4d-%02d-%02d %02d:%02d</td>" \ "<td align=\"right\"> %s </td><td>&nbsp; </td></tr>\n" #define FOOTER \ " <tr><th colspan=\"4\"><hr></th></tr>\n" \ 
"</table>\n" \ "<address>uIP web server (%s %s %s %s %s) </address>\n" \ "</body>\n" \ "</html>\n" /**************************************************************************** * Public Functions ****************************************************************************/ /**************************************************************************** * Name: httpd_is_file ****************************************************************************/ bool httpd_is_file(FAR const char *filename) { char *path; int fd; bool ret = false; path = malloc(CONFIG_NAME_MAX); ASSERT(path); snprintf(path, CONFIG_NAME_MAX, "%s/%s", CONFIG_NETUTILS_HTTPD_PATH, filename); fd = open(path, O_RDONLY); if (-1 != fd) { close(fd); ret = true; } free(path); return ret; } /**************************************************************************** * Name: httpd_dirlist ****************************************************************************/ ssize_t httpd_dirlist(int outfd, FAR struct httpd_fs_file *file) { struct dirent *dent; struct utsname info; struct stat buf; struct tm tm; ssize_t total; ssize_t ret; char size[16]; char *path; char *tmp; DIR *dir; total = 0; tmp = (char *)calloc(1, BUF_SIZE); ASSERT(tmp); /* exclude document root path */ path = file->path + sizeof(CONFIG_NETUTILS_HTTPD_PATH); /* compose hdr/title/path/table and write them */ if ('\0' == *path) { snprintf(tmp, BUF_SIZE, HEADER TITLE TABLE, "", ""); } else { snprintf(tmp, BUF_SIZE, HEADER TITLE TABLE PARENT, path, path); } ret = write(outfd, tmp, strlen(tmp)); if (-1 == ret) { goto errout; } total += ret; dir = opendir(file->path); if (NULL == dir) { goto errout_with_hdr; } while (true) { dent = readdir(dir); if (NULL == dent) { break; } path = malloc(CONFIG_NAME_MAX); ASSERT(path); snprintf(path, CONFIG_NAME_MAX, "%s/%s", file->path, dent->d_name); /* call stat() to obtain modified time and size */ ret = stat(path, &buf); ASSERT(0 == ret); free(path); localtime_r(&buf.st_mtime, &tm); /* compose an entry name for directory or file */ if (dent->d_type == DTYPE_DIRECTORY) { snprintf(tmp, BUF_SIZE, ENTRY, dent->d_name, "/", dent->d_name, "/", tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, "-" ); } else { snprintf(size, sizeof(size), "%d", buf.st_size); snprintf(tmp, BUF_SIZE, ENTRY, dent->d_name, "", dent->d_name, "", tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, size ); } ret = write(outfd, tmp, strlen(tmp)); if (-1 == ret) { break; } total += ret; } closedir(dir); errout_with_hdr: memset(&info, 0, sizeof(info)); uname(&info); snprintf(tmp, BUF_SIZE, FOOTER, info.sysname, info.release, info.version, info.machine, BOARD_NAME); ret = write(outfd, tmp, strlen(tmp)); if (-1 != ret) { total += ret; } ret = total; errout: free(tmp); return ret; }
<gh_stars>10-100 package tl.lin.lucene.wikipedia; import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.IntField; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.Version; import org.wikiclean.WikiClean; import org.wikiclean.WikiCleanBuilder; import org.wikiclean.WikipediaBz2DumpInputStream; public class IndexWikipediaDump { private static final Logger LOG = Logger.getLogger(IndexWikipediaDump.class); public static final Analyzer ANALYZER = new StandardAnalyzer(); static final FieldType TITLE_FIELD_TYPE = new FieldType(); static { TITLE_FIELD_TYPE.setIndexed(true); TITLE_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); TITLE_FIELD_TYPE.setStored(true); TITLE_FIELD_TYPE.setTokenized(true); } static final FieldType TEXT_FIELD_TYPE = new FieldType(); public static enum IndexField { ID("id"), TITLE("title"), TEXT("text"); public final String name; IndexField(String s) { name = s; } }; private static final int DEFAULT_NUM_THREADS = 4; private static final String INPUT_OPTION = "input"; private static final String INDEX_OPTION = "index"; private static final String MAX_OPTION = "maxdocs"; private static final String OPTIMIZE_OPTION = "optimize"; private static final String THREADS_OPTION = "threads"; private static final String INDEX_POSITIONS_OPTION = "indexTermPositions"; private static final String STORE_TEXT_OPTION = "storeText"; @SuppressWarnings("static-access") public static void main(String[] args) throws Exception { Options options = new Options(); options.addOption(OptionBuilder.withArgName("path").hasArg() .withDescription("bz2 Wikipedia XML dump file").create(INPUT_OPTION)); options.addOption(OptionBuilder.withArgName("dir").hasArg() .withDescription("index location").create(INDEX_OPTION)); options.addOption(OptionBuilder.withArgName("num").hasArg() .withDescription("maximum number of documents to index").create(MAX_OPTION)); options.addOption(OptionBuilder.withArgName("num").hasArg() .withDescription("number of indexing threads").create(THREADS_OPTION)); options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment")); options.addOption(new Option(INDEX_POSITIONS_OPTION, "indexes term positions in text for phrase queries")); options.addOption(new Option(STORE_TEXT_OPTION, "stores the actual text")); CommandLine cmdline = null; CommandLineParser parser = new GnuParser(); try { cmdline = parser.parse(options, args); } catch (ParseException exp) { System.err.println("Error parsing command line: " + exp.getMessage()); System.exit(-1); 
} if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(IndexWikipediaDump.class.getCanonicalName(), options); System.exit(-1); } String indexPath = cmdline.getOptionValue(INDEX_OPTION); int maxdocs = cmdline.hasOption(MAX_OPTION) ? Integer.parseInt(cmdline.getOptionValue(MAX_OPTION)) : Integer.MAX_VALUE; int threads = cmdline.hasOption(THREADS_OPTION) ? Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION)) : DEFAULT_NUM_THREADS; TEXT_FIELD_TYPE.setIndexed(true); if (cmdline.hasOption(INDEX_POSITIONS_OPTION)) { TEXT_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); } else { TEXT_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS); } if (cmdline.hasOption(STORE_TEXT_OPTION)) { TEXT_FIELD_TYPE.setStored(true); } TEXT_FIELD_TYPE.setTokenized(true); long startTime = System.currentTimeMillis(); String path = cmdline.getOptionValue(INPUT_OPTION); PrintStream out = new PrintStream(System.out, true, "UTF-8"); WikiClean cleaner = new WikiCleanBuilder().withTitle(true).build(); Directory dir = FSDirectory.open(new File(indexPath)); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_10_2, ANALYZER); config.setOpenMode(OpenMode.CREATE); IndexWriter writer = new IndexWriter(dir, config); LOG.info("Creating index at " + indexPath); LOG.info("Indexing with " + threads + " threads"); LOG.info("Indexing term positions in main text: " + cmdline.hasOption(INDEX_POSITIONS_OPTION)); LOG.info("Storing text: " + cmdline.hasOption(STORE_TEXT_OPTION)); try { WikipediaBz2DumpInputStream stream = new WikipediaBz2DumpInputStream(path); ExecutorService executor = Executors.newFixedThreadPool(threads); int cnt = 0; String page; while ((page = stream.readNext()) != null) { String title = cleaner.getTitle(page); // These are heuristic specifically for filtering out non-articles in enwiki-20120104. 
if (title.startsWith("Wikipedia:") || title.startsWith("Portal:") || title.startsWith("File:")) { continue; } // Filtering based on enwiki-20141106 if (title.startsWith("Draft:")) { continue; } if (page.contains("#REDIRECT") || page.contains("#redirect") || page.contains("#Redirect")) { continue; } Runnable worker = new AddDocumentRunnable(writer, cleaner, page); executor.execute(worker); cnt++; if (cnt % 10000 == 0) { LOG.info(cnt + " articles added"); } if (cnt >= maxdocs) { break; } } executor.shutdown(); // Wait until all threads are finish while (!executor.isTerminated()) {} LOG.info("Total of " + cnt + " articles indexed."); if (cmdline.hasOption(OPTIMIZE_OPTION)) { LOG.info("Merging segments..."); writer.forceMerge(1); LOG.info("Done!"); } LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms"); } catch (Exception e) { e.printStackTrace(); } finally { writer.close(); dir.close(); out.close(); } } private static class AddDocumentRunnable implements Runnable { private final IndexWriter writer; private final WikiClean cleaner; private final String page; AddDocumentRunnable(IndexWriter writer, WikiClean cleaner, String page) { this.writer = writer; this.cleaner = cleaner; this.page = page; } @Override public void run() { Document doc = new Document(); doc.add(new IntField(IndexField.ID.name, Integer.parseInt(cleaner.getId(page)), Field.Store.YES)); doc.add(new Field(IndexField.TEXT.name, cleaner.clean(page), TEXT_FIELD_TYPE)); doc.add(new Field(IndexField.TITLE.name, cleaner.getTitle(page), TITLE_FIELD_TYPE)); try { writer.addDocument(doc); } catch (IOException e) { e.printStackTrace(); } } } }
#include <stdio.h>

int main()
{
    int n;
    scanf("%d", &n);

    /* n (1 <= n <= 1000) is "almost lucky" if it is divisible by some
     * lucky number, i.e. a number whose digits are only 4s and 7s.
     * Lucky numbers up to 1000: 4, 7, 44, 47, 74, 77, 444, 447, 474,
     * 477, 744, 747, 774, 777. Divisibility by 44, 77 or 444 is already
     * implied by 4 and 7, but 474 and 747 must be checked explicitly. */
    int charge = 1;
    if (n % 4 == 0 || n % 7 == 0 || n % 47 == 0 || n % 74 == 0 ||
        n % 447 == 0 || n % 474 == 0 || n % 477 == 0 ||
        n % 744 == 0 || n % 747 == 0 || n % 774 == 0 || n % 777 == 0)
        charge = 0;

    if (charge == 1)
        printf("NO");
    else
        printf("YES");
    return 0;
}
/**
 * Refresh the information of each object in the list from the database.
 *
 * @param listObj the objects to refresh
 * @throws IOException
 */
public void refresh(List<T> listObj) throws IOException {
    if (listObj == null) {
        throw new NullPointerException("Cannot refresh null object");
    }
    for (T o : listObj) {
        refresh(o);
    }
}
// pre-calculated exponents in GF(2^16), missing bottom 3 bits, followed by 128-entry polynomial shift table void gfmat_init() { input_diff = (int8_t*)malloc(32768); gf_exp = (uint16_t*)malloc((8192+128)*2); int exp = 0, n = 1; for (int i = 0; i < 32768; i++) { do { if((exp & 7) == 0) gf_exp[exp>>3] = n; exp++; n <<= 1; if(n > 65535) n ^= 0x1100B; } while( !(exp%3) || !(exp%5) || !(exp%17) || !(exp%257) ); input_diff[i] = exp - i*2; } for (int i = 0; i < 128; i++) { n = i << 9; for (int j = 0; j < 7; j++) { n <<= 1; if(n > 65535) n ^= 0x1100B; } gf_exp[8192+i] = n; } }
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ServiceStatus) DeepCopyInto(out *ServiceStatus) { *out = *in if in.DeploymentStatus != nil { in, out := &in.DeploymentStatus, &out.DeploymentStatus *out = new(appsv1.DeploymentStatus) (*in).DeepCopyInto(*out) } if in.ScaleStatus != nil { in, out := &in.ScaleStatus, &out.ScaleStatus *out = new(ScaleStatus) **out = **in } if in.ScaleFromZeroTimestamp != nil { in, out := &in.ScaleFromZeroTimestamp, &out.ScaleFromZeroTimestamp *out = (*in).DeepCopy() } if in.ObservedScale != nil { in, out := &in.ObservedScale, &out.ObservedScale *out = new(int) **out = **in } if in.WeightOverride != nil { in, out := &in.WeightOverride, &out.WeightOverride *out = new(int) **out = **in } if in.ContainerImages != nil { in, out := &in.ContainerImages, &out.ContainerImages *out = make(map[string]string, len(*in)) for key, val := range *in { (*out)[key] = val } } if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions *out = make([]genericcondition.GenericCondition, len(*in)) copy(*out, *in) } if in.Endpoints != nil { in, out := &in.Endpoints, &out.Endpoints *out = make([]string, len(*in)) copy(*out, *in) } if in.PublicDomains != nil { in, out := &in.PublicDomains, &out.PublicDomains *out = make([]string, len(*in)) copy(*out, *in) } return }
Tuhfetü'l-Tib is a Turkish medical history periodical that was published regularly every two weeks in Istanbul starting on December 27th, 1867. Its collection comprises 15 issues in total, each consisting of 16 pages with a hard cover, printed at the Maltepe Military Hospital printing press over more than seven months. As far as we know, Tuhfetü'l-Tib's last issue was dated July 21st, 1868. The aim of its publication was to introduce contemporary medical knowledge by translating articles from respected academic periodicals in the West. Among the issues we have studied, twenty chapters were about gynecology, while twenty-five were about newborn diseases. Tuhfetü'l-Tib is the second Turkish medical review ever printed in Turkey, succeeding Vakayi-i Tibbiye (1849). It is also the first periodical in Turkey to publish articles on gynecology and pediatrics. Having played an important role in the struggle for Turkish-language education at the Imperial Medical School, it was issued on the basis of the principles defended by the Ottoman Association of Medicine.
/** * Test of features method, of class MyFeatures. */ @Test public void testFeatures() { Map<String, Boolean> result = MyFeatures.features(); assertEquals(false, result.get(MyFeatures.values()[0].name())); }
DENVER — It was only one game. Something positive, however, emerged from an otherwise forgettable night in Denver. Defensemen Ben Lovejoy and Derrick Pouliot looked comfortable together, and while it was merely one game and coach Mike Johnston has promised to tinker with his lineup, this is a duo that could stick. “I felt great with him,” Lovejoy said. The two had a discussion before playing together for the first time. Lovejoy, the veteran, did most of the talking. His message was simple. “I told him before the game that I’m out there to make him look good,” Lovejoy said. “He doesn’t need to pass me the puck. He has to do his thing, make plays. I’ll always be there to support him and back him up if something goes wrong.” Lovejoy is skating with Pouliot for a reason. While playing in Anaheim for parts of the past three seasons, Lovejoy primarily was partnered with defenseman Cam Fowler. The similarities between Fowler and Pouliot are clear. Fowler was the 12th overall pick in the 2010 NHL Entry Draft, and Pouliot was the eighth overall pick two years later. Both are left-handed, offensive-minded, skilled defensemen. Lovejoy’s presence helped turn Fowler from a slightly disappointing youngster in his first two seasons to an emerging star. “He’s (Pouliot) exactly the kind of player I played with my last three seasons,” Lovejoy said. “I was very lucky to play with Cam Fowler. He’s a great player. I feel like we grew together. I was able to help him, and he was able to help me.” Lovejoy is a defensive defenseman by nature, someone who will play it safe while his partner is given the latitude to take chances. The Penguins want Pouliot to be aggressive and display the offensive skills that turned him into such a high draft pick. In Lovejoy, Pouliot possesses a partner who won’t take chances but who is a good enough skater to play a more skilled style. “I haven’t had a chance to see (Lovejoy) play a lot,” Pouliot said. “But I think it’s going to go well with him. I feel good about it.” Johnston liked what he saw in the duo’s debut. He used Pouliot and Lovejoy as his second pairing, with both seeing almost 20 minutes of ice time. The tandem of Rob Scuderi and Ian Cole was used as the third pairing. “It (Lovejoy and Pouliot together) gives us some balance,” Johnston said. “Lovejoy’s had really good success playing with Fowler. He’s very similar to Pouliot. Nothing is locked in. We may juggle things until we get the right people together.” Pouliot and Lovejoy sound perfectly content staying together. Lovejoy said his transition back to the Penguins has been “seamless” and that he immediately feels more comfortable with the new team that really isn’t new at all. The pairing with Pouliot is new, but Lovejoy seems to possess a strong feeling that it will thrive. And he sees great things in Pouliot. “I was especially impressed with his battle level,” Lovejoy said. “In the corners, he was borderline dominant. You don’t see that a lot out of young, skilled defensemen.” But then, Lovejoy knows how to bring those types along. “I hope we form a bond together and that we do good things together,” Lovejoy said. Josh Yohe is a staff writer for Trib Total Media. Reach him at [email protected] or via Twitter @JoshYohe_Trib. Mary Ann Thomas is a Tribune-Review staff reporter. You can contact Mary at 724-226-4691, [email protected] or via Twitter .
import { CallHandler, ExecutionContext, Injectable, NestInterceptor } from '@nestjs/common'; import { Request, Response } from 'express'; import { Observable } from 'rxjs'; import { tap } from 'rxjs/operators'; import { LoggerService } from '../../services/main/main'; @Injectable() export class LoggerInterceptor implements NestInterceptor { public constructor(private readonly loggerService: LoggerService) {} public intercept(context: ExecutionContext, next: CallHandler): Observable<unknown> { const requestStartTime = Date.now(); const httpContext = context.switchToHttp(); const request: Request = httpContext.getRequest(); const response: Response = httpContext.getResponse(); return next.handle().pipe( tap(() => { const processingTime = Date.now() - requestStartTime; this.loggerService.info('Request finished processing.', { method: request.method, path: request.url, statusCode: response.statusCode, processingTime, }); }), ); } }
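One way to activate the interceptor above is to bind it globally via Nest's APP_INTERCEPTOR token. A minimal hedged sketch, assuming LoggerService is provided in the same module; the real project may wire this differently.

import { Module } from '@nestjs/common';
import { APP_INTERCEPTOR } from '@nestjs/core';

@Module({
  providers: [
    LoggerService,
    // Register globally so every request/response pair is timed and logged.
    { provide: APP_INTERCEPTOR, useClass: LoggerInterceptor },
  ],
})
export class AppModule {}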
Artemisia lanaticapitula (Asteraceae: Tribe Anthemideae), a New Species from Zhejiang, East China Abstract Artemisia lanaticapitula (Asteraceae: Anthemideae), a new species from Zhejiang in East China, is described and illustrated, based on analyses of morphological observations and molecular phylogenetic evidence. This new species is similar to Artemisia anomala in having simple and undivided leaves, but differs by having phyllaries, corolla limbs, and abaxial leaf surfaces densely white-lanate, an involucre that is semiglobose and 3.5–6 mm in diameter, and capitula that are pedunculate at the base. It grows along streams under forests or on roadsides at forest margins; the species is narrowly distributed in eastern and southern Zhejiang.
import py from py._path.svnurl import InfoSvnCommand import datetime import time from svntestbase import CommonSvnTests def pytest_funcarg__path1(request): repo, repourl, wc = request.getfuncargvalue("repowc1") return py.path.svnurl(repourl) class TestSvnURLCommandPath(CommonSvnTests): @py.test.mark.xfail def test_load(self, path1): super(TestSvnURLCommandPath, self).test_load(path1) # the following two work on jython but not in local/svnwc def test_listdir(self, path1): super(TestSvnURLCommandPath, self).test_listdir(path1) def test_visit_ignore(self, path1): super(TestSvnURLCommandPath, self).test_visit_ignore(path1) def test_svnurl_needs_arg(self, path1): py.test.raises(TypeError, "py.path.svnurl()") def test_svnurl_does_not_accept_None_either(self, path1): py.test.raises(Exception, "py.path.svnurl(None)") def test_svnurl_characters_simple(self, path1): py.path.svnurl("svn+ssh://hello/world") def test_svnurl_characters_at_user(self, path1): py.path.svnurl("http://[email protected]/some/dir") def test_svnurl_characters_at_path(self, path1): py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo@bar")') def test_svnurl_characters_colon_port(self, path1): py.path.svnurl("http://host.com:8080/some/dir") def test_svnurl_characters_tilde_end(self, path1): py.path.svnurl("http://host.com/some/file~") @py.test.mark.xfail("sys.platform == 'win32'") def test_svnurl_characters_colon_path(self, path1): # colons are allowed on win32, because they're part of the drive # part of an absolute path... however, they shouldn't be allowed in # other parts, I think py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo:bar")') def test_export(self, path1, tmpdir): tmpdir = tmpdir.join("empty") p = path1.export(tmpdir) assert p == tmpdir # XXX should return None n1 = [x.basename for x in tmpdir.listdir()] n2 = [x.basename for x in path1.listdir()] n1.sort() n2.sort() assert n1 == n2 assert not p.join('.svn').check() rev = path1.mkdir("newdir") tmpdir.remove() assert not tmpdir.check() path1.new(rev=1).export(tmpdir) for p in tmpdir.listdir(): assert p.basename in n2 class TestSvnInfoCommand: def test_svn_1_2(self): line = " 2256 hpk 165 Nov 24 17:55 __init__.py" info = InfoSvnCommand(line) now = datetime.datetime.now() assert info.last_author == 'hpk' assert info.created_rev == 2256 assert info.kind == 'file' # we don't check for the year (2006), because that depends # on the clock correctly being setup assert time.gmtime(info.mtime)[1:6] == (11, 24, 17, 55, 0) assert info.size == 165 assert info.time == info.mtime * 1000000 def test_svn_1_3(self): line =" 4784 hpk 2 Jun 01 2004 __init__.py" info = InfoSvnCommand(line) assert info.last_author == 'hpk' assert info.kind == 'file' def test_svn_1_3_b(self): line =" 74 autoadmi Oct 06 23:59 plonesolutions.com/" info = InfoSvnCommand(line) assert info.last_author == 'autoadmi' assert info.kind == 'dir' def test_badchars(): py.test.raises(ValueError, "py.path.svnurl('http://host/tmp/@@@:')")
package cn.mzhong.kbus.http.bio;

import cn.mzhong.kbus.http.*;
import cn.mzhong.kbus.util.StreamUtils;

import java.io.IOException;
import java.io.InputStream;

/**
 * Blocking (BIO) reader for the head of an HTTP response.<br>
 * Created: 2019/10/28 9:59
 *
 * @author mzhong
 * @version 1.0
 */
public class HttpBioResponseReader {

    public HttpResponse read(InputStream inputStream) throws IOException {
        // Read the status line; a null line means the peer closed the stream.
        byte[] responseLineBytes = StreamUtils.readLine(inputStream);
        if (responseLineBytes == null) {
            throw new IOEOFException();
        }
        HttpResponseLine responseLine = HttpResponseLine.parse(responseLineBytes);
        // Read headers until the empty line that terminates the head.
        HttpHeader header = new HttpHeader();
        byte[] lineBytes;
        while ((lineBytes = StreamUtils.readLine(inputStream)) != null) {
            if (lineBytes.length == 0) {
                break;
            }
            header.putLine(lineBytes);
        }
        // Note: the body, if any, is intentionally left unread on the stream.
        return new HttpResponse(new HttpResponseHead(responseLine, header));
    }
}
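For orientation, here is a minimal sketch of how this reader might be driven from a plain socket. The target host, the request string, and the HttpResponse accessor getHead() are illustrative assumptions; only the reader itself is defined above.

import cn.mzhong.kbus.http.HttpResponse;
import cn.mzhong.kbus.http.bio.HttpBioResponseReader;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class HttpBioResponseReaderDemo {
    public static void main(String[] args) throws IOException {
        try (Socket socket = new Socket("example.com", 80)) {
            OutputStream out = socket.getOutputStream();
            // Issue a minimal HEAD request so the interesting part is the response head.
            out.write("HEAD / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n"
                    .getBytes(StandardCharsets.US_ASCII));
            out.flush();

            InputStream in = socket.getInputStream();
            HttpResponse response = new HttpBioResponseReader().read(in);
            // Only the status line and headers have been consumed; any body is still on the stream.
            System.out.println(response.getHead()); // getHead() is an assumed accessor
        }
    }
}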
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.fireflysemantics.math.arithmetic; import static com.fireflysemantics.math.arithmetic.exception.ArithmeticExceptionKeys.X; import static com.fireflysemantics.math.arithmetic.exception.ArithmeticExceptionKeys.Y; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__GCD_OVERFLOW_32_BITS; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__GCD_OVERFLOW_64_BITS; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__INTEGER_OVERFLOW; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__LCM_OVERFLOW_32_BITS; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__LCM_OVERFLOW_64_BITS; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__LONG_OVERFLOW; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__OVERFLOW_IN_ADDITION; import static com.fireflysemantics.math.exception.ExceptionTypes.MAE__OVERFLOW_IN_SUBTRACTION; import static com.fireflysemantics.math.exception.ExceptionTypes.NOT_POSITIVE; import java.math.BigInteger; import com.fireflysemantics.math.exception.MathException; /** * Arithmetic utility functions complementing {@link Math}. */ public final class Arithmetic { /** Private constructor. */ private Arithmetic() { super(); } /** * Add two integers, checking for overflow. * * @param x * an addend * @param y * an addend * @return the sum {@code x+y} * @throws MathException * Of type {@code MAE__OVERFLOW_IN_ADDITION} if the result can * not be represented as an {@code int}. */ public static int addAndCheck(int x, int y) throws MathException { long s = (long) x + (long) y; if (s < Integer.MIN_VALUE || s > Integer.MAX_VALUE) { throw new MathException(MAE__OVERFLOW_IN_ADDITION).put(X, x).put(Y, y); } return (int) s; } /** * Add two long integers, checking for overflow. * * @param x * an addend * @param y * an addend * @return the sum {@code x+y} * @throws MathException * Of type {@code MAE__OVERFLOW_IN_ADDITION} if the result can * not be represented as an {@code long}. */ public static long addAndCheck(long x, long y) throws MathException { final long result = x + y; if (!((x ^ y) < 0 | (x ^ result) >= 0)) { throw new MathException(MAE__OVERFLOW_IN_ADDITION).put(X, x).put(Y, y); } return result; } /** * Computes the greatest common divisor of the absolute value of two * numbers, using a modified version of the "binary gcd" method. See Knuth * 4.5.2 algorithm B. The algorithm is due to <NAME> (1961). 
<br/> * Special cases: * <ul> * <li>The invocations {@code gcd(Integer.MIN_VALUE, Integer.MIN_VALUE)}, * {@code gcd(Integer.MIN_VALUE, 0)} and {@code gcd(0, Integer.MIN_VALUE)} * throw an {@code MathException[MAE__GCD_OVERFLOW_32_BITS]}, because the * result would be 2^31, which is too large for an int value.</li> * <li>The result of {@code gcd(x, x)}, {@code gcd(0, x)} and * {@code gcd(x, 0)} is the absolute value of {@code x}, except for the * special cases above.</li> * <li>The invocation {@code gcd(0, 0)} is the only one which returns * {@code 0}.</li> * </ul> * * @param x * Number. * @param y * Number. * @return the greatest common divisor (never negative). * @throws MathException * Of type {@code MAE__GCD_OVERFLOW_32_BITS} if the result * cannot be represented as a non-negative {@code int} value. */ public static int gcd(int x, int y) throws MathException { int a = x; int b = y; if (a == 0 || b == 0) { if (a == Integer.MIN_VALUE || b == Integer.MIN_VALUE) { throw new MathException(MAE__GCD_OVERFLOW_32_BITS).put(X, x).put(Y, y); } return Math.abs(a + b); } long al = a; long bl = b; boolean useLong = false; if (a < 0) { if (Integer.MIN_VALUE == a) { useLong = true; } else { a = -a; } al = -al; } if (b < 0) { if (Integer.MIN_VALUE == b) { useLong = true; } else { b = -b; } bl = -bl; } if (useLong) { if (al == bl) { throw new MathException(MAE__GCD_OVERFLOW_32_BITS).put(X, x).put(Y, y); } long blbu = bl; bl = al; al = blbu % al; if (al == 0) { if (bl > Integer.MAX_VALUE) { throw new MathException(MAE__GCD_OVERFLOW_32_BITS).put(X, x).put(Y, y); } return (int) bl; } blbu = bl; // Now "al" and "bl" fit in an "int". b = (int) al; a = (int) (blbu % al); } return gcdPositive(a, b); } /** * Computes the greatest common divisor of two <em>positive</em> numbers * (this precondition is <em>not</em> checked and the result is undefined if * not fulfilled) using the "binary gcd" method which avoids division and * modulo operations. See Knuth 4.5.2 algorithm B. The algorithm is due to * <NAME> (1961). <br/> * Special cases: * <ul> * <li>The result of {@code gcd(x, x)}, {@code gcd(0, x)} and * {@code gcd(x, 0)} is the value of {@code x}.</li> * <li>The invocation {@code gcd(0, 0)} is the only one which returns * {@code 0}.</li> * </ul> * * @param x * Positive number. * @param y * Positive number. * @return the greatest common divisor. */ private static int gcdPositive(int x, int y) { if (x == 0) { return y; } else if (y == 0) { return x; } // Make "a" and "b" odd, keeping track of common power of 2. final int aTwos = Integer.numberOfTrailingZeros(x); x >>= aTwos; final int bTwos = Integer.numberOfTrailingZeros(y); y >>= bTwos; final int shift = Math.min(aTwos, bTwos); // "a" and "b" are positive. // If a > b then "gdc(a, b)" is equal to "gcd(a - b, b)". // If a < b then "gcd(a, b)" is equal to "gcd(b - a, a)". // Hence, in the successive iterations: // "a" becomes the absolute difference of the current values, // "b" becomes the minimum of the current values. while (x != y) { final int delta = x - y; y = Math.min(x, y); x = Math.abs(delta); // Remove any power of 2 in "a" ("b" is guaranteed to be odd). x >>= Integer.numberOfTrailingZeros(x); } // Recover the common power of 2. return x << shift; } /** * <p> * Gets the greatest common divisor of the absolute value of two numbers, * using the "binary gcd" method which avoids division and modulo * operations. See Knuth 4.5.2 algorithm B. This algorithm is due to Josef * Stein (1961). 
* </p> * Special cases: * <ul> * <li>The invocations {@code gcd(Long.MIN_VALUE, Long.MIN_VALUE)}, * {@code gcd(Long.MIN_VALUE, 0L)} and {@code gcd(0L, Long.MIN_VALUE)} throw * an {@code ArithmeticException}, because the result would be 2^63, which * is too large for a long value.</li> * <li>The result of {@code gcd(x, x)}, {@code gcd(0L, x)} and * {@code gcd(x, 0L)} is the absolute value of {@code x}, except for the * special cases above. * <li>The invocation {@code gcd(0L, 0L)} is the only one which returns * {@code 0L}.</li> * </ul> * * @param x * Number. * @param y * Number. * @return the greatest common divisor, never negative. * @throws MathException * Of type {@code MAE__GCD_OVERFLOW_64_BITS} if the result * cannot be represented as a non-negative {@code long} value. */ public static long gcd(final long x, final long y) throws MathException { long u = x; long v = y; if ((u == 0) || (v == 0)) { if ((u == Long.MIN_VALUE) || (v == Long.MIN_VALUE)) { throw new MathException(MAE__GCD_OVERFLOW_64_BITS).put(X, x).put(Y, y); } return Math.abs(u) + Math.abs(v); } // keep u and v negative, as negative integers range down to // -2^63, while positive numbers can only be as large as 2^63-1 // (i.e. we can't necessarily negate a negative number without // overflow) /* assert u!=0 && v!=0; */ if (u > 0) { u = -u; } // make u negative if (v > 0) { v = -v; } // make v negative // B1. [Find power of 2] int k = 0; while ((u & 1) == 0 && (v & 1) == 0 && k < 63) { // while u and v are // both even... u /= 2; v /= 2; k++; // cast out twos. } if (k == 63) { throw new MathException(MAE__GCD_OVERFLOW_64_BITS).put(X, x).put(Y, y); } // B2. Initialize: u and v have been divided by 2^k and at least // one is odd. long t = ((u & 1) == 1) ? v : -(u / 2)/* B3 */; // t negative: u was odd, v may be even (t replaces v) // t positive: u was even, v is odd (t replaces u) do { /* assert u<0 && v<0; */ // B4/B3: cast out twos from t. while ((t & 1) == 0) { // while t is even.. t /= 2; // cast out twos } // B5 [reset max(u,v)] if (t > 0) { u = -t; } else { v = t; } // B6/B3. at this point both u and v should be odd. t = (v - u) / 2; // |u| larger: t positive (replace u) // |v| larger: t negative (replace v) } while (t != 0); return -u * (1L << k); // gcd is u*2^k } /** * <p> * Returns the least common multiple of the absolute value of two numbers, * using the formula {@code lcm(a,b) = (a / gcd(a,b)) * b}. * </p> * Special cases: * <ul> * <li>The invocations {@code lcm(Integer.MIN_VALUE, n)} and * {@code lcm(n, Integer.MIN_VALUE)}, where {@code abs(n)} is a power of 2, * throw an {@code ArithmeticException}, because the result would be 2^31, * which is too large for an int value.</li> * <li>The result of {@code lcm(0, x)} and {@code lcm(x, 0)} is {@code 0} * for any {@code x}. * </ul> * * @param x * Number. * @param y * Number. * @return the least common multiple, never negative. * @throws MathException * Of type {@code MAE__LCM_OVERFLOW_32_BITS} if the result * cannot be represented as a non-negative {@code int} value. */ public static int lcm(int x, int y) throws MathException { if (x == 0 || y == 0) { return 0; } int lcm = Math.abs(Arithmetic.mulAndCheck(x / gcd(x, y), y)); if (lcm == Integer.MIN_VALUE) { throw new MathException(MAE__LCM_OVERFLOW_32_BITS).put(X, x).put(Y, y); } return lcm; } /** * <p> * Returns the least common multiple of the absolute value of two numbers, * using the formula {@code lcm(a,b) = (a / gcd(a,b)) * b}. 
* </p> * Special cases: * <ul> * <li>The invocations {@code lcm(Long.MIN_VALUE, n)} and * {@code lcm(n, Long.MIN_VALUE)}, where {@code abs(n)} is a power of 2, * throw an {@code ArithmeticException}, because the result would be 2^63, * which is too large for an int value.</li> * <li>The result of {@code lcm(0L, x)} and {@code lcm(x, 0L)} is {@code 0L} * for any {@code x}. * </ul> * * @param x * Number. * @param y * Number. * @return the least common multiple, never negative. * @throws MathException * Of type {@code MAE__LCM_OVERFLOW_64_BITS} if the result * cannot be represented as a non-negative {@code long} value. */ public static long lcm(long x, long y) throws MathException { if (x == 0 || y == 0) { return 0; } long lcm = Math.abs(Arithmetic.mulAndCheck(x / gcd(x, y), y)); if (lcm == Long.MIN_VALUE) { throw new MathException(MAE__LCM_OVERFLOW_64_BITS).put(X, x).put(Y, y); } return lcm; } /** * Multiply two integers, checking for overflow. * * @param x * Factor. * @param y * Factor. * @return the product {@code x * y}. * @throws MathException[MAE__INTEGER_OVERFLOW] * if the result can not be represented as an {@code int}. */ public static int mulAndCheck(int x, int y) throws MathException { long m = ((long) x) * ((long) y); if (m < Integer.MIN_VALUE || m > Integer.MAX_VALUE) { throw new MathException(MAE__INTEGER_OVERFLOW); } return (int) m; } /** * Multiply two long integers, checking for overflow. * * @param x * Factor. * @param y * Factor. * @return the product {@code a * b}. * @throws MathException * Of type {@code MAE__LONG_OVERFLOW} if the result can not be * represented as a {@code long}. */ public static long mulAndCheck(long x, long y) throws MathException { long ret; if (x > y) { // use symmetry to reduce boundary cases ret = mulAndCheck(y, x); } else { if (x < 0) { if (y < 0) { // check for positive overflow with negative a, negative b if (x >= Long.MAX_VALUE / y) { ret = x * y; } else { throw new MathException(MAE__LONG_OVERFLOW); } } else if (y > 0) { // check for negative overflow with negative a, positive b if (Long.MIN_VALUE / y <= x) { ret = x * y; } else { throw new MathException(MAE__LONG_OVERFLOW); } } else { // assert b == 0 ret = 0; } } else if (x > 0) { // assert a > 0 // assert b > 0 // check for positive overflow with positive a, positive b if (x <= Long.MAX_VALUE / y) { ret = x * y; } else { throw new MathException(MAE__LONG_OVERFLOW); } } else { // assert a == 0 ret = 0; } } return ret; } /** * Subtract two integers, checking for overflow. * * @param x * Minuend. * @param y * Subtrahend. * @return the difference {@code x - y}. * @throws MathException * Of type {@code MAE__OVERFLOW_IN_SUBTRACTION} if the result * can not be represented as an {@code int}. */ public static int subAndCheck(int x, int y) throws MathException { long s = (long) x - (long) y; if (s < Integer.MIN_VALUE || s > Integer.MAX_VALUE) { throw new MathException(MAE__OVERFLOW_IN_SUBTRACTION).put(X, x).put(Y, y); } return (int) s; } /** * Subtract two long integers, checking for overflow. * * @param x * Value. * @param y * Value. * @return the difference {@code x - y}. * @throws MathException * Of type {@code MAE__OVERFLOW_IN_SUBTRACTION} if the result * can not be represented as a {@code long}. 
*/ public static long subAndCheck(long x, long y) throws MathException { long ret; if (y == Long.MIN_VALUE) { if (x < 0) { ret = x - y; } else { throw new MathException(MAE__OVERFLOW_IN_SUBTRACTION).put(X, x).put(Y, y); } } else { // use additive inverse try { ret = addAndCheck(x, -y); } catch (MathException e) { MathException me = new MathException(MAE__OVERFLOW_IN_SUBTRACTION).put(X, x).put(Y, y); me.initCause(e); throw me; } } return ret; } /** * Raise an int to an int power. * * @param x * Number to raise. * @param y * Exponent (must be positive or zero). * @return \( x^y \) * @throws MathException * if {@code y < 0}. * @throws MathException * Of type {@code NOT_POSITIVE_EXCEPTION} if the result would * overflow. */ public static int pow(final int x, final int y) throws MathException { if (y < 0) { throw new MathException(NOT_POSITIVE).put(Y, y); } try { int exp = y; int result = 1; int x2y = x; while (true) { if ((exp & 0x1) != 0) { result = mulAndCheck(result, x2y); } exp >>= 1; if (exp == 0) { break; } x2y = mulAndCheck(x2y, x2y); } return result; } catch (MathException e) { MathException me = new MathException(MAE__INTEGER_OVERFLOW).put(X, x).put(Y, y); me.initCause(e); throw me; } } /** * Raise a long to an int power. * * @param x * Number to raise. * @param y * Exponent (must be positive or zero). * @return \( x^y \) * @throws MathException * Of type {@code NOT_POSITIVE} if {@code y < 0}. * @throws MathException * Of type {@code MAE__LONG_OVERFLOW} if the result would * overflow. */ public static long pow(final long x, final int y) throws MathException { if (y < 0) { throw new MathException(NOT_POSITIVE).put(Y, y); } try { int exp = y; long result = 1; long x2y = x; while (true) { if ((exp & 0x1) != 0) { result = mulAndCheck(result, x2y); } exp >>= 1; if (exp == 0) { break; } x2y = mulAndCheck(x2y, x2y); } return result; } catch (MathException e) { MathException me = new MathException(MAE__LONG_OVERFLOW).put(X, x).put(Y, y); me.initCause(e); throw me; } } /** * Raise a BigInteger to an int power. * * @param x * Number to raise. * @param y * Exponent (must be positive or zero). * @return x<sup>y</sup> * @throws MathException * Of type {@code NOT_POSITIVE} if {@code y < 0}. */ public static BigInteger pow(final BigInteger x, int y) throws MathException { if (y < 0) { throw new MathException(NOT_POSITIVE).put(Y, y); } return x.pow(y); } /** * Raise a BigInteger to a long power. * * @param x * Number to raise. * @param y * Exponent (must be positive or zero). * @return x<sup>y</sup> * @throws MathException * Of type {@code NOT_POSITIVE} if {@code e < 0}. */ public static BigInteger pow(final BigInteger x, long y) throws MathException { if (y < 0) { throw new MathException(NOT_POSITIVE).put(Y, y); } BigInteger result = BigInteger.ONE; BigInteger k2p = x; while (y != 0) { if ((y & 0x1) != 0) { result = result.multiply(k2p); } k2p = k2p.multiply(k2p); y >>= 1; } return result; } /** * Raise a BigInteger to a BigInteger power. * * @param x * Number to raise. * @param y * Exponent (must be positive or zero). * @return x<sup>y</sup> * @throws MathException * Of type {@code NOT_POSITIVE} if {@code e < 0}. 
*/ public static BigInteger pow(final BigInteger x, BigInteger y) throws MathException { if (y.compareTo(BigInteger.ZERO) < 0) { throw new MathException(NOT_POSITIVE).put(Y, y); } BigInteger result = BigInteger.ONE; BigInteger x2y = x; while (!BigInteger.ZERO.equals(y)) { if (y.testBit(0)) { result = result.multiply(x2y); } x2y = x2y.multiply(x2y); y = y.shiftRight(1); } return result; } /** * Returns true if the argument is a power of two. * * @param n * the number to test * @return true if the argument is a power of two */ public static boolean isPowerOfTwo(long n) { return (n > 0) && ((n & (n - 1)) == 0); } /** * Returns the unsigned remainder from dividing the first argument by the * second where each argument and the result is interpreted as an unsigned * value. * <p> * This method does not use the {@code long} datatype. * </p> * * @param dividend * the value to be divided * @param divisor * the value doing the dividing * @return the unsigned remainder of the first argument divided by the * second argument. */ public static int remainderUnsigned(int dividend, int divisor) { if (divisor >= 0) { if (dividend >= 0) { return dividend % divisor; } // The implementation is a Java port of algorithm described in the // book // "Hacker's Delight" (section "Unsigned short division from signed // division"). int q = ((dividend >>> 1) / divisor) << 1; dividend -= q * divisor; if (dividend < 0 || dividend >= divisor) { dividend -= divisor; } return dividend; } return dividend >= 0 || dividend < divisor ? dividend : dividend - divisor; } /** * Returns the unsigned remainder from dividing the first argument by the * second where each argument and the result is interpreted as an unsigned * value. * <p> * This method does not use the {@code BigInteger} datatype. * </p> * * @param dividend * the value to be divided * @param divisor * the value doing the dividing * @return the unsigned remainder of the first argument divided by the * second argument. */ public static long remainderUnsigned(long dividend, long divisor) { if (divisor >= 0L) { if (dividend >= 0L) { return dividend % divisor; } // The implementation is a Java port of algorithm described in the // book // "Hacker's Delight" (section "Unsigned short division from signed // division"). long q = ((dividend >>> 1) / divisor) << 1; dividend -= q * divisor; if (dividend < 0L || dividend >= divisor) { dividend -= divisor; } return dividend; } return dividend >= 0L || dividend < divisor ? dividend : dividend - divisor; } /** * Returns the unsigned quotient of dividing the first argument by the * second where each argument and the result is interpreted as an unsigned * value. * <p> * Note that in two's complement arithmetic, the three other basic * arithmetic operations of add, subtract, and multiply are bit-wise * identical if the two operands are regarded as both being signed or both * being unsigned. Therefore separate {@code * addUnsigned}, etc. methods are not provided. * </p> * <p> * This method does not use the {@code long} datatype. * </p> * * @param dividend * the value to be divided * @param divisor * the value doing the dividing * @return the unsigned quotient of the first argument divided by the second * argument */ public static int divideUnsigned(int dividend, int divisor) { if (divisor >= 0) { if (dividend >= 0) { return dividend / divisor; } // The implementation is a Java port of algorithm described in the // book // "Hacker's Delight" (section "Unsigned short division from signed // division"). 
int q = ((dividend >>> 1) / divisor) << 1; dividend -= q * divisor; if (dividend < 0L || dividend >= divisor) { q++; } return q; } return dividend >= 0 || dividend < divisor ? 0 : 1; } /** * Returns the unsigned quotient of dividing the first argument by the * second where each argument and the result is interpreted as an unsigned * value. * <p> * Note that in two's complement arithmetic, the three other basic * arithmetic operations of add, subtract, and multiply are bit-wise * identical if the two operands are regarded as both being signed or both * being unsigned. Therefore separate {@code * addUnsigned}, etc. methods are not provided. * </p> * <p> * This method does not use the {@code BigInteger} datatype. * </p> * * @param dividend * the value to be divided * @param divisor * the value doing the dividing * @return the unsigned quotient of the first argument divided by the second * argument. */ public static long divideUnsigned(long dividend, long divisor) { if (divisor >= 0L) { if (dividend >= 0L) { return dividend / divisor; } // The implementation is a Java port of algorithm described in the // book // "Hacker's Delight" (section "Unsigned short division from signed // division"). long q = ((dividend >>> 1) / divisor) << 1; dividend -= q * divisor; if (dividend < 0L || dividend >= divisor) { q++; } return q; } return dividend >= 0L || dividend < divisor ? 0L : 1L; } }
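A brief usage sketch of the checked-arithmetic API above. It assumes MathException is either unchecked or, as here, declared on main; the printed values follow directly from the definitions (gcd/lcm over absolute values, square-and-multiply pow).

import com.fireflysemantics.math.arithmetic.Arithmetic;
import com.fireflysemantics.math.exception.MathException;

import java.math.BigInteger;

public class ArithmeticDemo {
    public static void main(String[] args) throws MathException {
        System.out.println(Arithmetic.gcd(12, 18));            // 6
        System.out.println(Arithmetic.lcm(4, 6));              // 12
        System.out.println(Arithmetic.pow(2, 10));             // 1024
        System.out.println(Arithmetic.pow(BigInteger.TEN, 3)); // 1000

        // Checked operations throw instead of silently wrapping around.
        try {
            Arithmetic.addAndCheck(Integer.MAX_VALUE, 1);
        } catch (MathException e) {
            System.out.println("overflow detected");
        }
    }
}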
/**
 * Test case ID 20016: verifies the functionality of the "Continue with
 * Google" button.
 *
 * @param method the test method, whose name is used as the launch context
 */
@Test(dependsOnMethods = { "verifyIfContinueWithGoogleIsPresent_20015" })
public void testContinueWithGoogleButton_20016(Method method) {
    homepage.launchAndLoginWithCity(method.getName());
    signInPage.signInWithGoogle();
    signInPage.returnToParentWindowAndCloseSignInPage();
}
/** * Prints a next expression in standard LTL syntax. * * @param nextExpr The next expression to print */ @Override public void visit(Next nextExpr) { builder.append("(X "); nextExpr.subExpr.accept(this); builder.append(")"); }
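To see the recursion at work, a hypothetical driver follows. The Atom class, the printer visitor instance, and its toString() are assumptions for illustration; only visit(Next) is shown above.

// Build X (X p) and print it.
Next inner = new Next(new Atom("p"));    // Atom: assumed atomic-proposition node
Next outer = new Next(inner);
outer.accept(printer);                   // printer: the visitor holding `builder`
System.out.println(printer.toString()); // expected output: (X (X p))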
// String implements the fmt.Stringer. func (sq *SurveyQuestion) String() string { var builder strings.Builder builder.WriteString("SurveyQuestion(") builder.WriteString(fmt.Sprintf("id=%v", sq.ID)) builder.WriteString(", create_time=") builder.WriteString(sq.CreateTime.Format(time.ANSIC)) builder.WriteString(", update_time=") builder.WriteString(sq.UpdateTime.Format(time.ANSIC)) builder.WriteString(", form_name=") builder.WriteString(sq.FormName) builder.WriteString(", form_description=") builder.WriteString(sq.FormDescription) builder.WriteString(", form_index=") builder.WriteString(fmt.Sprintf("%v", sq.FormIndex)) builder.WriteString(", question_type=") builder.WriteString(sq.QuestionType) builder.WriteString(", question_format=") builder.WriteString(sq.QuestionFormat) builder.WriteString(", question_text=") builder.WriteString(sq.QuestionText) builder.WriteString(", question_index=") builder.WriteString(fmt.Sprintf("%v", sq.QuestionIndex)) builder.WriteString(", bool_data=") builder.WriteString(fmt.Sprintf("%v", sq.BoolData)) builder.WriteString(", email_data=") builder.WriteString(sq.EmailData) builder.WriteString(", latitude=") builder.WriteString(fmt.Sprintf("%v", sq.Latitude)) builder.WriteString(", longitude=") builder.WriteString(fmt.Sprintf("%v", sq.Longitude)) builder.WriteString(", location_accuracy=") builder.WriteString(fmt.Sprintf("%v", sq.LocationAccuracy)) builder.WriteString(", altitude=") builder.WriteString(fmt.Sprintf("%v", sq.Altitude)) builder.WriteString(", phone_data=") builder.WriteString(sq.PhoneData) builder.WriteString(", text_data=") builder.WriteString(sq.TextData) builder.WriteString(", float_data=") builder.WriteString(fmt.Sprintf("%v", sq.FloatData)) builder.WriteString(", int_data=") builder.WriteString(fmt.Sprintf("%v", sq.IntData)) builder.WriteString(", date_data=") builder.WriteString(sq.DateData.Format(time.ANSIC)) builder.WriteByte(')') return builder.String() }
def walk_nodes(self, where="/", classname=None): class_ = get_class_by_name(classname) if class_ is Group: yield from self.walk_groups(where) elif class_ is Node: yield self.get_node(where) for group in self.walk_groups(where): yield from self.iter_nodes(group) else: for group in self.walk_groups(where): yield from self.iter_nodes(group, classname)
from airflow.utils.decorators import apply_defaults
from airflow.models import BaseOperator
from airflow.exceptions import AirflowException
from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
from airflow.hooks.S3_hook import S3Hook
from decimal import Decimal
import json
import logging

class S3ToDynamoDBOperator(BaseOperator):
    """
    Uploads json data from S3 to a DynamoDB table.

    :param table_name: Dynamodb table to replicate data to
    :type table_name: str
    :param table_keys: partition key and sort key
    :type table_keys: list
    :param aws_conn_id: aws connection
    :type aws_conn_id: str
    :param region_name: aws region name (example: us-east-1)
    :type region_name: str
    :param s3_key: s3 object key
    :type s3_key: str
    :param json_key: Key for json list
    :type json_key: str
    """
    template_fields = ("table_name", "json_key", "s3_key")

    @apply_defaults
    def __init__(self,
                 table_name,
                 table_keys,
                 region_name,
                 s3_key,
                 json_key,
                 aws_conn_id='aws_default',
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.table_name = table_name
        self.table_keys = table_keys
        self.aws_conn_id = aws_conn_id
        self.region_name = region_name
        self.s3_key = s3_key
        self.json_key = json_key

    def _convert_float_to_decimal(self, json_list):
        # DynamoDB does not accept Python floats; store sentiment scores as Decimal.
        for json_obj in json_list:
            val = str(json_obj["sentiment"])
            json_obj["sentiment"] = Decimal(val)
        return json_list

    def execute(self, context):
        s3 = S3Hook(aws_conn_id=self.aws_conn_id)
        dynamodb = AwsDynamoDBHook(aws_conn_id=self.aws_conn_id,
                                   table_name=self.table_name,
                                   table_keys=self.table_keys,
                                   region_name=self.region_name)

        if not s3.check_for_key(self.s3_key):
            raise AirflowException(
                "The source key {0} does not exist".format(self.s3_key))

        s3_key_object = s3.get_key(self.s3_key)
        s3_key_json = json.loads(s3_key_object.get()['Body'].read().decode('utf-8'))
        json_list = s3_key_json[self.json_key]
        json_list = self._convert_float_to_decimal(json_list)

        logging.info('Inserting rows into dynamodb table %s', self.table_name)
        dynamodb.write_batch_data(json_list)
        logging.info('Finished inserting %d rows into dynamodb table %s',
                     len(json_list), self.table_name)
# -*- coding: UTF-8 -*-
# Author: <NAME>
# E-mail: <EMAIL>
# Update time: 2021.03.05

from __future__ import print_function

import matplotlib.pyplot as plt  # used by the commented-out content demo below

from pit import PIT
from forward import FORWARD

'''
data = {'type': 'data',
        # 'interest_ID': 0,
        'consumer_ID': 0,
        'route_ID': 0,
        'content_name': 'r0/0',
        'content_data': '',
        'data_hop': 0,
        'start_time': 0.0
        }
'''


class DATA():

    def __init__(self):
        self.data = {}

    def Create_data(self, route_ID, interest):
        '''
        interest = {'type': 'interest',
                    'interest_ID': 0,
                    'consumer_ID': 0,
                    'route_ID': 0,
                    'content_name': 'r0/0',
                    'interest_hop': 0,
                    'life_hop': 5,
                    'start_time': 0.0}
        data = {'type': 'data',
                'consumer_ID': 0,
                'route_ID': 0,
                'content_name': 'r0/0',
                'content_data': '',
                'data_hop': 0,
                'start_time': 0.0,
                'path': ''}
        '''
        data = {'type': 'data',
                'consumer_ID': 0,
                'route_ID': 0,
                'content_name': 'r0/0',
                'content_data': '',
                'data_hop': 0,
                'start_time': 0,
                'path': ''}
        self.data = data
        self.data['type'] = 'data'
        self.data['consumer_ID'] = interest['consumer_ID']
        self.data['route_ID'] = route_ID
        self.data['content_name'] = interest['content_name']
        content = ''  # plt.imread('lena.png')
        # plt.imshow(content, cmap=plt.cm.binary)
        # plt.show()
        self.data['content_data'] = content
        self.data['data_hop'] = 0
        self.data['start_time'] = interest['start_time']
        self.data['path'] = ''
        return self.data

    def Send_data(self, Infaces, route_ID, data):
        '''
        data = {'type': 'data',
                'consumer_ID': 0,
                'route_ID': 0,
                'content_name': 'r0/0',
                'content_data': '',
                'data_hop': 0,
                'start_time': 0.0}
        '''
        Datas = []
        data['data_hop'] += 1
        data['route_ID'] = route_ID
        # Record the forwarding path on the packet itself. (The original used
        # self.data['path'], which raises KeyError when Send_data is called on
        # a fresh DATA instance whose Create_data was never invoked.)
        data['path'] += str(route_ID) + '/'
        # print(Infaces)
        for i in range(len(Infaces)):
            # print(' i= ' + str(i))
            Datas.append([Infaces[i], data])
        return Datas

    # data packet processing
    def On_data(self, inface, route_ID, data, tables):
        '''
        data = {'type': 'data',
                'consumer_ID': 0,
                'route_ID': 0,
                'content_name': 'r0/0',
                'content_data': '',
                'data_hop': 0,
                'start_time': 0.0}
        '''
        Pit = PIT()
        Forward = FORWARD()
        network, ps, pit, fib = tables
        # print(data)
        consumer_ID = data['consumer_ID']
        # Check whether there is an entry matching the content name of the data packet in the pit
        PIT_search_ACK = Pit.Search_pit_data(pit, data)
        # data match in PIT
        if PIT_search_ACK:
            ############################################################
            # CS_cache_data(inface, data)
            # FIB_update_outface(inface, route_ID, data)
            ############################################################
            if consumer_ID != route_ID:
                Infaces = Forward.Forward_data(pit, data)
                Datas = self.Send_data(Infaces, route_ID, data)
                Pit.Remove_pit_entry(pit, data)
                print('data hit in PIT')
                print(Datas)
                return Datas
            else:
                print('YES consumer')
                packet = []
                return packet
        # data miss in PIT
        else:
            # fib_data(inface, data)
            print('data miss in PIT')
            self.Drop_data(inface, data)
            packet = []
            return packet

    def Drop_data(self, inface, data):
        print('Drop_data')


if __name__ == '__main__':
    # Create_data(inface, route_ID= 'r0', interest = ['i0', 'c0', 'r0', 'r1/1', 10., 100.])
    # On_data(inface= 'r0',route_ID= 'r0', data= ['d','i0', 'c0', 'r0', 'r1/1', 10., 100., 1.])
    print('data')
A Study on the Plant-Input-Mapping Discretization for Nonlinear Control Systems Utilizing Feedback Linearization This study attempts to extend a digital redesign method, called the Plant-Input-Mapping (PIM) method, to nonlinear control systems. In the linear case, the PIM method preserves the stability of the underlying continuous-time feedback control system regardless of the sampling interval. The PIM method is model-based, so the key to the extension is overcoming the absence of an exact discrete-time model for nonlinear systems. In this paper, we propose a PIM method that incorporates feedback linearization. The proposed method can discretize a continuous-time controller designed via feedback linearization, which is numerically verified through an example of a robotic manipulator with a flexible joint.
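For reference, the linearization step the abstract leans on is the standard one (textbook material, not a result of this paper). For an input-affine single-input single-output plant of relative degree r,

\dot{x} = f(x) + g(x)\,u, \qquad y = h(x),

the feedback-linearizing law

u = \frac{v - L_f^{r} h(x)}{L_g L_f^{\,r-1} h(x)}

renders the input-output behavior a chain of r integrators, y^{(r)} = v. A linear continuous-time controller is then designed for the new input v, and it is this controller that a PIM-style digital redesign would discretize.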
// PortForward creates a port forward. func (c *grpcServer) PortForward(ctx context.Context, in *proto.PortForwardRequest) (*proto.PortForwardResponse, error) { req, err := convertToPortForwardRequest(in) if err != nil { return nil, err } pfResp, err := c.service.PortForward(ctx, *req) if err != nil { return nil, err } resp := &proto.PortForwardResponse{ PortForwardID: pfResp.ID, PortNumber: uint32(pfResp.Port), } return resp, nil }
/* * Copyright 2017 HugeGraph Authors * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.baidu.hugegraph.base; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import com.baidu.hugegraph.exception.ToolsException; public class RetryManager extends ToolManager { private static int threadsNum = Math.min(10, Math.max(4, Runtime.getRuntime().availableProcessors() / 2)); private final ExecutorService pool = Executors.newFixedThreadPool(threadsNum); private final Queue<Future<?>> futures = new ConcurrentLinkedQueue<>(); private int retry = 0; public RetryManager(ToolClient.ConnectionInfo info, String type) { super(info, type); } public <R> R retry(Supplier<R> supplier, String description) { int retries = 0; R r = null; do { try { r = supplier.get(); } catch (Exception e) { if (retries == this.retry) { throw new ToolsException( "Exception occurred while %s(after %s retries)", e, description, this.retry); } // Ignore exception and retry continue; } break; } while (retries++ < this.retry); return r; } public void submit(Runnable task) { this.futures.add(this.pool.submit(task)); } public void awaitTasks() { Future<?> future; while ((future = this.futures.poll()) != null) { try { future.get(); } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } } } public void shutdown(String taskType) { this.pool.shutdown(); try { this.pool.awaitTermination(24, TimeUnit.HOURS); } catch (InterruptedException e) { throw new ToolsException( "Exception appears in %s threads", e, taskType); } } public int retry() { return this.retry; } public void retry(int retry) { this.retry = retry; } public static int threadsNum() { return threadsNum; } }
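A usage sketch for the class above. The ConnectionInfo construction and the fetchShard/copyShard helpers are hypothetical placeholders; retry, submit, awaitTasks, and shutdown are the methods defined in RetryManager.

// info: an assumed, already-built ToolClient.ConnectionInfo instance
RetryManager manager = new RetryManager(info, "backup");
manager.retry(3); // allow up to 3 retries per operation

// Retry a flaky call synchronously (fetchShard is a hypothetical helper)...
String result = manager.retry(() -> fetchShard("shard-1"), "fetching shard-1");

// ...or fan work out to the internal pool and wait for completion.
manager.submit(() -> copyShard("shard-2")); // copyShard: hypothetical void helper
manager.submit(() -> copyShard("shard-3"));
manager.awaitTasks();
manager.shutdown("backup");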
package cluster

import (
	"bytes"
	"fmt"
	"github.com/ligao-cloud-native/xwc-controller-agent/cluster/execute"
	"github.com/ligao-cloud-native/xwc-controller-agent/pkg/types"
	"os"
	"strconv"
	"strings"
)

var ClusterName = os.Getenv("PWC_NAME")

var (
	cmdStatusOk     = " 1"
	cmdStatusFailed = " 2"

	logInstallCmd = "2>&1 |tee /opt/install.log"
	// append (-a), so later steps don't truncate the install log
	logInstallAppendCmd = "2>&1 |tee -a /opt/install.log"
	logResetCmd         = "2>&1 | tee /opt/reset.log"
	logJoinCmd          = "2>&1 | tee /opt/join.log"
	logRemoveCmd        = "2>&1 | tee /opt/remove.log"
	logScaleCmd         = "2>&1 | tee /opt/scale.log"

	env            = "export PKG_SERVER=%s && export CLUSTER_NAME=" + ClusterName
	envClusterName = "export CLUSTERNAME=" + ClusterName
	envKubeConfig  = "export KUBECONFIG=/etc/kubernetes/admin.conf && " + envClusterName

	sshAuthCmd = "curl -k %s/sshd.tar.gz | tar zxv -C /tmp/ && chmod +x /tmp/sshd/exec.sh && sh /tmp/sshd/exec.sh"

	etcdCleanCmd       = "curl -k %s/etcd-installer/clean-etcd.sh | sh -s -- -y " + logResetCmd
	etcdDownloadPkgCmd = env + " && curl -k %s/install-etcd | sh " + logInstallCmd
	etcdInstallCmd     = env + " && curl -k %s/install-etcd | %s sh -s init " + logInstallAppendCmd

	k8sInstallCmd   = env + " && curl -k %s/install | %s sh -s init %s " + logInstallCmd
	k8sScaleNodeCmd = env + " && curl -k %s/install | sh -s worker --init %s " + logScaleCmd
	k8sPrepareCmd   = env + " && curl -k %s/install | sh -s prepare %s " + logInstallAppendCmd
	k8sResetCmd     = "curl -k %s/pks-installer/k8s/reset-node.sh | sh -s -- -y " + logResetCmd

	k8sMasterLabelCmd = envKubeConfig + " && /usr/local/bin/kubectl label node %s %s"
	k8sJoinTokenCmd   = `/usr/local/bin/kubeadm token create --print-join-command |awk '{print $3" "$5" "$7}'`
	k8sJoinMasterCmd  = envClusterName + " && cd /tmp/pks-installer/k8s/ && ./install.sh master %s " + logJoinCmd
	k8sJoinWorkerCmd  = envClusterName + " && cd /tmp/pks-installer/k8s/ && ./install.sh worker %s %s " + logJoinCmd

	k8sMasterDeleteCmd = " && /usr/local/bin/kubectl delete node %s"
	k8sMasterDrainCmd  = " && /usr/local/bin/kubectl drain %s --delete-local-data --force --ignore-daemonsets"
	k8sMasterCleanCmd  = envKubeConfig + k8sMasterDrainCmd + k8sMasterDeleteCmd
	k8sNodeCleanCmd    = "curl -k %s/pks-installer/k8s/reset-node.sh | sh -s -- -y " + logRemoveCmd

	callbackCmd        = "curl -s -k %s/pks-installer/k8s/callback.sh |sh -s "
	callbackInstallCmd = callbackCmd + "install " + ClusterName + " %s"
	callbackResetCmd   = callbackCmd + "reset " + ClusterName
	callbackRemoveCmd  = callbackCmd + "remove " + ClusterName
	callbackScaleCmd   = callbackCmd + "scale " + ClusterName
)

func EtcdCmdParam(nodes []execute.ClusterNodes) string {
	cmdParam := ""
	// The index must persist across nodes so hosts become HOST0, HOST1, ...
	// (the original re-declared i inside the loop, yielding HOST0 for every node).
	i := 0
	for _, node := range nodes {
		if node.Role == execute.NodeRoleEtcd {
			cmdParam = cmdParam + " HOST" + strconv.Itoa(i) + "=" + node.IP
			i++
		}
	}
	return cmdParam
}

func CniK8sverRuntimeCmdParam(node []execute.ClusterNodes) string {
	// NOTE: the quoted values below ("networkType", "k8sVersion", ...) are
	// placeholders from the original source, not real lookups.
	// network
	cmd := " --cni " + "networkType" + ":" + "networkVersion"
	cmd = cmd + " --cni-extra-args \"" + "cniExtraArgsKey cniExtraArgsValue" + "\""
	// k8s version
	cmd = cmd + " --k8s-version " + "k8sVersion"
	// runtime
	cmd = cmd + " -rt " + "runtimeType" + " -rv " + "runtimeVersion"
	cmd = cmd + " --runtime-extra-args \"" + "runtimeExtraArgsKey runtimeExtraArgsValue" + "\""
	return cmd
}

func InitNodeDownloadPkgCmd(nodes []execute.ClusterNodes) string {
	cmd := ""
	// network calico_ipip=always
	cmd = cmd + " --calico-ipip"
	// network kube proxy mode
	cmd = cmd + " --kube-proxy-mode " + "kubeProxyMode"
	// load balancer (original placeholder names kept)
	cmd = cmd + " -l " + "loadbanceIP" + " -p " + "loadbancePort"
	// master ip
	for _, node := range nodes {
		if node.Role == execute.NodeRoleMaster {
			cmd = cmd + " -m " + node.IP
		}
	}
	// cert sans
	cmd = cmd + " -san " + "CertSan1" + " -san " + "CertSan2"
	// addon
	cmd = cmd + " -ad " + "addonType1" + "/" + "addon1" + " -ad " + "addonType2" + "/" + "addon2"
	return cmd
}

func CallBackCmd(opt types.OperatorType, optParam interface{}, pkgServer string) string {
	switch opt {
	case types.InstallOperatorType:
		if nodeObj, ok := optParam.(types.Nodes); ok {
			var nodeIp bytes.Buffer
			for _, master := range nodeObj.Masters {
				nodeIp.WriteString(master.IP + "#")
			}
			for _, worker := range nodeObj.Workers {
				nodeIp.WriteString(worker.IP + "#")
			}
			return fmt.Sprintf(callbackInstallCmd, pkgServer, strings.TrimRight(nodeIp.String(), "#"))
		}
		return fmt.Sprintf(callbackInstallCmd, pkgServer)
	case types.ScaleOperatorType:
		if nodeObj, ok := optParam.(types.Nodes); ok {
			var nodeIp bytes.Buffer
			for _, worker := range nodeObj.Workers {
				nodeIp.WriteString(worker.IP + "#")
			}
			return fmt.Sprintf(callbackScaleCmd, pkgServer, strings.TrimRight(nodeIp.String(), "#"))
		}
		// Fall back to the scale callback; the original returned the install
		// callback here, which looks like a copy-paste slip.
		return fmt.Sprintf(callbackScaleCmd, pkgServer)
	case types.ResetOperatorType:
		cmdParam := cmdStatusFailed
		if boolObj, ok := optParam.(bool); ok && boolObj {
			cmdParam = cmdStatusOk
		}
		return fmt.Sprintf(callbackResetCmd+cmdParam, pkgServer)
	case types.RemoveOperatorType:
		cmdParam := cmdStatusFailed
		if boolObj, ok := optParam.(bool); ok && boolObj {
			cmdParam = cmdStatusOk
		}
		return fmt.Sprintf(callbackRemoveCmd+cmdParam, pkgServer)
	}
	return ""
}
package tk import ( "fmt" "github.com/jinzhu/gorm" _ "github.com/jinzhu/gorm/dialects/postgres" "os" ) var db *gorm.DB func init() { var err error params := fmt.Sprintf("host=%s user=%s dbname=%s sslmode=disable", os.Getenv("DB_HOST"), os.Getenv("DB_USER"), os.Getenv("DB_NAME")) db, err = gorm.Open("postgres", params) if err != nil { fmt.Println(err) panic("failed to connect to database") } if os.Getenv("APP_ENV") == "development" { db.LogMode(true) } fmt.Println("Database connection initialized") performMigrations() autoSuggestInit() }
# Returns a valid response when the request's |referrer| matches |referrer_policy|.
def main(request, response):
    referrer = request.headers.get("referer", None)
    referrer_policy = request.GET.first("referrer_policy")
    source_origin = request.GET.first("source_origin")
    is_cross_origin = request.GET.first("is_cross_origin", False)

    response_headers = [("Content-Type", "text/javascript"),
                        ("Access-Control-Allow-Origin", source_origin)]

    # When the referrer policy is "no-referrer", the referrer header shouldn't
    # be sent.
    if referrer_policy == "no-referrer" and not referrer:
        return (200, response_headers, "")

    # When the referrer policy is "origin", the referrer header should contain
    # only the origin. Note that |referrer| contains a trailing slash, while
    # |source_origin| doesn't.
    if referrer_policy == "origin" and referrer == source_origin + "/":
        return (200, response_headers, "")

    # When the referrer policy is "same-origin", the referrer header should be
    # sent only for a same-origin request.
    if referrer_policy == "same-origin":
        if is_cross_origin and not referrer:
            return (200, response_headers, "")
        if not is_cross_origin and referrer:
            return (200, response_headers, "")

    # No expectation matched: fail the request. A bare `(404)` is just the
    # integer 404, so return a full (status, headers, body) tuple instead.
    return (404, response_headers, "")
1 of 6 Oscar Baldizon/Getty Images Luke Walton intimately knows the full value of the three ball. During his two seasons spent as a Golden State Warriors assistant, he saw them pace the league in three-point makes and percentage, all while making a pair of NBA Finals appearances and snapping a 40-year title drought. While he couldn't bring the Splash Brothers with him to SoCal, Walton did have a Warriors-esque offensive blueprint packed in his belongings. "The floor should be spaced with free-flowing ball movement going side-to-side," Walton said in October, per Mark Medina of the Los Angeles Daily News. "If you're a shooter and you're open, I want you to shoot it." Problem is, there just aren't many shooters. The Lakers were, at best, mediocre outside—19th in makes, 22nd in percentage—and their top four marksmen are either already gone (Lou Williams, Jose Calderon) or entering free agency (Nick Young, Tyler Ennis). D'Angelo Russell led the under-contract Lakers with a 35.2 three-point percentage. Reggie Bullock buried 38.4 percent of his long-range looks a year after converting 41.5 percent. The former first-round pick took a while to find his NBA calling, but he made noticeable strides as a substitute sniper for the Detroit Pistons over the past two seasons. That said, they might not spend big to keep him. They'll have to take care of rising 3-and-D stud Kentavious Caldwell-Pope first, and former No. 8 pick Stanley Johnson should get the lion's share of developmental minutes on the wing. Bullock has only played 147 games over four seasons. Even if his shooting attracts suitors, it's almost impossible to imagine his bidding getting out of hand.
package gotau import "time" // Event is the common parts of every event coming from TAU type Event struct { ID string `json:"id"` EventID string `json:"event_id"` EventType string `json:"event_type"` EventSource string `json:"event_source"` Created Time `json:"created"` Origin string `json:"origin"` } // FollowMsg is a message representing a follow event that TAU sends type FollowMsg struct { *Event EventData struct { UserName string `json:"user_name"` UserID string `json:"user_id"` UserLogin string `json:"user_login"` BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` } `json:"event_data"` } // StreamUpdateMsg is a message that represents a stream update event that TAU sends type StreamUpdateMsg struct { *Event EventData struct { Title string `json:"title"` Language string `json:"language"` IsMature bool `json:"is_mature"` CategoryID int `json:"category_id"` CategoryName string `json:"category_name"` BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` } `json:"event_data"` } // CheerMsg is a message that represents a cheer event that TAU sends type CheerMsg struct { *Event EventData struct { IsAnonymous bool `json:"is_anonymous"` UserID string `json:"user_id"` UserName string `json:"user_name"` UserLogin string `json:"user_login"` BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` Bits int `json:"bits"` Message string `json:"message"` } `json:"event_data"` } // RaidMsg is a message that represents a raid event that TAU sends type RaidMsg struct { *Event EventData struct { FromBroadcasterName string `json:"from_broadcaster_user_name"` FromBroadcasterID string `json:"from_broadcaster_user_id"` FromBroadcasterLogin string `json:"from_broadcaster_user_login"` ToBroadcasterName string `json:"to_broadcaster_user_name"` ToBroadcasterID string `json:"to_broadcaster_user_id"` ToBroadcasterLogin string `json:"to_broadcaster_user_login"` Viewers int `json:"viewers"` } `json:"event_data"` } // SubscriptionMsg is a message that represents a subscription event that TAU sends type SubscriptionMsg struct { *Event EventData struct { Type string `json:"type"` Data struct { Topic string `json:"topic"` Message struct { BenefitEndMonth int `json:"benefit_end_month"` UserName string `json:"user_name"` DisplayName string `json:"display_name"` ChannelName string `json:"channel_name"` UserID string `json:"user_id"` ChannelID string `json:"channel_id"` Time time.Time `json:"time"` SubPlan string `json:"sub_plan"` SubPlanName string `json:"sub_plan_name"` Months int `json:"months"` CumulativeMonths int `json:"cumulative_months"` Context string `json:"context"` IsGift bool `json:"is_gift"` MultiMonthDuration int `json:"multi_month_duration"` StreakMonths int `json:"streak_months"` SubMessage struct { Message string `json:"message"` Emotes []struct { Start int `json:"start"` End int `json:"end"` ID int `json:"id"` } `json:"emotes"` } `json:"sub_message"` } `json:"message"` } `json:"data"` } `json:"event_data"` } // HypeTrainBeginMsg is a message that represents a hype train begin event that TAU sends type HypeTrainBeginMsg struct { *Event EventData struct { BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string 
`json:"broadcaster_user_login"` Total int `json:"total"` Progress int `json:"progress"` Goal int `json:"goal"` StartedAt time.Time `json:"started_at"` ExpiresAt time.Time `json:"expires_at"` TopContributions []struct { UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` Type string `json:"type"` Total int `json:"total"` } `json:"top_contributions"` LastContribution struct { UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` Type string `json:"type"` Total int `json:"total"` } `json:"last_contribution"` } `json:"event_data"` } // HypeTrainProgressMsg is a message that represents a hype train progress event that TAU sends type HypeTrainProgressMsg struct { *Event EventData struct { BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` Level int `json:"level"` Total int `json:"total"` Progress int `json:"progress"` Goal int `json:"goal"` StartedAt time.Time `json:"started_at"` ExpiresAt time.Time `json:"expires_at"` TopContributions []struct { UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` Type string `json:"type"` Total int `json:"total"` } `json:"top_contributions"` LastContribution struct { UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` Type string `json:"type"` Total int `json:"total"` } `json:"last_contribution"` } `json:"event_data"` } // HypeTrainEndedMsg is a message that represents a hype train end event that TAU sends type HypeTrainEndedMsg struct { *Event EventData struct { BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` Level int `json:"level"` Total int `json:"total"` Progress int `json:"progress"` StartedAt time.Time `json:"started_at"` EndedAt time.Time `json:"ended_at"` CooldownEndsAt time.Time `json:"cooldown_ends_at"` TopContributions []struct { UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` Type string `json:"type"` Total int `json:"total"` } `json:"top_contributions"` } `json:"event_data"` } // StreamOnlineMsg is a message that represents a stream online event that TAU sends type StreamOnlineMsg struct { *Event EventData struct { ID string `json:"id"` BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` Type string `json:"type"` StartedAt time.Time `json:"started_at"` } `json:"event_data"` } // StreamOfflineMsg is a message that represents a stream offline event that TAU sends type StreamOfflineMsg struct { *Event EventData struct { BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` } `json:"event_data"` } // PointsRedemptionMsg is a message that represents a points redemption event that TAU sends type PointsRedemptionMsg struct { *Event EventData struct { BroadcasterID string `json:"broadcaster_user_id"` BroadcasterName string `json:"broadcaster_user_name"` BroadcasterLogin string `json:"broadcaster_user_login"` ID string `json:"id"` UserID string `json:"user_id"` UserLogin string `json:"user_login"` UserName string `json:"user_name"` UserInput string `json:"user_input"` Status 
string `json:"status"` RedeemedAt time.Time `json:"redeemed_at"` Reward struct { ID string `json:"id"` Title string `json:"title"` Prompt string `json:"prompt"` Cost int `json:"cost"` } `json:"reward"` } `json:"event_data"` }
import * as React from 'react'
import {
  AppComponentType,
  AppProps,
  DefaultAppIProps,
  NextAppContext,
} from 'next/app'

import { QueryContextProvider } from '../QueryContextProvider'

type NextProps = AppProps & DefaultAppIProps

interface QueryProps {
  query: any
}

export const nextWithQuery = (Application: AppComponentType<NextProps>) => {
  return class AppWithQuery extends React.Component<NextProps & QueryProps> {
    public static async getInitialProps(appContext: NextAppContext) {
      let appProps = {}

      if (typeof Application.getInitialProps === 'function') {
        appProps = await Application.getInitialProps(appContext)
      }

      const { query } = appContext.router

      return { ...appProps, query }
    }

    public render() {
      const { query, ...appProps } = this.props

      return (
        <QueryContextProvider initial={query}>
          <Application {...appProps} />
        </QueryContextProvider>
      )
    }
  }
}
/*
 * Timer.h
 *
 *  Created on: 16.10.2015
 *      Author: cem
 */

#ifndef OSAL_INCLUDE_TIMER_H_
#define OSAL_INCLUDE_TIMER_H_

#include "scmRTOS.h"

namespace OSAL
{

class Timer
{
public:
	// Suspend the calling process for the given number of microseconds.
	static void Sleep(uint16_t timeout)
	{
		OS::TBaseProcess::sleep(delay_us(timeout));
	}

	// Suspend the calling process for the given number of milliseconds.
	static void SleepMS(uint16_t ms)
	{
		OS::TBaseProcess::sleep(delay_ms(ms));
	}

	// Suspend the calling process for the given number of seconds.
	static void SleepSeconds(uint16_t second)
	{
		OS::TBaseProcess::sleep(delay_sec(second));
	}

	// Raw RTOS tick count since boot.
	static uint_fast32_t GetTime()
	{
		return OS::get_tick_count();
	}

	// Milliseconds since boot, derived from the tick count.
	static uint_fast32_t GetBootMS()
	{
		return OS::get_tick_count() / delay_ms(1);
	}

	// Microseconds since boot, derived from the tick count.
	static uint_fast32_t GetBootUS()
	{
		return OS::get_tick_count() / delay_us(1);
	}
};

} /* namespace OSAL */

#endif /* OSAL_INCLUDE_TIMER_H_ */
package repository

import (
	"cloud.google.com/go/firestore"
	"fmt"
	"github.com/golang/glog"
	"github.com/google/uuid"
	"github.com/mitchellh/mapstructure"
	"google.golang.org/api/iterator"
	"log"
	"net/http"
	"signmeup/internal/config"
	"signmeup/internal/firebase"
	"signmeup/internal/models"
	"signmeup/internal/qerrors"
	"strings"

	firebaseAuth "firebase.google.com/go/auth"
)

func (fr *FirebaseRepository) initializeUserProfilesListener() {
	handleDocs := func(docs []*firestore.DocumentSnapshot) error {
		newProfiles := make(map[string]*models.Profile)
		for _, doc := range docs {
			if !doc.Exists() {
				continue
			}
			var c models.Profile
			err := mapstructure.Decode(doc.Data(), &c)
			if err != nil {
				log.Panicf("Error destructuring document: %v", err)
				return err
			}
			newProfiles[doc.Ref.ID] = &c
		}

		fr.profilesLock.Lock()
		defer fr.profilesLock.Unlock()
		fr.profiles = newProfiles
		return nil
	}

	done := make(chan bool)
	query := fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Query
	go func() {
		err := fr.createCollectionInitializer(query, &done, handleDocs)
		if err != nil {
			log.Panicf("error creating user profiles collection listener: %v\n", err)
		}
	}()
	<-done
}

// VerifySessionCookie verifies that the given session cookie is valid and returns the associated User if valid.
func (fr *FirebaseRepository) VerifySessionCookie(sessionCookie *http.Cookie) (*models.User, error) {
	decoded, err := fr.authClient.VerifySessionCookieAndCheckRevoked(firebase.Context, sessionCookie.Value)
	if err != nil {
		return nil, fmt.Errorf("error verifying cookie: %v\n", err)
	}

	user, err := fr.GetUserByID(decoded.UID)
	if err != nil {
		return nil, fmt.Errorf("error getting user from cookie: %v\n", err)
	}

	return user, nil
}

func (fr *FirebaseRepository) GetUserByID(id string) (*models.User, error) {
	if err := validateID(id); err != nil {
		return nil, err
	}

	fbUser, err := fr.authClient.GetUser(firebase.Context, id)
	if err != nil {
		return nil, qerrors.UserNotFoundError
	}

	// TODO: Refactor email verification and user profile creation into separate function.
	// Check the Firebase user's email against the list of allowed domains.
	if len(config.Config.AllowedEmailDomains) > 0 {
		domain := strings.Split(fbUser.Email, "@")[1]
		if !contains(config.Config.AllowedEmailDomains, domain) {
			// invalid email domain, delete the user from Firebase Auth
			_ = fr.authClient.DeleteUser(firebase.Context, fbUser.UID)
			return nil, qerrors.InvalidEmailError
		}
	}

	profile, err := fr.getUserProfile(fbUser.UID)
	if err != nil {
		// no profile for the user found, create one.
		profile = &models.Profile{
			DisplayName: fbUser.DisplayName,
			Email:       fbUser.Email,
			PhotoURL:    fbUser.PhotoURL,
			// if there are no registered users, make the first one an admin
			IsAdmin: fr.getUserCount() == 0,
		}

		_, err = fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(fbUser.UID).Set(firebase.Context, map[string]interface{}{
			"coursePermissions": make(map[string]models.CoursePermission),
			"displayName":       profile.DisplayName,
			"email":             profile.Email,
			"photoUrl":          profile.PhotoURL,
			"meetingLink":       "",
			"pronouns":          "",
			"id":                fbUser.UID,
			"isAdmin":           profile.IsAdmin,
			"notifications":     make([]models.Notification, 0),
		})
		if err != nil {
			return nil, fmt.Errorf("error creating user profile: %v\n", err)
		}

		// Go through each of the invites and execute them.
		iter := fr.firestoreClient.Collection(models.FirestoreInvitesCollection).Where("email", "==", fbUser.Email).Documents(firebase.Context)
		for {
			// Get this document.
doc, err := iter.Next() if err == iterator.Done { break } if err != nil { return nil, err } // Decode this document. var invite models.CourseInvite err = mapstructure.Decode(doc.Data(), &invite) if err != nil { return nil, err } // Execute the invite. err = fr.AddPermission(&models.AddCoursePermissionRequest{ CourseID: invite.CourseID, Email: invite.Email, Permission: invite.Permission, }) if err != nil { glog.Warningf("there was a problem adding course permission to a user: %v\n", err) } // Delete the doc. _, err = doc.Ref.Delete(firebase.Context) if err != nil { glog.Warningf("there was a problem deleting invite: %v\n", err) } } } return fbUserToUserRecord(fbUser, profile), nil } // GetUserByEmail retrieves the User associated with the given email. func (fr *FirebaseRepository) GetUserByEmail(email string) (*models.User, error) { userID, err := fr.GetIDByEmail(email) if err != nil { return nil, err } return fr.GetUserByID(userID) } func (fr *FirebaseRepository) GetIDByEmail(email string) (string, error) { // Get user by email. iter := fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Where("email", "==", email).Documents(firebase.Context) doc, err := iter.Next() if err != nil { return "", err } // Cast. data := doc.Data() return data["id"].(string), nil } func (fr *FirebaseRepository) UpdateUser(r *models.UpdateUserRequest) error { if r.DisplayName == "" { return qerrors.InvalidDisplayName } _, err := fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(r.UserID).Update(firebase.Context, []firestore.Update{ { Path: "displayName", Value: r.DisplayName, }, { Path: "pronouns", Value: r.Pronouns, }, { Path: "meetingLink", Value: r.MeetingLink, }, }) return err } // MakeAdminByEmail makes the user with the given email a site admin. 
func (fr *FirebaseRepository) MakeAdminByEmail(u *models.MakeAdminByEmailRequest) error { user, err := fr.GetUserByEmail(u.Email) if err != nil { return err } _, err = fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(user.ID).Update(firebase.Context, []firestore.Update{ { Path: "isAdmin", Value: u.IsAdmin, }, }) return err } func (fr *FirebaseRepository) Count() int { fr.profilesLock.RLock() defer fr.profilesLock.RUnlock() return len(fr.profiles) } func (fr *FirebaseRepository) List() ([]*models.User, error) { var users []*models.User iter := fr.authClient.Users(firebase.Context, "") for { fbUser, err := iter.Next() if err == iterator.Done { break } if err != nil { return nil, fmt.Errorf("error listing user_mgt: %s\n", err) } profile, err := fr.getUserProfile(fbUser.UID) if err != nil { return nil, err } user := fbUserToUserRecord(fbUser.UserRecord, profile) users = append(users, user) } return users, nil } // Operations func (fr *FirebaseRepository) AddNotification(userID string, notification models.Notification) error { notification.ID = uuid.New().String() _, err := fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(userID).Update(firebase.Context, []firestore.Update{ { Path: "notifications", Value: firestore.ArrayUnion(notification), }, }) return err } func (fr *FirebaseRepository) ClearNotification(c *models.ClearNotificationRequest) error { user, err := fr.GetUserByID(c.UserID) if err != nil { return err } newNotifications := make([]models.Notification, 0) for _, v := range user.Notifications { if v.ID != c.NotificationID { newNotifications = append(newNotifications, v) } } _, err = fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(c.UserID).Update(firebase.Context, []firestore.Update{ { Path: "notifications", Value: newNotifications, }, }) return err } func (fr *FirebaseRepository) ClearAllNotifications(c *models.ClearAllNotificationsRequest) error { _, err := fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(c.UserID).Update(firebase.Context, []firestore.Update{ { Path: "notifications", Value: make([]models.Notification, 0), }, }) return err } // Validate checks a CreateUserRequest struct for errors. func validate(u *models.CreateUserRequest) error { if err := validateEmail(u.Email); err != nil { return err } if err := validatePassword(u.Password); err != nil { return err } if err := validateDisplayName(u.DisplayName); err != nil { return err } return nil } func (fr *FirebaseRepository) Create(user *models.CreateUserRequest) (*models.User, error) { if err := validate(user); err != nil { return nil, err } // Create a user in Firebase Auth. u := (&firebaseAuth.UserToCreate{}).Email(user.Email).Password(user.Password) fbUser, err := fr.authClient.CreateUser(firebase.Context, u) if err != nil { return nil, fmt.Errorf("error creating user: %v\n", err) } // Create a user profile in Firestore. profile := &models.Profile{ DisplayName: user.DisplayName, Email: user.Email, } _, err = fr.firestoreClient.Collection(models.FirestoreUserProfilesCollection).Doc(fbUser.UID).Set(firebase.Context, map[string]interface{}{ "permissions": []string{}, "displayName": profile.DisplayName, "email": profile.Email, "id": fbUser.UID, }) if err != nil { return nil, fmt.Errorf("error creating user profile: %v\n", err) } return fbUserToUserRecord(fbUser, profile), nil } func (fr *FirebaseRepository) Delete(id string) error { // Delete account from Firebase Authentication. 
err := fr.authClient.DeleteUser(firebase.Context, id) if err != nil { return qerrors.DeleteUserError } // Delete profile from user_profiles Firestore collection. _, err = fr.firestoreClient.Collection("user_profiles").Doc(id).Delete(firebase.Context) if err != nil { return qerrors.DeleteUserError } return nil } // Helpers // fbUserToUserRecord combines a Firebase UserRecord and a Profile into a User func fbUserToUserRecord(fbUser *firebaseAuth.UserRecord, profile *models.Profile) *models.User { // TODO: Refactor such that displayName, email, and profile photo are pulled from firebase auth and not the user profile stored in Firestore. return &models.User{ ID: fbUser.UID, Profile: profile, Disabled: fbUser.Disabled, CreationTimestamp: fbUser.UserMetadata.CreationTimestamp, LastLogInTimestamp: fbUser.UserMetadata.LastLogInTimestamp, } } // getUserProfile gets the Profile from the userProfiles map corresponding to the provided user ID. func (fr *FirebaseRepository) getUserProfile(id string) (*models.Profile, error) { fr.profilesLock.RLock() defer fr.profilesLock.RUnlock() if val, ok := fr.profiles[id]; ok { return val, nil } else { return nil, fmt.Errorf("No profile found for ID %v\n", id) } } // getUserCount returns the number of user profiles. func (fr *FirebaseRepository) getUserCount() int { fr.profilesLock.RLock() defer fr.profilesLock.RUnlock() return len(fr.profiles) } func contains(s []string, str string) bool { for _, v := range s { if v == str { return true } } return false } // TODO: Maybe find a better place for this? func validateEmail(email string) error { if email == "" { return fmt.Errorf("email must be a non-empty string") } if parts := strings.Split(email, "@"); len(parts) != 2 || parts[0] == "" || parts[1] == "" { return fmt.Errorf("malformed email string: %q", email) } return nil } func validatePassword(val string) error { if len(val) < 6 { return fmt.Errorf("password must be a string at least 6 characters long") } return nil } func validateDisplayName(val string) error { if val == "" { return fmt.Errorf("display name must be a non-empty string") } return nil } func validateID(id string) error { if id == "" { return fmt.Errorf("id must be a non-empty string") } if len(id) > 128 { return fmt.Errorf("id string must not be longer than 128 characters") } return nil }
class DownloadResult:
    """
    This class represents the result of downloading a single spectrum.
    It contains information about the spectrum's final name, its download
    link, and the exception raised in case the download failed.
    """

    def __init__(self, name, url, exception=None):
        """
        Initializes the instance from the passed arguments.

        :param name: Final expected name of the spectrum on the filesystem.
        :param url: URL address the download was initiated from.
        :param exception: Exception that was raised during the spectrum
            download. A non-None exception signals that the download failed;
            if None is passed, the spectrum is considered successfully
            downloaded.
        """
        self.name = name
        self.url = url
        self.exception = exception

    @property
    def success(self):
        """Property that signals download success."""
        return self.exception is None
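# A minimal usage sketch (filenames and URLs are illustrative only, not from
# the original project):
#
#   results = [
#       DownloadResult('spectrum-0001.fits', 'https://example.org/spectra/1'),
#       DownloadResult('spectrum-0002.fits', 'https://example.org/spectra/2',
#                      exception=IOError('connection reset')),
#   ]
#   failed = [r for r in results if not r.success]
#   for r in failed:
#       print('%s (%s) failed: %s' % (r.name, r.url, r.exception))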
/*
 * Copyright 1995-2020 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <string.h>
#include "apps.h"
#include "progs.h"
#include <openssl/bio.h>
#include <openssl/err.h>
#include <openssl/bn.h>
#include <openssl/dsa.h>
#include <openssl/dh.h>
#include <openssl/x509.h>
#include <openssl/pem.h>
#include <openssl/core_names.h>
#include <openssl/core_dispatch.h>
#include <openssl/param_build.h>
#include <openssl/encoder.h>
#include <openssl/decoder.h>

#define DEFBITS 2048

static EVP_PKEY *dsa_to_dh(EVP_PKEY *dh);
static int gendh_cb(EVP_PKEY_CTX *ctx);

typedef enum OPTION_choice {
    OPT_ERR = -1, OPT_EOF = 0, OPT_HELP,
    OPT_INFORM, OPT_OUTFORM, OPT_IN, OPT_OUT,
    OPT_ENGINE, OPT_CHECK, OPT_TEXT, OPT_NOOUT,
    OPT_DSAPARAM, OPT_2, OPT_3, OPT_5,
    OPT_R_ENUM, OPT_PROV_ENUM
} OPTION_CHOICE;

const OPTIONS dhparam_options[] = {
    {OPT_HELP_STR, 1, '-', "Usage: %s [options] [numbits]\n"},

    OPT_SECTION("General"),
    {"help", OPT_HELP, '-', "Display this summary"},
    {"check", OPT_CHECK, '-', "Check the DH parameters"},
#if !defined(OPENSSL_NO_DSA) || !defined(OPENSSL_NO_DEPRECATED_3_0)
    {"dsaparam", OPT_DSAPARAM, '-',
     "Read or generate DSA parameters, convert to DH"},
#endif
#ifndef OPENSSL_NO_ENGINE
    {"engine", OPT_ENGINE, 's', "Use engine e, possibly a hardware device"},
#endif

    OPT_SECTION("Input"),
    {"in", OPT_IN, '<', "Input file"},
    {"inform", OPT_INFORM, 'F', "Input format, DER or PEM"},

    OPT_SECTION("Output"),
    {"out", OPT_OUT, '>', "Output file"},
    {"outform", OPT_OUTFORM, 'F', "Output format, DER or PEM"},
    {"text", OPT_TEXT, '-', "Print a text form of the DH parameters"},
    {"noout", OPT_NOOUT, '-', "Don't output any DH parameters"},
    {"2", OPT_2, '-', "Generate parameters using 2 as the generator value"},
    {"3", OPT_3, '-', "Generate parameters using 3 as the generator value"},
    {"5", OPT_5, '-', "Generate parameters using 5 as the generator value"},

    OPT_R_OPTIONS,
    OPT_PROV_OPTIONS,

    OPT_PARAMETERS(),
    {"numbits", 0, 0, "Number of bits if generating parameters (optional)"},
    {NULL}
};

int dhparam_main(int argc, char **argv)
{
    BIO *in = NULL, *out = NULL;
    EVP_PKEY *pkey = NULL, *tmppkey = NULL;
    EVP_PKEY_CTX *ctx = NULL;
    char *infile = NULL, *outfile = NULL, *prog;
    ENGINE *e = NULL;
    int dsaparam = 0;
    int text = 0, ret = 1, num = 0, g = 0;
    int informat = FORMAT_PEM, outformat = FORMAT_PEM, check = 0, noout = 0;
    OPTION_CHOICE o;

    prog = opt_init(argc, argv, dhparam_options);
    while ((o = opt_next()) != OPT_EOF) {
        switch (o) {
        case OPT_EOF:
        case OPT_ERR:
 opthelp:
            BIO_printf(bio_err, "%s: Use -help for summary.\n", prog);
            goto end;
        case OPT_HELP:
            opt_help(dhparam_options);
            ret = 0;
            goto end;
        case OPT_INFORM:
            if (!opt_format(opt_arg(), OPT_FMT_PEMDER, &informat))
                goto opthelp;
            break;
        case OPT_OUTFORM:
            if (!opt_format(opt_arg(), OPT_FMT_PEMDER, &outformat))
                goto opthelp;
            break;
        case OPT_IN:
            infile = opt_arg();
            break;
        case OPT_OUT:
            outfile = opt_arg();
            break;
        case OPT_ENGINE:
            e = setup_engine(opt_arg(), 0);
            break;
        case OPT_CHECK:
            check = 1;
            break;
        case OPT_TEXT:
            text = 1;
            break;
        case OPT_DSAPARAM:
            dsaparam = 1;
            break;
        case OPT_2:
            g = 2;
            break;
        case OPT_3:
            g = 3;
            break;
        case OPT_5:
            g = 5;
            break;
        case OPT_NOOUT:
            noout = 1;
            break;
        case OPT_R_CASES:
            if (!opt_rand(o))
                goto
end; break; case OPT_PROV_CASES: if (!opt_provider(o)) goto end; break; } } /* One optional argument, bitsize to generate. */ argc = opt_num_rest(); argv = opt_rest(); if (argc == 1) { if (!opt_int(argv[0], &num) || num <= 0) goto opthelp; } else if (argc != 0) { goto opthelp; } if (g && !num) num = DEFBITS; if (dsaparam && g) { BIO_printf(bio_err, "Error, generator may not be chosen for DSA parameters\n"); goto end; } out = bio_open_default(outfile, 'w', outformat); if (out == NULL) goto end; /* DH parameters */ if (num && !g) g = 2; if (num) { const char *alg = dsaparam ? "DSA" : "DH"; ctx = EVP_PKEY_CTX_new_from_name(NULL, alg, NULL); if (ctx == NULL) { BIO_printf(bio_err, "Error, %s param generation context allocation failed\n", alg); goto end; } EVP_PKEY_CTX_set_cb(ctx, gendh_cb); EVP_PKEY_CTX_set_app_data(ctx, bio_err); BIO_printf(bio_err, "Generating %s parameters, %d bit long %sprime\n", alg, num, dsaparam ? "" : "safe "); if (!EVP_PKEY_paramgen_init(ctx)) { BIO_printf(bio_err, "Error, unable to initialise %s parameters\n", alg); goto end; } if (dsaparam) { if (!EVP_PKEY_CTX_set_dsa_paramgen_bits(ctx, num)) { BIO_printf(bio_err, "Error, unable to set DSA prime length\n"); goto end; } } else { if (!EVP_PKEY_CTX_set_dh_paramgen_prime_len(ctx, num)) { BIO_printf(bio_err, "Error, unable to set DH prime length\n"); goto end; } if (!EVP_PKEY_CTX_set_dh_paramgen_generator(ctx, g)) { BIO_printf(bio_err, "Error, unable to set generator\n"); goto end; } } if (!EVP_PKEY_paramgen(ctx, &tmppkey)) { BIO_printf(bio_err, "Error, %s generation failed\n", alg); goto end; } EVP_PKEY_CTX_free(ctx); ctx = NULL; if (dsaparam) { pkey = dsa_to_dh(tmppkey); if (pkey == NULL) goto end; EVP_PKEY_free(tmppkey); } else { pkey = tmppkey; } tmppkey = NULL; } else { OSSL_DECODER_CTX *decoderctx = NULL; const char *keytype = "DH"; int done; in = bio_open_default(infile, 'r', informat); if (in == NULL) goto end; do { /* * We assume we're done unless we explicitly want to retry and set * this to 0 below. */ done = 1; /* * We set NULL for the keytype to allow any key type. We don't know * if we're going to get DH or DHX (or DSA in the event of dsaparam). * We check that we got one of those key types afterwards. */ decoderctx = OSSL_DECODER_CTX_new_by_EVP_PKEY(&tmppkey, (informat == FORMAT_ASN1) ? "DER" : "PEM", NULL, (informat == FORMAT_ASN1) ? keytype : NULL, OSSL_KEYMGMT_SELECT_DOMAIN_PARAMETERS, NULL, NULL); if (decoderctx != NULL && !OSSL_DECODER_from_bio(decoderctx, in) && informat == FORMAT_ASN1 && strcmp(keytype, "DH") == 0) { /* * When reading DER we explicitly state the expected keytype * because, unlike PEM, there is no header to declare what * the contents of the DER file are. The decoders just try * and guess. Unfortunately with DHX key types they may guess * wrong and think we have a DSA keytype. Therefore we try * both DH and DHX sequentially. */ keytype = "DHX"; /* * BIO_reset() returns 0 for success for file BIOs only!!! 
* This won't work for stdin (and never has done) * TODO: We should fix this at some point */ if (BIO_reset(in) == 0) done = 0; } OSSL_DECODER_CTX_free(decoderctx); } while (!done); if (tmppkey == NULL) { BIO_printf(bio_err, "Error, unable to load parameters\n"); goto end; } if (dsaparam) { if (!EVP_PKEY_is_a(tmppkey, "DSA")) { BIO_printf(bio_err, "Error, unable to load DSA parameters\n"); goto end; } pkey = dsa_to_dh(tmppkey); if (pkey == NULL) goto end; } else { if (!EVP_PKEY_is_a(tmppkey, "DH") && !EVP_PKEY_is_a(tmppkey, "DHX")) { BIO_printf(bio_err, "Error, unable to load DH parameters\n"); goto end; } pkey = tmppkey; tmppkey = NULL; } } if (text) EVP_PKEY_print_params(out, pkey, 4, NULL); if (check) { ctx = EVP_PKEY_CTX_new_from_pkey(NULL, pkey, NULL); if (ctx == NULL) { BIO_printf(bio_err, "Error, failed to check DH parameters\n"); goto end; } if (!EVP_PKEY_param_check(ctx)) { BIO_printf(bio_err, "Error, invalid parameters generated\n"); goto end; } BIO_printf(bio_err, "DH parameters appear to be ok.\n"); } if (!noout) { OSSL_ENCODER_CTX *ectx = OSSL_ENCODER_CTX_new_by_EVP_PKEY(pkey, OSSL_KEYMGMT_SELECT_DOMAIN_PARAMETERS, outformat == FORMAT_ASN1 ? "DER" : "PEM", NULL, NULL); if (ectx == NULL || !OSSL_ENCODER_to_bio(ectx, out)) { OSSL_ENCODER_CTX_free(ectx); BIO_printf(bio_err, "Error, unable to write DH parameters\n"); goto end; } OSSL_ENCODER_CTX_free(ectx); } ret = 0; end: if (ret != 0) ERR_print_errors(bio_err); BIO_free(in); BIO_free_all(out); EVP_PKEY_free(pkey); EVP_PKEY_free(tmppkey); EVP_PKEY_CTX_free(ctx); release_engine(e); return ret; } /* * Historically we had the low level call DSA_dup_DH() to do this. * That is now deprecated with no replacement. Since we still need to do this * for backwards compatibility reasons, we do it "manually". */ static EVP_PKEY *dsa_to_dh(EVP_PKEY *dh) { OSSL_PARAM_BLD *tmpl = NULL; OSSL_PARAM *params = NULL; BIGNUM *bn_p = NULL, *bn_q = NULL, *bn_g = NULL; EVP_PKEY_CTX *ctx = NULL; EVP_PKEY *pkey = NULL; if (!EVP_PKEY_get_bn_param(dh, OSSL_PKEY_PARAM_FFC_P, &bn_p) || !EVP_PKEY_get_bn_param(dh, OSSL_PKEY_PARAM_FFC_Q, &bn_q) || !EVP_PKEY_get_bn_param(dh, OSSL_PKEY_PARAM_FFC_G, &bn_g)) { BIO_printf(bio_err, "Error, failed to set DH parameters\n"); goto err; } if ((tmpl = OSSL_PARAM_BLD_new()) == NULL || !OSSL_PARAM_BLD_push_BN(tmpl, OSSL_PKEY_PARAM_FFC_P, bn_p) || !OSSL_PARAM_BLD_push_BN(tmpl, OSSL_PKEY_PARAM_FFC_Q, bn_q) || !OSSL_PARAM_BLD_push_BN(tmpl, OSSL_PKEY_PARAM_FFC_G, bn_g) || (params = OSSL_PARAM_BLD_to_param(tmpl)) == NULL) { BIO_printf(bio_err, "Error, failed to set DH parameters\n"); goto err; } ctx = EVP_PKEY_CTX_new_from_name(NULL, "DHX", NULL); if (ctx == NULL || !EVP_PKEY_fromdata_init(ctx) || !EVP_PKEY_fromdata(ctx, &pkey, EVP_PKEY_KEY_PARAMETERS, params)) { BIO_printf(bio_err, "Error, failed to set DH parameters\n"); goto err; } err: EVP_PKEY_CTX_free(ctx); OSSL_PARAM_BLD_free_params(params); OSSL_PARAM_BLD_free(tmpl); BN_free(bn_p); BN_free(bn_q); BN_free(bn_g); return pkey; } static int gendh_cb(EVP_PKEY_CTX *ctx) { int p = EVP_PKEY_CTX_get_keygen_info(ctx, 0); BIO *b = EVP_PKEY_CTX_get_app_data(ctx); static const char symbols[] = ".+*\n"; char c = (p >= 0 && (size_t)p < sizeof(symbols) - 1) ? symbols[p] : '?'; BIO_write(b, &c, 1); (void)BIO_flush(b); return 1; }
/** * This {@link BlockStateMock} represents a {@link TileState} which is capable of storing persistent data using a * {@link PersistentDataContainerMock}. * * @author TheBusyBiscuit * */ public abstract class TileStateMock extends BlockStateMock implements TileState { private final PersistentDataContainerMock container; public TileStateMock(@NotNull Material material) { super(material); this.container = new PersistentDataContainerMock(); } protected TileStateMock(@NotNull Block block) { super(block); this.container = new PersistentDataContainerMock(); } protected TileStateMock(@NotNull TileStateMock state) { super(state); this.container = new PersistentDataContainerMock(state.container); } @Override public PersistentDataContainer getPersistentDataContainer() { return container; } @Override public abstract BlockState getSnapshot(); }
This is the third and final part of a three-part series on designing and building a barcode scanner using a Raspberry Pi Zero W.

Raspberry Pi Zero Handheld Barcode Scanner Part 1

Raspberry Pi Zero Handheld Barcode Scanner Part 2

In the previous blog post the circuits were finished and a case was designed and sent for printing with Shapeways. The case arrived within a couple of weeks and overall, I found the service from Shapeways to be excellent. They sent us an email a few days after ordering saying that the case had banding lines along the surfaces and asked if we still wanted it. The design had several surfaces with very shallow curves, and as 3D printers work by laying down thin layers of plastic on top of each other, banding on those surfaces was to be expected, so we told them that it would be fine and they shipped the case to us.

When the case arrived, we found a couple of small problems with our CAD design. One half of the case had a flange around the edge that was supposed to sit inside the other half when they are put together to keep the edges aligned. On one side of the case the flange lined up perfectly, but I made a mistake with the alignment of the flange on the other side and it was about 0.5 mm out, so the case would not close tightly together. A sharp knife solved this problem. The second problem was with the battery cover. I made the tolerances between the case and the battery cover too tight and they would not clip together properly, but a rub around the edges with a bit of sandpaper solved that issue. Other than those problems everything worked fine. The internal mounting holes all aligned with the PCBs and the LCD display lined up with the hole in the top.

We used a piece of 3 mm red transparent acrylic for the front of the case where the laser shines through. Once everything was assembled, we found that, where we had put the laser PCB flat inside the case, some of the laser beam was reflecting back off the acrylic into the sensor, stopping it from reading barcodes, so we put some spacers between the case and the front of the scanner PCB to offset the angle. This meant that the laser now pointed downwards slightly and any internal reflections off the acrylic would not be bounced back into the sensor, allowing it to work correctly. After we found this issue, I went back to the case the scanner assembly was originally removed from and found that its red acrylic cover was also offset from 90°. If I had noticed that before designing the case, I would have made the front mounting posts for the PCB longer.

We added a red LED to the top of the case connected to the 5V pin on the Raspberry Pi. This was needed because when the Raspberry Pi is shut down it turns off the backlight on the LCD, so there was no way of seeing that the barcode scanner was still switched on other than looking at the direction of the slide switch on the side. As it would be easy to accidentally leave the device switched on, it was decided that an LED on top would make it easier to see if it was still powered.

With the case assembled, the next stage was to design the software to run on the barcode scanner. The original plan was to use Apache and run a web application on the Raspberry Pi, with a web browser displaying everything on the LCD. We got quite a way into building the project this way before finding that the performance of the Raspberry Pi Zero was too slow to make a usable user interface.
The refresh rate of the browser when scrolling was less than one frame a second, so if you tried to scroll up through a list of orders you would swipe your finger upwards and a couple of seconds later the screen would update to show that you had scrolled too far. As this would be unusable for a device in regular use, we decided to scrap the web application idea and build a normal GUI application to run on the Raspberry Pi. An application written in assembly language or C would have given the best performance on the Raspberry Pi, but as I don't have any experience designing GUI applications in C, and the only assembly language programming I have done is flashing an LED on a PIC microcontroller, it was decided that a Python program using TkInter for the GUI would be a better approach: slower in performance than a compiled C program, but quicker than a web application.

The first job was to create a flow chart of how the software would work.

The software is designed to integrate into our ecommerce and inventory manager and has three main sections: orders, products and parts. A tab bar at the top of the screen allows you to select which section you want, but when you scan a barcode it will automatically detect the type and put you into the correct section of the application (a simple sketch of this kind of routing is shown below).

The orders section displays a list of the outstanding orders on our ecommerce website. Each invoice we print has a barcode on it that contains the order number, so scanning the barcode tells the application which invoice it needs to retrieve from the server. Once the order details are received it displays the details for the order with a list of the products that have been bought. Each product is then scanned as it is picked for the order and the application updates to show whether the correct number of items has been picked. For items that do not have a barcode, like connectors, you can manually enter the number picked using a numeric keypad. Once the order is ready, a save button sends data back to the server telling it that the order is processed and ready for dispatch.

The products section shows a list of the products for sale on the website. When a product barcode is scanned, the name and ID of that product are displayed, along with any options that are available, such as size or colour, and the number of each option in stock. You can then add extra stock to the inventory or update the stock level manually. This section is designed so that when we build new stock we can easily add it into the website by scanning the product's barcode and entering the number we have built.

The parts section still needs to be completed, but it is designed to integrate with the internal intranet that we use to keep track of the components used when building our products. When a new roll of resistors or chips arrives, we will be able to scan its barcode and enter that number into the inventory. The intranet is still under development, but the plan is to make a system integrated with the barcode scanner and our pick-and-place machine, so the scanner adds stock to the system and the pick-and-place machine automatically deducts parts from the stock every time it picks a part to go on a product. That way we will always know how many of every component we have in stock and can forecast when we need to order more.
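The sketch below illustrates the kind of barcode-type routing described above. It is only an illustration: the ORD-/PRD- prefixes and the handler names are invented for the example, as the post does not describe the actual barcode format.

# Hypothetical routing logic; prefixes and handlers are made up for this sketch.
def show_order(order_id):
    print('orders section:', order_id)

def show_product(product_id):
    print('products section:', product_id)

def show_part(part_code):
    print('parts section:', part_code)

def route_scan(code):
    if code.startswith('ORD-'):
        show_order(code[4:])      # invoice barcode -> orders section
    elif code.startswith('PRD-'):
        show_product(code[4:])    # product barcode -> products section
    else:
        show_part(code)           # anything else -> parts section

route_scan('ORD-10042')  # -> orders section: 10042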
A title bar was added along the top of the screen which shows the current time, Wi-Fi status and battery status. The battery status is based on the voltage of the lithium cells, which we profiled to get the correct voltage curve. A power button was added to the tab bar which brings up a screen where you can shut down or restart the device, or exit the application, which returns you to the bash shell. Exiting the application should only be needed during development and debugging. If the application does crash, it is possible to SSH into the barcode scanner over the Wi-Fi connection to find out what went wrong.

The TkInter Python GUI library does not include support for touch screens, so we had to write our own methods for detecting touches and drag events. This was done by recording the position of the cursor on press and comparing its position and the elapsed time on release. If the position was close and the time less than 1 second, then it was detected as a click. If the cursor moved between the press and release, then that was detected as a drag event and the content frame was moved to match the distance the cursor moved. A timer was started on the press event and the position was checked and updated every 10 ms. This resulted in smooth scrolling when you pressed and dragged your finger across the screen (a rough sketch of this logic is shown at the end of this post).

With the software ready, the barcode scanner was complete. Initial tests show that we should get around 10 hours of battery life out of a pair of 18650 lithium battery cells. As the scanner will probably only be needed for around half an hour each day when we are packaging orders, we should only need to recharge it twice a month.

The hardware design files and software for this project can be found on our GitHub repository.

The internal WiFi antenna we used for this project was from https://uk.rs-online.com/web/p/wifi-antennas/7043326/
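To close, here is a rough sketch of the press/drag detection described above. It is an illustration only: the actual implementation polls the cursor on a 10 ms timer, whereas this sketch uses TkInter motion events, and the widget names and thresholds are invented rather than taken from the project's code on GitHub.

import time
import tkinter as tk

CLICK_TIME = 1.0   # seconds; a shorter press-and-release counts as a click
CLICK_SLOP = 10    # pixels; movement below this still counts as a click

class TouchScroller:
    """Press/drag detection in the spirit of the post; details are guessed."""

    def __init__(self, frame):
        self.frame = frame           # the scrollable content frame
        self.start_y = 0
        self.press_time = 0.0
        frame.bind('<ButtonPress-1>', self.on_press)
        frame.bind('<B1-Motion>', self.on_motion)
        frame.bind('<ButtonRelease-1>', self.on_release)

    def on_press(self, event):
        self.start_y = event.y_root
        self.press_time = time.time()

    def on_motion(self, event):
        # Drag: move the content frame by the distance the cursor has moved.
        dy = event.y_root - self.start_y
        if abs(dy) > CLICK_SLOP:
            self.frame.place_configure(y=self.frame.winfo_y() + dy)
            self.start_y = event.y_root

    def on_release(self, event):
        moved = abs(event.y_root - self.start_y)
        held = time.time() - self.press_time
        if moved <= CLICK_SLOP and held < CLICK_TIME:
            print('click at', event.x, event.y)  # dispatch as a normal tap

if __name__ == '__main__':
    root = tk.Tk()
    content = tk.Frame(root, width=320, height=480, bg='white')
    content.place(x=0, y=0)
    TouchScroller(content)
    root.mainloop()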
Systolic growth of linear groups

We prove that the residual girth of any finitely generated linear group is at most exponential. This means that the smallest finite quotient in which the $n$-ball injects has at most exponential size. If the group is also not virtually nilpotent, it follows that the residual girth is precisely exponential.

Introduction

Let $\Gamma$ be a group with a finite generating subset $S$, and $|\cdot|_S$ the corresponding word length. We assume for convenience that $S$ is symmetric and contains the unit, so that $S^n$ is equal to the $n$-ball. The following three functions are attached to $(\Gamma, S)$:

• the growth: the cardinal $b_{\Gamma,S}(n)$ of $S^n$;

• the systolic growth: the function $\sigma_{\Gamma,S}$ mapping $n$ to the smallest $k$ such that some subgroup $H$ of index $k$ contains no nontrivial element of the $n$-ball; if no such $k$ exists, we define it as $+\infty$;

• the residual girth, or normal systolic growth $\sigma^{\lhd}_{\Gamma,S}$: same definition, with the additional requirement that $H$ is normal.

The growth is always defined and is at most exponential, while the systolic growth and residual girth take finite values if and only if $\Gamma$ is residually finite, and in this case they can be larger than exponential, as examples show. Furthermore, we have the obvious inequalities
\[ b_{\Gamma,S}(n) \le \sigma_{\Gamma,S}(2n+1) \le \sigma^{\lhd}_{\Gamma,S}(2n+1). \]

The asymptotic behavior of these functions, for finitely generated groups, does not depend on the finite generating subset. A simple example where the residual girth grows strictly faster than the systolic growth is the integral Heisenberg group, for which the growth and systolic growth behave as $n^4$ while the residual girth grows as $n^6$ (see ). Also, the systolic growth may grow faster than the growth, and can in fact grow arbitrarily fast. We show here that in linear groups this is not the case.

Theorem 1.1. Assume that $\Gamma$ admits a faithful finite-dimensional representation over a field (or a product of fields). Then the residual girth (and hence the systolic growth) of $\Gamma$ is at most exponential. In particular, if $\Gamma$ is not virtually nilpotent, then its residual girth and its systolic growth are exponential.

Such a result was asserted by Gromov for subgroups of $\mathrm{SL}_d(\mathbb{Z})$, under a superfluous additional technical assumption (non-existence of nontrivial unipotent elements). The proof of Theorem 1.1 consists in finding small enough quotient fields of the ring of entries, while ensuring that the $n$-ball is mapped injectively. The argument can be simplified in case $\Gamma \subset \mathrm{GL}_d(\mathbb{Q})$, since then reduction modulo $p$ works with no further effort for all $p$ large enough; in this case the finite quotients are explicit, while in the general case we only find a suitable quotient field using a counting argument.

Example 1.2. The group $\mathbb{Z} \wr \mathbb{Z}$ has exponential residual girth. Another example is $(\mathbb{Z}/6\mathbb{Z}) \wr \mathbb{Z}$, which is linear over a product of 2 fields, but not over a single field.

Remark 1.3. Closely related functions are the residual finiteness growth, which maps $n$ to the smallest number $s_{\Gamma,S}(n)$ such that for every $g \in S^n \setminus \{1\}$, there is a finite index subgroup of $\Gamma$ avoiding $g$, and $s^{\lhd}_{\Gamma,S}$, defined in the same way with only normal finite index subgroups. For finitely generated groups that are linear over a field, a polynomial upper bound for these functions is established in , and in the case of higher rank arithmetic groups, the precise behavior is obtained in : for instance, for $\mathrm{SL}_d(\mathbb{Z})$ with $d \ge 3$, the normal residual finiteness growth grows as $n^{d^2-1}$.

Preliminaries on polynomials over finite fields

Lemma 2.1.
Let $F$ be a finite field with $q$ elements. Given an integer $n \ge 1$, the number of irreducible monic polynomials of degree $n$ in $F[t]$ is $\le q^n/n$ and $\ge (q^n - q^{n-1})/n$.

Proof. The case $n = 1$ being trivial, we can assume $n \ge 2$. By Gauss' formula,
\[ \sum_{d \mid n} d\, N_q(d) = q^n, \]
where $N_q(m)$ denotes the number of irreducible monic polynomials of degree $m$; in particular $n N_q(n) \le q^n$. A similar argument shows that $n N_q(n) \ge q^n - q^{1+n/p}$, where $p$ is the smallest prime divisor of $n$, which is $\ge q^n - q^{n-1}$ if $n \ge 3$; the cases $n \le 2$ being trivial.

Lemma 2.2. Let $F$ be a field with $q$ elements. Let $P \in F[t]$ be a nonzero polynomial of degree $\le n$. Then $P$ survives in a quotient field of $F[t]$ of cardinal $\le 2nq$.

Proof. Let $m \ge 1$ be the largest number such that every irreducible polynomial of degree $m-1$ divides $P$. Let us check that $q^m \le 2nq$; the case $m = 1$ being trivial, we assume $m \ge 2$. By Lemma 2.1, there are $\ge (q^{m-1} - q^{m-2})/(m-1)$ monic irreducible polynomials of degree $m-1$. Hence their product, which has degree $\ge q^{m-1} - q^{m-2}$, divides $P$. Thus $q^{m-1} - q^{m-2} \le n$. We have $1 - q^{-1} \ge 1/2$; thus $\frac{1}{2} q^m q^{-1} \le n$, that is, $q^m \le 2nq$. Some irreducible polynomial of degree $m$ does not divide $P$, hence the quotient provides a field quotient of cardinal $q^m \le 2nq$ in which $P$ survives.

Corollary 2.3. Let $F$ be a field with $q$ elements and $P$ a nonzero polynomial in $F[t_1, \ldots, t_k]$, of degree $\le n$ with respect to each indeterminate. Then $P$ survives in a quotient field of cardinal $\le (2n)^k q$.

Proof. Induction on $k$. The result is trivial for $k = 0$. Write
\[ P = \sum_i P_i\, t_k^i, \qquad P_i \in F[t_1, \ldots, t_{k-1}]. \]
Some $P_i$ is nonzero; fix such an $i$. Then there exists, by induction, some quotient field $L$ of $F[t_1, \ldots, t_{k-1}]$ of cardinal $\le (2n)^{k-1} q$ in which $P_i$ survives. Then the image of $P$ in $L[t_k]$ has degree $\le n$ and is nonzero; hence by Lemma 2.2, it survives in a quotient field of cardinal $\le 2n\,((2n)^{k-1} q) = (2n)^k q$.

Conclusion of the proof

Proposition 3.1. Every finitely generated group that is linear over a field of characteristic $p$ has at most exponential residual girth.

Proof. Such a group embeds into $\mathrm{GL}_d(K)$ where $K$ is an extension of degree $b$ of some field $K' = \mathbb{F}_q(t_1, \ldots, t_k)$, and hence embeds into $\mathrm{GL}_{bd}(K')$. Hence it is no restriction to assume that the group is contained in $\mathrm{GL}_d(\mathbb{F}_q(t_1, \ldots, t_k))$. We let $S$ be a finite symmetric generating subset with 1; it is actually contained in $\mathrm{GL}_d(\mathbb{F}_q[t_1, \ldots, t_k][Q^{-1}])$ for some nonzero polynomial $Q$. Write $S = Q^{-\lambda} T$ with $\lambda$ a non-negative integer and $T \subset \mathrm{Mat}_d(\mathbb{F}_q[t_1, \ldots, t_k])$; write $s = \#(S) = \#(T)$. If $x$ is a matrix, let $b(x)$ be the product of all its nonzero entries (thus $b(0) = 1$). Let $m$ be such that every entry of every element of $T$ has degree $\le m$ with respect to each variable. Then in $T^{2n}$, every entry of every element has degree $\le 2nm$ with respect to each variable. Define $x_n = \prod_{y \in T^{2n}} b(y - 1)$. Thus $x_n$ is a product of at most $d^2 s^{2n}$ polynomials of degree $\le 2nm$ with respect to each variable. Define $x'_n = x_n Q$; assume that $Q$ has degree $\le \delta$ with respect to each variable, so that $x'_n$ has degree $\le 2d^2 m n s^{2n} + \delta$ with respect to each variable. Then, by Corollary 2.3, $x'_n$ survives in a finite field $F_n$ of cardinal $q_1 \le q\,(4d^2 m n s^{2n} + 2\delta)^k$. Thus $S^n$ is mapped injectively into $\mathrm{GL}_d(F_n)$, which has cardinal $\le q_1^{d^2} \le q^{d^2}(4d^2 m n s^{2n} + 2\delta)^{k d^2}$. Since $m, d, k, s, q$ are fixed, this grows at most exponentially with respect to $n$.

Proposition 3.2. Every finitely generated group that is linear over a field of characteristic 0 has at most exponential residual girth.

Proof. Similarly as in the proof of Proposition 3.1, we can suppose that the group is contained in $\mathrm{GL}_d(\mathbb{Q}(t_1, \ldots, t_k))$. We let $S$ be a finite symmetric generating subset with 1; it is actually contained in $\mathrm{GL}_d(\mathbb{Z}[1/r][t_1, \ldots, t_k][Q^{-1}])$ for some nonzero integer $r \ge 1$ and nonzero polynomial $Q$ with coprime coefficients. Write $S = (Qr)^{-\lambda} T$ with $\lambda$ a non-negative integer and $T \subset \mathrm{Mat}_d(\mathbb{Z}[t_1, \ldots, t_k])$; write $s = \#(S) = \#(T)$.
Let $R$ be an upper bound on the coefficients of entries of elements of $T$, and let $M$ be an upper bound on the number of nonzero coefficients of entries of elements of $T$. Then any product of $2n$ elements of $T$ is a sum of $\le M^{2n}$ monomials, each with a coefficient of absolute value $\le R^{2n}$. Since any entry of an element in $T^{2n}$ is a sum of at most $d^{2n-1}$ such products, we deduce that the coefficients of entries of elements of $T^{2n}$ are $\le d^{2n-1} R^{2n} M^{2n}$. By Bertrand's postulate, there exists a prime $p_n \in \left]\, 2d^{2n-1}(RM)^{2n},\, 4d^{2n-1}(RM)^{2n} \right]$. There exists $n_0$ such that for every $n \ge n_0$, $2d^{2n-1}(RM)^{2n}$ is greater than any prime divisor of $r$, and $2d^{2n-1}(RM)^{2n}$ is greater than the lowest absolute value of a nonzero coefficient of $Q$. Now we always assume $n \ge n_0$. Then $S^{2n}$ is mapped injectively into $\mathrm{GL}_d\big((\mathbb{Z}/p_n\mathbb{Z})[t_1, \ldots, t_k][Q^{-1}]\big)$, reducing coefficients modulo $p_n$.

Let $m$ be such that every entry of any element of $T$ has degree $\le m$ with respect to each variable. The previous proof provides a quotient $\mathrm{GL}_d(F_n)$ of $\mathrm{GL}_d\big((\mathbb{Z}/p_n\mathbb{Z})[t_1, \ldots, t_k][Q^{-1}]\big)$ in which $S^n$ is mapped injectively, such that $\mathrm{GL}_d(F_n)$ has cardinal $\le p_n^{d^2}(4d^2 m n s^{2n} + 2\delta)^{k d^2}$. Here $m, d, s, k$ are independent of $n$. The latter number is at most
\[ \left(4d^{2n-1}(RM)^{2n}\right)^{d^2} \left(4d^2 m n s^{2n} + 2\delta\right)^{k d^2}, \]
which grows at most exponentially with respect to $n$.

Proof of Theorem 1.1. First assume that $\Gamma$ is linear over some field. By Propositions 3.1 and 3.2, the residual girth, and hence the systolic growth, is at most exponential. If $\Gamma$ is not virtually nilpotent, then by the Tits–Rosenblatt alternative it contains a free subsemigroup on 2 generators and hence has exponential growth, and therefore has at least exponential systolic growth and residual girth.

Now assume that $\Gamma$ is linear over some product of fields. Let $A$ be the ring generated by the entries of $\Gamma$. This is a finitely generated reduced commutative ring; hence it has finitely many minimal prime ideals, whose intersection equals the set of nilpotent elements and hence is reduced to zero. Therefore $\Gamma$ embeds into a finite product of matrix groups over various fields. We conclude that $\Gamma$ has at most exponential residual girth, using the following two general facts:

• suppose that $\Gamma_1, \ldots, \Gamma_k$ are finitely generated groups and each $\Gamma_i$ has residual girth asymptotically bounded above by some function $u_i \ge 1$; then the residual girth of $\prod_{i=1}^{k} \Gamma_i$ is asymptotically bounded above by $\prod_i u_i$;

• if $\Lambda_1 \subset \Lambda_2$ are finitely generated groups, then the residual girth of $\Lambda_1$ is asymptotically bounded above by that of $\Lambda_2$.
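As a quick sanity check of the bounds in Lemma 2.1 (our own arithmetic, added for illustration), note that Möbius inversion of the displayed Gauss formula gives $N_q(n) = \frac{1}{n}\sum_{d \mid n} \mu(d)\, q^{n/d}$; taking $q = 2$ and $n = 4$,
\[ N_2(4) = \tfrac{1}{4}\bigl(\mu(1)\,2^4 + \mu(2)\,2^2 + \mu(4)\,2^1\bigr) = \tfrac{16 - 4}{4} = 3, \]
which indeed satisfies
\[ \frac{2^4 - 2^3}{4} = 2 \;\le\; 3 \;\le\; \frac{2^4}{4} = 4. \]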
def wrap_language(filename, content, compressed): name = lang_name(filename) if compressed: content = content.rstrip(';') wrap = 'hljs.registerLanguage("%s",%s);' else: wrap = '\nhljs.registerLanguage(\'%s\', %s);\n' return wrap % (name, content)
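# Illustrative calls, assuming lang_name() maps a filename like 'python.js' to
# the bare language name 'python' (lang_name is defined elsewhere in this module):
#
#   wrap_language('python.js', 'function(hljs){...};', compressed=True)
#   -> 'hljs.registerLanguage("python",function(hljs){...});'
#
#   wrap_language('python.js', 'function(hljs){...}', compressed=False)
#   -> "\nhljs.registerLanguage('python', function(hljs){...});\n"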
use std::fs::OpenOptions; use std::panic; use anyhow::{anyhow, Context, Result}; use backtrace::Backtrace; use clap::{App, Arg}; use tracing::{error, info}; use tracing_appender; use tracing_subscriber; use zanthe::run; const APP_VERSION: &str = env!("CARGO_PKG_VERSION"); fn main() { let log_file = OpenOptions::new() .read(true) .append(true) .create(true) .open("main.log") .expect("Could not prepare log file"); let (writer, _guard) = tracing_appender::non_blocking(log_file); tracing_subscriber::fmt() .with_writer(writer) .with_ansi(false) .with_max_level(tracing::Level::INFO) .init(); panic::set_hook(Box::new(|panic_info| { let backtrace = Backtrace::new(); error!("{}\n{:?}", panic_info, backtrace); })); let args = App::new("Zanthe") .version(APP_VERSION) .about("A Z-Machine interpreter") .arg( Arg::with_name("INPUT") .help("Input file") .required(true) .index(1), ) .arg( Arg::with_name("interface") .short("i") .help("The interface to use") .takes_value(true) .default_value("terminal") .possible_values(&["terminal"]), ) .arg( Arg::with_name("debug") .short("d") .help("Enable debug logging"), ) .get_matches(); if let Err(e) = run(args) { eprintln!("{}", e); error!("Exited with error: {}", e); std::process::exit(1); } info!("Exited normally"); }
Transparent Conductive Adhesives for Tandem Solar Cells Using Polymer-Particle Composites. Transparent conductive adhesives (TCAs) can enable conductivity between two substrates, which is useful for a wide range of electronic devices. Here, we have developed a TCA composed of a polymer-particle blend, with ethylene-vinyl acetate as the transparent adhesive and metal-coated flexible poly(methyl methacrylate) microspheres as the conductive particles, that can provide conductivity and adhesion regardless of the surface texture. This TCA layer was designed to be nearly transparent, conductive only in the out-of-plane direction, and of practical adhesive strength to hold the substrates together. The series resistance was measured at 0.3 and 0.8 Ω cm² for 8 and 0.2% particle coverage, respectively, while transparency remained over 92% in both cases. For applications in photovoltaic devices, such as mechanically stacked multijunction III-V/Si cells, a TCA with 1% particle coverage will have less than 0.5% power loss due to the resistance and less than 1% shading loss to the bottom cell.
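A rough consistency check of the quoted resistive loss (with operating values assumed here, not taken from the abstract): for a series resistance $R_s$, the fractional power loss at the maximum power point is approximately
\[ \frac{\Delta P}{P} \;\approx\; \frac{J_{mp}\, R_s}{V_{mp}}, \]
so with $R_s = 0.3\ \Omega\,\mathrm{cm}^2$ and assumed tandem operating values $J_{mp} \approx 14\ \mathrm{mA\,cm^{-2}}$ and $V_{mp} \approx 2\ \mathrm{V}$, the loss is about $0.2\%$, consistent with the quoted figure of less than $0.5\%$.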
def from_def(cls, obj): prof = cls(obj["steamid"]) prof._cache = obj return prof
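# Hypothetical usage; the enclosing class (assumed here to be a Steam profile
# wrapper called Profile) and its other attributes are not shown above:
#
#   raw = {"steamid": "76561198000000000", "personaname": "example"}
#   prof = Profile.from_def(raw)
#   assert prof._cache is raw  # the raw definition dict is cached as-is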
Laser-Induced Fluorescence Studies of Hematoporphyrin Derivative (HPD) in Normal and Tumor Tissue of Rat Fluorescence studies of hematoporphyrin derivative (HPD) in normal and tumor tissue of the rat were performed with nitrogen laser excitation and optical multi-channel detection. Fifteen types of tissue, including inoculated tumor, were investigated in rats at different delays after HPD injection. Optimum contrast functions and other criteria for discriminating tumor tissue from normal tissue are discussed. The results should have implications for practical human HPD endoscopy.
/**
 * Tokenizes the input text and returns a string corresponding to the tokens
 * as one token per line.
 *
 * @param text (String plain text)
 * @return tokens (String)
 */
public String tokenize(String text) {
    String tokens = "";
    if (text != null && text.length() != 0) {
        // Normalize all whitespace runs to single spaces.
        text = basicSpacePattern.matcher(text).replaceAll(" ");
        // Pad token boundaries (punctuation etc.) with spaces.
        for (Pattern pattern : patterns) {
            text = pattern.matcher(text).replaceAll(" $1 ");
        }
        // Split contractions into their component words.
        for (Pattern pattern : twoWordContractionPatterns) {
            text = pattern.matcher(text).replaceAll(" $1 $2");
        }
        for (Pattern pattern : threeWordContractionsPatterns) {
            text = pattern.matcher(text).replaceAll(" $1 $2 $3");
        }
        // Separate periods into their own tokens.
        text = periodPattern.matcher(text).replaceAll(" . ");
        if (System.getProperty("DEBUG") != null && System.getProperty("DEBUG").equalsIgnoreCase("true")) {
            System.out.println(text);
        }
        text = basicSpacePattern.matcher(text).replaceAll(" ");
        text = text.trim();
        if (!text.isEmpty()) {
            // One token per line.
            tokens = text.replaceAll("\\s+", "\n");
            tokens += "\n";
            if (System.getProperty("DEBUG") != null && System.getProperty("DEBUG").equalsIgnoreCase("true")) {
                System.out.println("TOKENS:\n" + tokens);
            }
            if (doSentSplit) {
                // Insert a blank line after sentence-final punctuation.
                tokens = tokens.replaceAll("\n([.!?])\n", "\n$1\n\n");
            }
        }
    }
    return tokens;
}
import { createLocalVue } from '@vue/test-utils'
import Vuex from 'vuex'
import { ipcMain, ipcRenderer } from '~/spec/mock/electron'
import General, { GeneralState } from '@/store/Preferences/General'
import { MyWindow } from '~/src/types/global'
import { IpcMainInvokeEvent } from 'electron'
;((window as any) as MyWindow).ipcRenderer = ipcRenderer

const state = (): GeneralState => {
  return {
    general: {
      sound: {
        fav_rb: true,
        toot: true
      },
      timeline: {
        cw: false,
        nsfw: false,
        hideAllAttachments: false
      },
      other: {
        launch: false
      }
    },
    loading: false
  }
}

const initStore = () => {
  return {
    namespaced: true,
    state: state(),
    actions: General.actions,
    mutations: General.mutations
  }
}

const app = {
  namespaced: true,
  actions: {
    loadPreferences(_) {
      return true
    }
  }
}

describe('Preferences/General', () => {
  let store
  let localVue

  beforeEach(() => {
    localVue = createLocalVue()
    localVue.use(Vuex)
    store = new Vuex.Store({
      modules: {
        Preferences: initStore(),
        App: app
      }
    })
  })

  describe('loadGeneral', () => {
    beforeEach(() => {
      ipcMain.handle('get-preferences', () => {
        return {
          general: {
            sound: {
              fav_rb: false,
              toot: false
            }
          }
        }
      })
    })
    afterEach(() => {
      ipcMain.removeHandler('get-preferences')
    })
    it('should be updated', async () => {
      await store.dispatch('Preferences/loadGeneral')
      expect(store.state.Preferences.general.sound.fav_rb).toEqual(false)
      expect(store.state.Preferences.general.sound.toot).toEqual(false)
      expect(store.state.Preferences.loading).toEqual(false)
    })
  })

  describe('updateSound', () => {
    beforeEach(() => {
      ipcMain.handle('update-preferences', (_: IpcMainInvokeEvent, config: any) => {
        return config
      })
    })
    afterEach(() => {
      ipcMain.removeHandler('update-preferences')
    })
    it('should be updated', async () => {
      await store.dispatch('Preferences/updateSound', {
        fav_rb: false,
        toot: false
      })
      expect(store.state.Preferences.general.sound.fav_rb).toEqual(false)
      expect(store.state.Preferences.general.sound.toot).toEqual(false)
      expect(store.state.Preferences.loading).toEqual(false)
    })
  })

  describe('updateTimeline', () => {
    beforeEach(() => {
      ipcMain.handle('update-preferences', (_: IpcMainInvokeEvent, config: any) => {
        return config
      })
    })
    afterEach(() => {
      ipcMain.removeHandler('update-preferences')
    })
    it('should be updated', async () => {
      await store.dispatch('Preferences/updateTimeline', {
        cw: true,
        nsfw: true,
        hideAllAttachments: true
      })
      expect(store.state.Preferences.general.timeline.cw).toEqual(true)
      expect(store.state.Preferences.general.timeline.nsfw).toEqual(true)
      expect(store.state.Preferences.general.timeline.hideAllAttachments).toEqual(true)
      expect(store.state.Preferences.loading).toEqual(false)
    })
  })
})
/*
 * Copyright 2021 Collate
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  findByTestId,
  findByText,
  queryByTestId,
  render,
} from '@testing-library/react';
import React from 'react';
import { MemoryRouter } from 'react-router-dom';
import FeedCardHeader from './FeedCardHeader';

const FQN = 'service.database.schema.table';
const type = 'table';
const expectedDisplayName = 'database.schema.table';

jest.mock('../../../axiosAPIs/userAPI', () => ({
  getUserByName: jest.fn().mockReturnValue({}),
}));

jest.mock('../../../utils/CommonUtils', () => ({
  getPartialNameFromFQN: jest.fn().mockReturnValue('feedcard'),
  getNonDeletedTeams: jest.fn().mockReturnValue([]),
  getEntityName: jest.fn().mockReturnValue('entityname'),
  getPartialNameFromTableFQN: jest.fn().mockImplementation(() => {
    return expectedDisplayName;
  }),
}));

jest.mock('../../../utils/TableUtils', () => ({
  getEntityLink: jest.fn(),
}));

jest.mock('../../../utils/TimeUtils', () => ({
  getDayTimeByTimeStamp: jest.fn(),
}));

jest.mock('../../common/ProfilePicture/ProfilePicture', () => {
  return jest.fn().mockReturnValue(<p>ProfilePicture</p>);
});

const mockFeedHeaderProps = {
  createdBy: 'xyz',
  entityFQN: 'x.y.v.z',
  entityField: 'z',
  entityType: 'y',
  isEntityFeed: true,
  timeStamp: 1647322547179,
};

describe('Test Feedheader Component', () => {
  it('Checks if the Feedheader component has isEntityFeed as true', async () => {
    const { container } = render(<FeedCardHeader {...mockFeedHeaderProps} />, {
      wrapper: MemoryRouter,
    });
    const createdBy = await findByText(container, /xyz/i);
    const headerElement = await findByTestId(container, 'headerText');
    const entityFieldElement = await findByTestId(
      container,
      'headerText-entityField'
    );
    const entityTypeElement = queryByTestId(container, 'entityType');
    const entityLinkElement = queryByTestId(container, 'entitylink');
    const timeStampElement = await findByTestId(container, 'timestamp');

    expect(createdBy).toBeInTheDocument();
    expect(headerElement).toBeInTheDocument();
    expect(entityFieldElement).toBeInTheDocument();
    expect(entityTypeElement).not.toBeInTheDocument();
    expect(entityLinkElement).not.toBeInTheDocument();
    expect(timeStampElement).toBeInTheDocument();
  });

  it('Checks if the Feedheader component has isEntityFeed as false', async () => {
    const { container } = render(
      <FeedCardHeader {...mockFeedHeaderProps} isEntityFeed={false} />,
      {
        wrapper: MemoryRouter,
      }
    );
    const createdBy = await findByText(container, /xyz/i);
    const headerElement = await findByTestId(container, 'headerText');
    const entityFieldElement = queryByTestId(
      container,
      'headerText-entityField'
    );
    const entityTypeElement = await findByTestId(container, 'entityType');
    const entityLinkElement = await findByTestId(container, 'entitylink');
    const timeStampElement = await findByTestId(container, 'timestamp');

    expect(createdBy).toBeInTheDocument();
    expect(headerElement).toBeInTheDocument();
    expect(entityFieldElement).not.toBeInTheDocument();
    expect(entityTypeElement).toBeInTheDocument();
expect(entityLinkElement).toBeInTheDocument(); expect(timeStampElement).toBeInTheDocument(); }); it('Should show link text as `database.schema.table` if entity type is table', async () => { const { container } = render( <FeedCardHeader {...mockFeedHeaderProps} entityFQN={FQN} entityType={type} isEntityFeed={false} />, { wrapper: MemoryRouter, } ); const entityTypeElement = await findByTestId(container, 'entityType'); const entityLinkElement = await findByTestId(container, 'entitylink'); expect(entityTypeElement).toBeInTheDocument(); expect(entityLinkElement).toBeInTheDocument(); expect(entityTypeElement).toHaveTextContent(type); expect(entityLinkElement).toHaveTextContent(expectedDisplayName); }); });