/** * Contains the window-relative coordinates of the current fragment * * @author dennis.ippel * */ protected final class GLFragCoord extends RVec4 { public GLFragCoord() { super("gl_FragCoord"); mInitialized = true; } }
use core::fmt; use crate::{Distribution, Random, Rng}; use crate::distributions::{SampleUniform, UniformSampler}; #[inline] fn wmul32(a: u32, b: u32) -> (u32, u32) { let full = a as u64 * b as u64; let msw = (full >> 32) as u32; let lsw = (full & 0xffffffff) as u32; (msw, lsw) } #[inline] fn wmul64(a: u64, b: u64) -> (u64, u64) { let full = a as u128 * b as u128; let msw = (full >> 64) as u64; let lsw = (full & 0xffffffffffffffff) as u64; (msw, lsw) } /// Uniform distribution over integral types. /// /// # Implementation notes /// /// For simplicity, we use the same generic struct `UniformInt<T>` for all integer types `T`. /// This gives us only one field type, `T`; to store unsigned values of this size, we take use the fact that these conversions are no-ops. /// /// For a closed range, the number of possible numbers we should generate is `range = (high - low + 1)`. /// To avoid bias, we must ensure that the size of our sample space, is a multiple of `range`; /// other values must be rejected (by replacing with a new random sample). /// See [Fast Random Integer Generation in an Interval](https://arxiv.org/abs/1805.10941) for the algorithm used. /// /// As a special case, we use `range = 0` to represent the full range of the result type (i.e. for the full inclusive range). /// /// For more information on this bias see the `examples/int_bias.rs` example. #[derive(Copy, Clone, Debug)] pub struct UniformInt<T> { base: T, // When T is signed, it is really an unsigned integer of the same size range: T, } impl<T> UniformInt<T> { pub(crate) const fn constant(base: T, range: T) -> UniformInt<T> { UniformInt { base, range } } } macro_rules! impl_uniform_int { ($ty:ty, $unsigned:ty, $large:ty, $method:ident, $wmul:ident) => { impl SampleUniform for $ty { type Sampler = UniformInt<$ty>; } impl UniformSampler<$ty> for UniformInt<$ty> { #[inline] fn new(low: $ty, high: $ty) -> UniformInt<$ty> { if low >= high { uniform_int_new_error(low, high); } // `high - low` may overflow for signed integers let range = high.wrapping_sub(low) as $unsigned as $ty; UniformInt { base: low, range } } #[inline] fn new_inclusive(low: $ty, high: $ty) -> UniformInt<$ty> { if low > high { uniform_int_new_inclusive_error(low, high); } // `high - low` may overflow for signed integers let range = high.wrapping_sub(low).wrapping_add(1) as $unsigned as $ty; UniformInt { base: low, range } } } impl Distribution<$ty> for UniformInt<$ty> { #[inline] fn sample<R: Rng + ?Sized>(&self, rng: &mut Random<R>) -> $ty { let range = self.range as $unsigned as $large; let mut zone = range; loop { let v = rng.$method(); if range == 0 { break v as $ty; } let (msw, lsw) = $wmul(v, range); if lsw >= zone { break self.base.wrapping_add(msw as $ty); } if zone == range { zone = <$large>::wrapping_sub(0, range) % range; if lsw >= zone { break self.base.wrapping_add(msw as $ty); } } } } } }; } impl_uniform_int! { i8, u8, u32, next_u32, wmul32 } impl_uniform_int! { u8, u8, u32, next_u32, wmul32 } impl_uniform_int! { i16, u16, u32, next_u32, wmul32 } impl_uniform_int! { u16, u16, u32, next_u32, wmul32 } impl_uniform_int! { i32, u32, u64, next_u64, wmul64 } impl_uniform_int! { u32, u32, u64, next_u64, wmul64 } impl_uniform_int! { i64, u64, u64, next_u64, wmul64 } impl_uniform_int! { u64, u64, u64, next_u64, wmul64 } // Interestingly make usize/isize use the same code paths // This keeps the result deterministic regardless of pointer width #[cfg(target_pointer_width = "32")] impl_uniform_int! 
{ isize, u32, u64, next_u64, wmul64 } #[cfg(target_pointer_width = "32")] impl_uniform_int! { usize, u32, u64, next_u64, wmul64 } #[cfg(target_pointer_width = "64")] impl_uniform_int! { isize, u64, u64, next_u64, wmul64 } #[cfg(target_pointer_width = "64")] impl_uniform_int! { usize, u64, u64, next_u64, wmul64 } #[cold] fn uniform_int_new_error<T: fmt::Debug>(low: T, high: T) -> ! { panic!("UniformSampler::new called with `low >= high` where low: {:?} and high: {:?}", low, high); } #[cold] fn uniform_int_new_inclusive_error<T: fmt::Debug>(low: T, high: T) -> ! { panic!("UniformSampler::new_inclusive called with `low > high` where low: {:?} and high: {:?}", low, high); } //---------------------------------------------------------------- #[test] fn test_bias() { let distr = UniformInt::new_inclusive(0u32, 0xC0000000); println!("distr: {:#x?}", distr); let mut rng = crate::new(); let mut buckets = [0u32; 3]; for _ in 0..10000 { let value = rng.sample(&distr); if value < 0x40000000 { buckets[0] += 1; } else if value < 0x80000000 { buckets[1] += 1; } else if value <= 0xC0000000 { buckets[2] += 1; } else { panic!("value: {:#x}", value); } } let mean = (buckets[0] as i64 + buckets[1] as i64 + buckets[2] as i64) / 3; let pass = buckets.iter().all(|&odd| (odd as i64 - mean).abs() < 1000); println!("mean:{} buckets:{:?} pass:{}", mean, buckets, pass); assert!(pass); } #[test] fn test_edges_large() { let distr = UniformInt::new_inclusive(u32::MIN, u32::MAX); println!("distr: {:#x?}", distr); let mut rng = crate::new(); let mut zeros = 0; for _ in 0..10000 { let value = rng.sample(&distr); if value == 0 { zeros += 1; } } assert!(zeros < 5, "found {} zero samples!", zeros); } #[test] fn test_edges_small() { let distr1 = UniformInt::new_inclusive(10, 10); let distr2 = UniformInt::new(23, 24); let mut rng = crate::new(); for _ in 0..100 { let value1 = rng.sample(&distr1); let value2 = rng.sample(&distr2); assert_eq!(value1, 10); assert_eq!(value2, 23); } } #[test] fn test_yolo() { let mut rng = crate::new(); for _ in 0..10000 { let mut low: i16 = rng.next(); let mut high: i16 = rng.next(); if high < low { let tmp = low; low = high; high = tmp; } let value = rng.range(low..=high); assert!(value >= low && value <= high); if low != high { let value = rng.range(low..high); assert!(value >= low && value < high); } } }
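The doc comment above describes the widening-multiply rejection method from "Fast Random Integer Generation in an Interval". As a minimal, hedged sketch of the same arithmetic outside this crate (Python is used here purely for illustration; the helper name and the toy RNG default are assumptions, not part of the library), the accept/reject logic looks like this:

import random

def sample_below(range_: int, next_u32=lambda: random.getrandbits(32)) -> int:
    """Uniform integer in [0, range_) via a widening multiply plus rejection."""
    assert 0 < range_ <= 2 ** 32
    # Only low words smaller than `zone` are biased; zone == 2**32 % range_.
    zone = (2 ** 32 - range_) % range_
    while True:
        v = next_u32()                       # one uniform 32-bit draw
        full = v * range_                    # the 64-bit widening product
        msw, lsw = full >> 32, full & 0xFFFFFFFF
        if lsw >= zone:                      # accept: the high word is unbiased
            return msw

# Example: an unbiased die roll is sample_below(6) + 1, mirroring
# UniformInt { base: 1, range: 6 } in the Rust code above.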
This article is from the archive of our partner, The Wire. Authorities have confirmed, for the first time ever, that hackers attempted and almost succeeded at rigging a Miami primary vote, uncovering underlying security issues with the online voting systems of the future. In the Miami-Dade primary election last August, requests for over 2,500 phantom absentee ballots flooded the Miami-Dade voter registration site, a phenomenon which a grand jury has now confirmed came from hackers, reports MSNBC's Gil Aegerter. Because it had some hallmarks of trickery, the election department's software was able to halt the scheme before it actually affected the election. But the scarier part is how easy the hack was to perform, as the Miami Herald's Patricia Mazzei explains. With a tiny bit more skill, this person could have bypassed the trigger that caught the hack. "And that, of course, is the most frightening thing: that any moderately or even marginally skilled programmer could have done this," Steven Rambam, who reviewed the IP addresses associated with this hack, told Mazzei. So, yeah, this is just the beginning. Specifically, the still-unknown party built a program that rapid-fire filled out online ballot requests using voter information for people who would likely not participate in a primary election. To make the absentee ballot requests seem legit, that person then made the IP addresses look like they came from a foreign country. This time, the requests were flagged as suspicious because they came in so quickly and also targeted Democratic voters in specific elections. In addition, a CAPTCHA system can detect these types of automated requests. However, the president of the company that provides that software to Miami-Dade and 52 other counties admitted that's not hard to bypass. "That’s a barrier, but I’m told that for someone who’s sophisticated enough as a programmer, they can get over that hurdle," Jane Watson told Mazzei. Services out there would cost less than $0.001 per voter, claim researchers Aegerter spoke with. In other words, this amateur situation could have turned out a lot worse if someone with a little more experience had attempted the hack. Plus, as Aegerter notes, this is just one of many methods of defrauding online voting systems.
def task_version(): versionfiles = json.load(open(VERSION_JSON)) for filename, contents in versionfiles.iteritems(): svw = SotaVersionWriter(filename, contents % globals()) yield { 'name': filename, 'actions': [svw.update], 'uptodate': [svw.uptodate], }
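The `contents % globals()` step relies on old-style %-formatting with a mapping: each value in the version JSON is expected to be a template keyed by module-level names. A hypothetical illustration of that expansion (the template text and the VERSION value below are assumptions, not taken from this project):

# Hypothetical sketch of how `contents % globals()` expands a template.
VERSION = "1.2.3"                                       # assumed module-level name
template = '#define SOTA_VERSION "%(VERSION)s"\n'       # assumed JSON value
print(template % globals(), end="")                     # -> #define SOTA_VERSION "1.2.3"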
Biologists in Puerto Rico report that at least 80 of the endangered Puerto Rican Parrots survived Hurricane Maria while exposed to the category 5 storm. The 175 parrots in captivity were unharmed. The Puerto Rican Parrot, also known as the Puerto Rican Amazon, is the only remaining native parrot in Puerto Rico. The parrot has been on the critically endangered list since 1994. Back in 1989, Hurricane Hugo devastated the parrot population, destroying more than half the parrots in the Puerto Rican wild. By the end of 1989, just a small population of 22 birds remained. During both Hurricanes Irma and Maria, the rare parrots in captivity were moved into a hurricane-proof building. For the 2nd time in 2 weeks we’ve had to move all 175 parrots from the Aviary to the hurricane shelter. #MariaPR approaches as a category 5 pic.twitter.com/aqMLJU4Xy5 — Tanya Martínez (@iguacachick) September 19, 2017 After Irma hit Puerto Rico in early September, it appeared the parrots in the wild dodged a bullet. The eye of Irma passed about 30 miles to the north of the island. While the damage from Irma was a major inconvenience, the parrots weathered the storm. Round two with Maria was a much different story. The 155 mph winds destroyed much of the forest and stripped many of the trees bare. Biologist Tanya Martinez was able to tweet a few pics of the devastation four days after Maria passed. Our team made contact with our colleagues at the Aviary and they’re ok. But Rio Abajo can barely be called a forest anymore. #MariaPR pic.twitter.com/TjxXJfAF2M — Tanya Martínez (@iguacachick) September 26, 2017 Due to the storm, there are now no direct communications to the Rio Abajo Aviary. The site is where the birds are cared for in captivity and then released into the wild. A secondhand report from Ricardo Valentine, a biologist who cares for the endangered parrots, said that with leaves missing on the trees the bright birds are easy to count. Valentine stayed at the Rio Abajo Aviary while Maria battered the American territory. Valentine reports that the smaller breeding cages at the aviary are destroyed, and he’s not sure what type of funding they’ll receive for repairs. Meanwhile, a site for donations to the aviary has popped up online and biologists are referring the public to it for help. Martinez says that with much of the forest defoliated, the parrots are making good use of what they can find. The fruit of the royal palm survived and the parrots appear to be eating that. Rio Abajo is heavily defoliated. Only the fruit of the royal palm has survived. The parrots are making good use of this limited resource. pic.twitter.com/9IjTFhROkl — Tanya Martínez (@iguacachick) October 4, 2017 Without direct communication to the aviary, information is still very limited. We’ll update this story as more information is made available.
He's not Grizzly Adams, nor Bear Grylls, but he has climbed Mount Everest. At 59, Gary Johnson still projects the energetic aura of an athlete. But these days, the two-time Republican governor of New Mexico and imminent Libertarian Party Presidential candidate has the rumpled look of someone who spends too much time in Starbucks hunched over a laptop. At a sandwich shop near Rockefeller Center where we met for an interview last week, he talks with a quiet kind of energy: non-intimidating, a bit self-effacing, but sincere. His voice is not mellifluous like Obama's; his style is nothing like Mitt's trying-too-hard; and his rhetoric is far from Santorum's coarse and unbalanced rambling. Johnson's speech lacks the "uhs," "y'knows" or similar pauses that usually indicate a bad case of public overthink. No, Johnson speaks with the conviction of a true believer, one convinced that abandoning the Republican Party for a run as a Libertarian will sow seeds that will take root -- if not this year, then perhaps in 2016. The preening and posturing of Romney and Santorum, looking to score at the socially conservative beauty contest, are anathema to Johnson. He wants to stick close to Libertarian core values, and if that means butting heads with former Libertarian Party presidential candidate (1988) Ron Paul, so be it. Abortion? Where Ron Paul waffled, throwing that decision to the states, Johnson is clear: "A woman should be making that decision," he says. Foreign aid, especially to Israel? Paul says no. Johnson says yes. "Israel has been a valuable ally," he insists. While Paul reasserts Libertarianism's commitment to the "individual" as the paramount decision maker, Johnson clearly makes a concession to realpolitik, where rewriting the narrative may be essential to drawing a wider range of potential supporters: disaffected Democrats, moderate Republicans and everything in-between. Maybe it's something about his mountain climbing that makes him eager to ride that risky political third rail. Drugs: they should be legal. In 1999 as Governor, he was the highest ranking elected official to call for legalizing weed, citing all the wasteful spending trying to enforce unenforceable laws. Anyway, he says, it's a matter of personal choice. Illegal immigration? He's a former border state Governor calling for more work visas and less chain link. In his view, we should turn illegals into welcome guests who might reasonably be expected "to pay taxes, pay for health care and otherwise be contributors to society rather than burdens." It's the kind of common sense approach guaranteed to cause a Lou Dobbs meltdown among mainstream Republicans. He's all about common sense, at least the Libertarian version. Do we really need expensive micro-managing bureaucracies like education and housing and development? No. They're unnecessary. Get rid of that old bogeyman, the Fed? "Absolutely." Johnson owes his allegiance to Milton Friedman's theory of free market economics: if a bank is going to fail, okay, let it go -- bye-bye. His economic goal is to slash and burn till you get to that state of grace: a balanced budget. As Governor, he'll tell you, the veto was his machete, which he used so consistently that he was dubbed -- yes -- 'Mr. Veto.' If a bill submitted to his cost/benefit analysis didn't make the grade, out it went. Under his watch, government growth slowed to a crawl and citizens of the state didn't mind one bit; they lauded him.
Cited as one of New Mexico's most popular Governors, he drew praise even from opponents like Democratic Congressman Tom Udall. If not for term limits, Johnson might have had himself a job for life. Hitting the refresh button, Johnson goes to great lengths to separate himself from any notion that he's an "extremism in defense of Liberty" candidate, a la Barry Goldwater. He'll spare a few Federal bureaucracies, with appropriate cost cutting. That means the Department of Justice will survive because, as he'll tell you, there's a role for the Feds in guaranteeing civil rights from sea to shining sea. This places him at odds with some in the Ron Paul camp who've called for a repeal of the 1964 Civil Rights Act as antithetical to "free association." In Gary Johnson's world view, individual rights - civil rights - still need protection, as he notes: "If the federal government didn't pass the civil rights legislation, what would life be like in Alabama and Mississippi?" While agreeing with Paul about scrapping Orwellian constructs -- namely, Homeland Security and TSA -- he goes easy on the Environmental Protection Agency, recalling his own experience as Governor dealing with some "really bad actors on the pollution front." So the mountain climber now wants to climb this country's ultimate political Everest. And he wants the voting public to believe that, as a third party candidate, he can succeed, which brings us back to the mountain climbing thing. It's a pursuit that usually draws the likes of either inspired adventurers, like Sir Edmund Hillary; monks of the Taoist variety; or the truly, truly crazy, like British occultist Aleister Crowley. "Why do you?" was the question. "When you're mountaineering, all you have to worry about is shitting, pissing, eating and keeping warm...it's so in the moment," he responded with relish. "When you're hanging off a cliff, suspended on a rope, there's just right now and how in the fuck am I going to get out of this?" He punctuated his storytelling with: "It's so cool." Was he channeling early Jerry Brown? "What we're all in search of in our lives is a state of Zen," he says, as if to affirm that likeness. Being governor was "fun," he recalled, "because it was, [like mountain climbing] in the moment." Will it still be fun when he takes the reins of that peculiar Libertarian beast, the pushmi-pullyu of Dr. Dolittle fame, heads on opposite ends, trying and failing to get traction in either direction? Campaigning, he notes, is not "fun," but if he can get the needed poll numbers, he vows to ice-pick his way onto the stage as a participant in the Grand Presidential Debate, where he believes that the force of his arguments - and the common sense notions of libertarianism - may turn a few million heads in his direction.
<reponame>RSaab/rss-scraper<gh_stars>0 # Generated by Django 3.1 on 2020-08-13 16:23 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='entry', unique_together={('guid',)}, ), ]
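For orientation, an Entry model consistent with the state after this migration might look roughly like the sketch below; any detail not named in the migration (for example the exact definition of guid) is an assumption for illustration only.

from django.db import models

class Entry(models.Model):
    guid = models.CharField(max_length=255)            # assumed definition
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ('-updated_at',)
        verbose_name_plural = 'entries'
        unique_together = {('guid',)}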
New Privacy Protocol Zether Can Conceal Ethereum Transactions - Blockonomi: Attention has been increasing around Zether, a privacy protocol that's been developed and proposed for smart contract cryptocurrencies.
Cryptocurrency Miners are Making Millions for Doing Absolutely Nothing - CCN: The latest issue of Diar points out that Bitcoin miners and other crypto miners have made tens of millions of dollars without processing a single transaction.
Zcash (ZEC) 1-Day Volume Hits $160.89 Million - Fairfield Current: Zcash (CURRENCY:ZEC) traded down 0.3% against the dollar during the 1-day period ending at 21:00 Eastern on February 26th. Zcash has a total market ...
Zcash Price Down 4.3% Over Last 7 Days (ZEC) - Fairfield Current: Zcash (CURRENCY:ZEC) traded 0.3% lower against the US dollar during the 24 hour period ending at 16:00 Eastern on February 19th. One Zcash coin can ...
Zcash Price Changed by -0.13 percent - ICO Brothers: As of 2019-02-26, the average Zcash price is 52.79137170 USD, 0.01369795 BTC, 0.38575049 ETH. It's noteworthy how much Zcash has been issued into circulation.
import java.util.ArrayList; import java.util.List; import org.junit.Before; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by hxlin on 9/20/15. */ public class LibraryTests { private List<Book> books; private Library library; @Before public void setUp() { books = new ArrayList<Book>(); library = new Library(books); } @Test public void getAllBooksFromLibrary() throws Exception { assertEquals(books, library.getBooks()); } @Test public void showBooksDetailsInString() throws Exception { Book book = mock(Book.class); when(book.getDetails()).thenReturn("abracadabra"); books.add(book); String booksDetails = library.showBooks(); assertThat(booksDetails, is("abracadabra\n")); } }
<filename>Pipeline.cpp #include "Pipeline.h" //If arguments are good returns a cleaned version of them //First one is input, second one is output std::array <std::wstring, 2> Pipeline::CheckArguments (const std::wstring Arg1, const std::wstring Arg2) { std::array<std::wstring, 2> Arguments; //Input if (!Arg1.rfind(L"-i", 0)) { Arguments[0] = Arg1.substr(2, Arg1.size()); } else if (!Arg2.rfind(L"-i", 0)) { Arguments[0] = Arg2.substr(2, Arg2.size()); } //Output if (!Arg1.rfind(L"-o", 0)) { Arguments[1] = Arg1.substr(2, Arg1.size()); } else if (!Arg2.rfind(L"-o", 0)) { Arguments[1] = Arg2.substr(2, Arg2.size()); } return Arguments; } std::unique_ptr<std::wistream> Pipeline::PipeInput; std::unique_ptr<std::wostream> Pipeline::PipeOutput; //If argument is good for selected mode returns a cleaned version of it //True = Input //False = Output std::wstring Pipeline::CheckArgument (const std::wstring Arg, bool Mode) { std::wstring Argument; if (Mode) { if (!Arg.rfind(L"-i", 0)) { Argument = Arg.substr(2, Arg.size()); } } else { if (!Arg.rfind(L"-o", 0)) { Argument = Arg.substr(2, Arg.size()); } } return Argument; } //1 = Only Input is set //2 = Only Output is set //3 = Both are set //0 = Nothing was changed //-1 = Bad Input (Output is never checked in this case) //-2 = Bad Output //-3 = Invalid number of arguments //-4 = Unforeseen error int Pipeline::MakePipe (const std::vector<std::wstring> & Args) { if (!Args.size()) { return 0; } else if (Args.size() == 1) { std::wstring Argument = Pipeline::CheckArgument (Args[0], true); if (Argument.size()) { Pipeline::PipeInput.reset(new std::wifstream (std::filesystem::path(Argument))); if (!*Pipeline::PipeInput) { std::wcerr << L"Bad input file\n"<< std::flush; return -1; } return 1; } else { Argument = CheckArgument (Args[0], false); if (Argument.size()) { Pipeline::PipeOutput.reset(new std::wofstream (std::filesystem::path(Argument))); if (!*Pipeline::PipeOutput) { std::wcerr << L"Bad output file\n"<< std::flush; return -2; } return 2; } } } else if (Args.size() == 2) { int Counter = 0; std::array<std::wstring, 2> IOArgs = Pipeline::CheckArguments (Args[0], Args[1]); if (IOArgs[0].size()) { Pipeline::PipeInput.reset(new std::wifstream (std::filesystem::path(IOArgs[0]))); if (!*Pipeline::PipeInput) { std::wcerr << L"Bad input file\n"<< std::flush; return -1; } ++Counter; } if (IOArgs[1].size()) { Pipeline::PipeOutput.reset(new std::wofstream (std::filesystem::path(IOArgs[1]))); if (!*Pipeline::PipeOutput) { std::wcerr << L"Bad output file\n"<< std::flush; return -2; } Counter += 2; } if (Counter) {return Counter;} } else { std::wcerr << L"Invalid number of arguments\n"<< std::flush; return -3; } return -4; }
<reponame>shawncarney/rock-paper-jesus /* * Power BI Visualizations * * Copyright (c) Microsoft Corporation * All rights reserved. * MIT License * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the ""Software""), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ /* * This file is based on or incorporates material from the projects listed below (Third Party IP). * The original copyright notice and the license under which Microsoft received such Third Party IP, * are set forth below. Such licenses and notices are provided for informational purposes only. * Microsoft licenses the Third Party IP to you under the licensing terms for the Microsoft product. * Microsoft reserves all other rights not expressly granted under this agreement, whether by * implication, estoppel or otherwise. * * d3 Force Layout * Copyright (c) 2010-2015, <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * The name <NAME> may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <NAME> BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ /// <reference path="../../_references.ts"/> module powerbi.visuals.samples { export interface ForceGraphData { nodes: any; links: any; minFiles: number; maxFiles: number; linkedByName: any; } export class ForceGraph implements IVisual { public static capabilities: VisualCapabilities = { dataRoles: [ { name: 'Values', kind: VisualDataRoleKind.GroupingOrMeasure, }, ], objects: { general: { properties: { formatString: { type: { formatting: { formatString: true } }, }, }, } }, dataViewMappings: [{ table: { rows: { for: { in: 'Values' }, dataReductionAlgorithm: { window: {} } }, rowCount: { preferred: { min: 1 } } }, }], suppressDefaultTitle: true, }; private static VisualClassName = 'forceGraph'; private root: D3.Selection; private paths: D3.Selection; private nodes: D3.Selection; private dataView: DataView; private data: ForceGraphData; private forceLayout: D3.Layout.ForceLayout; private marginValue: IMargin; private get margin(): IMargin { return this.marginValue || { left: 0, right: 0, top: 0, bottom: 0 }; } private set margin(value: IMargin) { this.marginValue = $.extend({}, value); this.viewportInValue = ForceGraph.substractMargin(this.viewport, this.margin); } private viewportValue: IViewport; private get viewport(): IViewport { return this.viewportValue || { width: 0, height: 0 }; } private set viewport(value: IViewport) { this.viewportValue = $.extend({}, value); this.viewportInValue = ForceGraph.substractMargin(this.viewport, this.margin); } private viewportInValue: IViewport; private get viewportIn(): IViewport { return this.viewportInValue || this.viewport; } private static substractMargin(viewport: IViewport, margin: IMargin): IViewport { return { width: Math.max(viewport.width - (margin.left + margin.right), 0), height: Math.max(viewport.height - (margin.top + margin.bottom), 0) }; } // converts data from Values to two dimensional array // expected order: MemberFrom MemberTo Value Valu2 (optional - for coloring) public static converter(dataView: DataView): ForceGraphData { var nodes = {}; var minFiles = Number.MAX_VALUE; var maxFiles = 0; var linkedByName = {}; //var links = [ // { "source": "john", "target": "joe", "filecount": 50 }, // { "source": "john", "target": "bob", "filecount": 150 }, // { "source": "mary", "target": "joe", "filecount": 80 }, // { "source": "bob", "target": "mary", "filecount": 70 }, // { "source": "joe", "target": "bob", "filecount": 20 }, //]; //links.forEach(function (link) { // link.source = nodes[link.source] || // (nodes[link.source] = { name: link.source }); // link.target = nodes[link.target] || // (nodes[link.target] = { name: link.target }); // //link.value = +link.filecount; // if (link.filecount < minFiles) { minFiles = link.filecount }; // if (link.filecount > maxFiles) { maxFiles = link.filecount }; // linkedByName[link.source.name + "," + link.target.name] = 1; //}); var links = []; //var rows = [ // ["Harry", "Sally", 4631], // ["Harry", "Mario", 4018] //]; if (dataView && dataView.table) { var rows = dataView.table.rows; rows.forEach(function (item) { linkedByName[item[0] + "," + item[1]] = 1; var link = { "source": nodes[item[0]] || (nodes[item[0]] = { name: item[0] }), "target": nodes[item[1]] || (nodes[item[1]] = { name: item[1] }), "filecount": item[2] }; if (link.filecount < minFiles) { minFiles = link.filecount; } if (link.filecount > maxFiles) { maxFiles = link.filecount; } links.push(link); }); }; var data = { "nodes": nodes, "links": links, "minFiles": minFiles, "maxFiles": maxFiles, "linkedByName": linkedByName }; return 
data; } public init(options: VisualInitOptions): void { this.root = d3.select(options.element.get(0)); this.forceLayout = d3.layout.force(); } public update(options: VisualUpdateOptions) { if (!options.dataViews || (options.dataViews.length < 1)) return; this.data = ForceGraph.converter(this.dataView = options.dataViews[0]); this.viewport = options.viewport; var k = Math.sqrt(Object.keys(this.data.nodes).length / (this.viewport.width * this.viewport.height)); this.root.selectAll("svg").remove(); var svg = this.root .append("svg") .attr("width", this.viewport.width) .attr("height", this.viewport.height) .classed(ForceGraph.VisualClassName, true); this.updateNodes(); this.forceLayout .links(this.data.links) .gravity(100 * k) .size([this.viewport.width, this.viewport.height]) .linkDistance(100) .charge(-15 / k) .on("tick", this.tick()); this.updateNodes(); this.forceLayout.start(); var scale0to100 = d3.scale.linear().domain([this.data.minFiles, this.data.maxFiles]).range([2, 10]).clamp(true); this.paths = svg.selectAll(".link") .data(this.forceLayout.links()) .enter().append("path") .attr("class", "link") .attr("stroke-width", d => scale0to100(d.filecount)) .on("mouseover", this.fadePath(.3)) .on("mouseout", this.fadePath(1)); this.paths.append("title").text(d => d.source.name + "-" + d.target.name + ":" + d.filecount); // define the nodes this.nodes = svg.selectAll(".node") .data(this.forceLayout.nodes()) .enter().append("g") .attr("class", "node") .call(this.forceLayout.drag) .on("mouseover", this.fadeNode(.3)) .on("mouseout", this.fadeNode(1)) .on("mousedown", () => d3.event.stopPropagation()); // add the nodes this.nodes.append("circle") .attr("r", d => d.weight < 10 ? 10 : d.weight); // add the text this.nodes.append("text") .attr("x", 12) .attr("dy", ".35em") .text(d => d.name); } private updateNodes() { var oldNodes = this.forceLayout.nodes(); this.forceLayout.nodes(d3.values(this.data.nodes)); this.forceLayout.nodes().forEach((node, i) => { if (!oldNodes[i]) { return; } node.x = oldNodes[i].x; node.y = oldNodes[i].y; node.px = oldNodes[i].px; node.py = oldNodes[i].py; node.weight = oldNodes[i].weight; }); } private fadePath(opacity) { return (d) => { this.paths.style("stroke-opacity", o => o.source === d.source && o.target === d.target ? 1 : opacity); this.paths.style("stroke", o => o.source === d.source && o.target === d.target ? "#f00" : "#bbb"); }; } private fadeNode(opacity) { var isConnected = (a, b) => this.data.linkedByName[a.name + "," + b.name] || this.data.linkedByName[b.name + "," + a.name] || a.name === b.name; return (d) => { this.nodes.style("stroke-opacity", function (o) { var thisOpacity = isConnected(d, o) ? 1 : opacity; this.setAttribute('fill-opacity', thisOpacity); return thisOpacity; }); this.paths.style("stroke-opacity", o => o.source === d || o.target === d ? 1 : opacity); this.paths.style("stroke", o => o.source === d || o.target === d ? "#f00" : "#bbb"); }; } // add the curvy lines private tick() { var viewport = this.viewportIn; //"width/height * 20" seems enough to move nodes freely by force layout. 
var maxWidth = viewport.width * 20; var maxHeight = viewport.height * 20; var limitX = x => Math.max((viewport.width - maxWidth) / 2, Math.min((viewport.width + maxWidth) / 2, x)); var limitY = y => Math.max((viewport.height - maxHeight) / 2, Math.min((viewport.height + maxHeight) / 2, y)); var getPath = d => { d.source.x = limitX(d.source.x); d.source.y = limitY(d.source.y); d.target.x = limitX(d.target.x); d.target.y = limitY(d.target.y); var dx = d.target.x - d.source.x, dy = d.target.y - d.source.y, dr = Math.sqrt(dx * dx + dy * dy); return "M" + d.source.x + "," + d.source.y + "A" + dr + "," + dr + " 0 0,1 " + d.target.x + "," + d.target.y; }; return () => { this.paths.each(function () { this.parentNode.insertBefore(this, this); }); this.paths.attr("d", getPath); this.nodes.attr("transform", d => "translate(" + limitX(d.x) + "," + limitY(d.y) + ")"); }; } public destroy(): void { this.root = null; } } }
//! `UsbBus` implementation use core::{ mem, ptr::{self, NonNull}, slice, sync::atomic::{self, Ordering}, }; use usb_device::{ bus::{PollResult, UsbBus}, endpoint::{EndpointAddress, EndpointType}, UsbDirection, UsbError, }; use super::{ dqh::dQH, token::{Status, Token}, util::{self, Data, OneIndices, Ref}, Inner, Usbd, ENDPOINTS, }; use crate::{memlog, memlog_flush_and_reset, time}; impl UsbBus for Usbd { fn alloc_ep( &mut self, ep_dir: UsbDirection, ep_addr: Option<EndpointAddress>, ep_type: EndpointType, max_packet_size: u16, interval: u8, ) -> Result<EndpointAddress, UsbError> { // NOTE we are using this in single-threaded context so deadlocks are // impossible self.inner .lock(|inner| inner.alloc_ep(ep_dir, ep_addr, ep_type, max_packet_size, interval)) } fn enable(&mut self) { self.inner.lock(|inner| inner.enable()); } fn is_stalled(&self, _: EndpointAddress) -> bool { false } fn poll(&self) -> PollResult { self.inner.lock(|inner| inner.poll()) } fn read(&self, ep_addr: EndpointAddress, buf: &mut [u8]) -> Result<usize, UsbError> { self.inner.lock(|inner| inner.read(ep_addr, buf)) } fn reset(&self) { self.inner.lock(|inner| inner.reset()); } fn resume(&self) { // TODO do something in the `resume` callback } fn set_stalled(&self, _: EndpointAddress, stalled: bool) { if stalled { // FIXME handle stall conditions unimplemented!() } } fn suspend(&self) { // TODO do something in the `suspend` callback } fn set_device_address(&self, addr: u8) { self.inner.lock(|inner| inner.set_device_address(addr)); } fn write(&self, ep_addr: EndpointAddress, bytes: &[u8]) -> Result<usize, UsbError> { self.inner.lock(|inner| inner.start_write(ep_addr, bytes)) } } /// USB Reset Received const USBSTS_URI: u32 = 1 << 6; /// Start of Frame (SoF) received interrupt const USBSTS_SRE: u32 = 1 << 7; /// Port Change Detect const USBSTS_PCI: u32 = 1 << 2; /// TX Endpoint Enable const ENDPTCTRL_TXE: u32 = 1 << 23; /// TX Data Toggle Reset const ENDPTCTRL_TXR: u32 = 1 << 22; /// RX Endpoint Enable const ENDPTCTRL_RXE: u32 = 1 << 7; /// RX Data Toggle Reset const ENDPTCTRL_RXR: u32 = 1 << 6; impl Inner { // # UsbBus methods fn alloc_ep( &mut self, ep_dir: UsbDirection, ep_addr: Option<EndpointAddress>, ep_type: EndpointType, max_packet_size: u16, interval: u8, ) -> Result<EndpointAddress, UsbError> { memlog!( "alloc_ep(ep_dir={:?}, ep={:?}, ep_type={:?}, max_packet_size={}, interval={}) @ {:?}", ep_dir, ep_addr.map(|ep| ep.index()), ep_type, max_packet_size, interval, time::uptime(), ); assert_ne!(ep_type, EndpointType::Isochronous, "not supported"); let (ep_addr, dqh) = if let Some(ep_addr) = ep_addr { let dqh = self.get_dqh(ep_addr).ok_or(UsbError::EndpointOverflow)?; if self.is_ep_being_used(ep_addr) { return Err(UsbError::InvalidEndpoint); } (ep_addr, dqh) } else { // use the lowest endpoint address available that's not control 0 let mut ep_idx = 1; loop { let ep_addr = EndpointAddress::from_parts(ep_idx, ep_dir); let dqh = self.get_dqh(ep_addr).ok_or(UsbError::EndpointOverflow)?; if !self.is_ep_being_used(ep_addr) { break (ep_addr, dqh); } else { // try next ep_idx += 1; } } }; // NOTE(unsafe) hardware cannot yet access the dQH and dTD unsafe { dqh.set_max_packet_size(max_packet_size, true); // install a dTD for the endpoint let dtd = Ref::new(self.dtds.pop().expect("exhausted the dTD pool")); if ep_addr.is_out() && ep_addr.index() != 0 { // install buffer in the dTD let addr = NonNull::new_unchecked(if max_packet_size <= 64 { self.b64s .pop() .expect("OOM during 64-byte buffer request") .as_mut_ptr() } else if 
max_packet_size <= 512 { self.b512s .pop() .expect("OOM during 64-byte buffer request") .as_mut_ptr() } else { unimplemented!("buffers of {}-bytes are not available", max_packet_size) }); let mut token = Token::empty(); token.set_total_bytes(max_packet_size.into()); token.set_status(Status::active()); token.set_ioc(); dtd.set_token(token); dtd.set_pages(addr); dqh.set_address(addr); } dqh.set_next_dtd(Some(dtd)); } // NOTE no memory barrier here because we are not going to hand this to // the hardware just yet drop(dqh); // mark this endpoint as used self.mark_ep_as_used(ep_addr); // NOTE we should do endpoint configuration after the device has // transitioned from the 'Address' state to the 'Configured' state but // `usb_device` provides no hook to do that so we just do it here. if ep_addr.index() != 0 { self.configure_ep(ep_addr, ep_type); } Ok(ep_addr) } fn enable(&mut self) { /// Run/Stop. Writing a one to this bit will cause the controller to /// initialize an attach event const USBCMD_RS: u32 = 1; self.usb.USBCMD.rmw(|usbcmd| usbcmd | USBCMD_RS); } fn reset(&mut self) { // Handle a bus reset -- See section 54.4.6.2.1 of the ULRM // "Clear all setup token semaphores by reading the ENDPTSETUPSTAT // register and writing the same value back to the ENDPTSETUPSTAT // register" self.usb.ENDPTSETUPSTAT.rmw(|r| r); // "Clear all the endpoint complete status bits by reading the // ENDPTCOMPLETE" register and writing the same value back to the // ENDPTCOMPLETE register" self.usb.ENDPTCOMPLETE.rmw(|r| r); // "Cancel all primed status by waiting until all bits in the ENDPTPRIME // are 0 and then writing `!0` to ENDPTFLUSH" if util::wait_for_or_timeout(|| self.usb.ENDPTPRIME.read() == 0, 2 * consts::frame()) .is_err() { memlog!("reset: ENDPTPRIME timeout"); memlog_flush_and_reset!() } self.usb.ENDPTFLUSH.write(!0); /// Port Reset const PORTSC1_PR: u32 = 1 << 8; // "Read the reset bit in the PORTSC1 register and make sure that it is // still active" let portsc1 = self.usb.PORTSC1.read(); if portsc1 & PORTSC1_PR == 0 { memlog!( "reset: we were too slow at handling the bus reset? (PORTSC1={:#010x})", portsc1 ); } // clear the URI bit self.usb.USBSTS.write(USBSTS_URI); memlog!("finished handling bus reset @ {:?}", time::uptime()); crate::memlog_try_flush(); } fn poll(&mut self) -> PollResult { // TODO report `Suspend` and `Resume` events /// When a controller enters a suspend state from an active state const USBSTS_SLI: u32 = 1 << 8; /// System error const USBSTS_SEI: u32 = 1 << 4; // The Start of Frame (SoF) event will trigger the interrupt handler // roughly every 125 us. The SoF is synchronized to USB events. 
The // interrupt flag must be cleared to avoid missing the next SoF event self.usb.USBSTS.write(USBSTS_SRE); let sts = self.usb.USBSTS.read(); if sts & USBSTS_URI != 0 { memlog!("poll() -> Reset @ {:?}", time::uptime()); crate::memlog_try_flush(); self.last_poll_was_none = false; return PollResult::Reset; } let setupstat = self.usb.ENDPTSETUPSTAT.read() as u16; let mut complete = self.usb.ENDPTCOMPLETE.read(); if sts & USBSTS_PCI != 0 { self.port_change(); } if setupstat != 0 { // cache `setuptstat`; it needs special handling in `read` self.setupstat = Some(setupstat); } let txcomplete = complete >> 16; if txcomplete != 0 { for bit in OneIndices::of(txcomplete) { self.end_write(bit); } complete &= 0xffff; } if setupstat != 0 || self.ep_in_complete.is_some() || self.status_out != 0 || complete != 0 { let ep_setup = setupstat; let ep_in_complete = self.ep_in_complete.take().unwrap_or(0); // STATUS out needs to be reported after the IN data phase let ep_out = if self.status_out != 0 && ep_in_complete == 0 { mem::replace(&mut self.status_out, 0) } else { // the higher bits were cleared in the previous `if` block complete as u16 }; let data = Data { ep_in_complete, ep_setup, ep_out, }; memlog!("poll() -> {:?} @ {:?}", data, time::uptime()); crate::memlog_try_flush(); self.last_poll_was_none = false; return data.into(); } if !self.last_poll_was_none { self.last_poll_was_none = true; memlog!("poll() -> None"); } crate::memlog_try_flush(); PollResult::None } fn read(&mut self, ep_addr: EndpointAddress, buf: &mut [u8]) -> Result<usize, UsbError> { memlog!( "read(ep={}, cap={}, self.setupstat={:?}) ... @ {:?}", ep_addr.index(), buf.len(), self.setupstat, time::uptime() ); crate::memlog_try_flush(); let setupstat = if let Some(setupstat) = self.setupstat.as_mut() { let mask = 1 << ep_addr.index(); if *setupstat & mask != 0 { *setupstat &= !mask; Some(mask) } else { None } } else { None }; if self.setupstat == Some(0) { self.setupstat = None; } let dqh = self.get_dqh(ep_addr).ok_or(UsbError::InvalidEndpoint)?; let ep_mask = util::epaddr2endptmask(ep_addr); if let Some(setupstat) = setupstat { // SETUP packets need special handling because no dTD is used // see section 54.4.6.4.2.1 of the ULRM // 1. "Write 1 to clear corresponding bit in ENDPTSETUPSTAT" self.usb.ENDPTSETUPSTAT.write(u32::from(setupstat)); const CMD_SUTW: u32 = 1 << 13; let n = dQH::SETUP_BYTES; loop { // 2. "Write 1 to Setup Tripwire (SUTW) in USBCMD" self.usb.USBCMD.rmw(|cmd| cmd | CMD_SUTW); // 3. "Duplicate contents of dQH.SetupBuffer into local software // byte array" dqh.copy_setup_bytes(&mut buf[..n]); // 4. "Read Setup TripWire (SUTW) in USBCMD. If set continue; if // cleared go to 2" if self.usb.USBCMD.read() & CMD_SUTW != 0 { break; } else { continue; } } drop(dqh); // 5. "Write 0 to clear Setup Tripwire (SUTW) in USBCMD" self.usb.USBCMD.rmw(|cmd| cmd & !CMD_SUTW); self.clear_interrupt(); // repeat some of `usb_device` logic here because `usb_device` won't // trigger the STATUS out phase nor does it have hook for // SET_CONFIGURATION match &buf[..4] { // SET_ADDRESS -- no data phase [0, 5, _, _] => {} // SET_CONFIGURATION [0, 9, _, _] | // SET_INTERFACE [1, 11, _, _] => { // FIXME (a) we should only reset the endpoints when the // configuration changed. 
(b) we should only reset the // endpoints that are part of the new configuration for ep_addr in self.allocated_eps() { self.reset_ep(ep_addr) } } // GET_DESCRIPTOR [128, 6, _, _] => { self.pre_status_out = 1; } _ => { memlog!("unexpected SETUP packet: {:?}", &buf[..n]); memlog_flush_and_reset!(); } } memlog!("... {:?} @ {:?}", &buf[..n], time::uptime()); crate::memlog_try_flush(); Ok(n) } else if ep_addr.index() == 0 { // FIXME can we set up the buffer and prime the endpoint earlier? // the dTD should already be installed in `next_dtd` // TODO turn into debug_assertions unsafe { assert!(dqh.get_current_dtd().is_none()); assert!(dqh.get_next_dtd().is_some()); } // "Executing a transfer descriptor", section 54.4.6.6.3 let dtd = unsafe { dqh.get_next_dtd().expect("UNREACHABLE") }; let cap = buf.len(); unsafe { let mut token = Token::empty(); token.set_total_bytes(cap); token.set_status(Status::active()); token.set_ioc(); dtd.set_token(token); let addr = NonNull::new_unchecked(buf.as_ptr() as *mut u8); dtd.set_pages(addr); dqh.set_address(addr); } // force all previous memory operations to complete before // priming atomic::fence(Ordering::Release); // prime the endpoint self.usb.ENDPTPRIME.rmw(|prime| prime | ep_mask); // now the hardware can modify dQH and dTD memlog!("OUT{} primed @ {:?}", ep_addr.index(), time::uptime()); // FIXME return WouldBlock instead of busy waiting // wait for completion if util::wait_for_or_timeout( || self.usb.ENDPTCOMPLETE.read() & ep_mask != 0, 2 * consts::microframe(), ) .is_err() { memlog!("read: ENDPTCOMPLETE timeout"); memlog_flush_and_reset!(); } // synchronize with DMA operations before reading dQH or dTD atomic::fence(Ordering::Acquire); // TODO invalidate the data cache before reading `dtd` // clear complete bit self.usb.ENDPTCOMPLETE.write(ep_mask); self.clear_interrupt(); let token = unsafe { dtd.get_token() }; let status = token.get_status(); if status.is_active() || status.has_errors() || status.is_halted() { memlog!("read: DMA transfer failed"); memlog_flush_and_reset!(); } let left = unsafe { dqh.get_token().get_total_bytes() }; let n = cap - left as usize; memlog!("... 
read {:?} @ {:?}", &buf[..n], time::uptime()); // leave the dTD in place for the next transfer unsafe { dqh.clear_current_dtd(); dqh.set_next_dtd(Some(dtd)); } Ok(n) } else { if self.usb.ENDPTCOMPLETE.read() & ep_mask == 0 { return Err(UsbError::WouldBlock); } // copy out the data and re-prime buffer let dtd = unsafe { dqh.get_current_dtd().expect("UNREACHABLE") }; // clear complete bit self.usb.ENDPTCOMPLETE.write(ep_mask); self.clear_interrupt(); // synchronize with DMA operations before reading dQH or dTD atomic::fence(Ordering::Acquire); // TODO invalidate the data cache before reading `dtd` let token = unsafe { dtd.get_token() }; let status = token.get_status(); if status.is_active() || status.has_errors() || status.is_halted() { memlog!("read: DMA transfer failed"); memlog_flush_and_reset!(); } // TODO get `total_bytes` from `dtd` after invalidating the cache let left = unsafe { dqh.get_token().get_total_bytes() }; let max_packet_size = dqh.get_max_packet_size(); let n = max_packet_size - left; // NOTE OUT endpoints are given a buffer during `alloc_ep` let addr = dqh.get_address().expect("UNREACHABLE"); unsafe { buf[..n.into()] .copy_from_slice(slice::from_raw_parts(addr.as_ptr(), usize::from(n))); } memlog!("read: {} bytes @ {:?}", n, time::uptime()); unsafe { let mut token = Token::empty(); token.set_total_bytes(max_packet_size.into()); token.set_status(Status::active()); token.set_ioc(); dtd.set_token(token); dtd.set_pages(addr); // leave the dTD in place for the next transfer dqh.clear_current_dtd(); dqh.set_next_dtd(Some(dtd)); } // force all previous memory operations to complete before // priming atomic::fence(Ordering::Release); // prime the endpoint self.usb.ENDPTPRIME.rmw(|prime| prime | ep_mask); memlog!("OUT{} primed @ {:?}", ep_addr.index(), time::uptime()); Ok(n.into()) } } fn start_write(&mut self, ep_addr: EndpointAddress, bytes: &[u8]) -> Result<usize, UsbError> { memlog!( "start_write(ep={}, bytes_len={}) ... 
@ {:?}", ep_addr.index(), bytes.len(), time::uptime() ); crate::memlog_try_flush(); let dqh = self.get_dqh(ep_addr).ok_or(UsbError::InvalidEndpoint)?; let max_packet_size = dqh.get_max_packet_size(); let n = bytes.len(); if n > usize::from(max_packet_size) { return Err(UsbError::EndpointMemoryOverflow); } // "Executing a transfer descriptor", section 54.4.6.6.3 // the dTD should already be installed in `next_dtd` unsafe { if dqh.get_current_dtd().is_some() { // transfer in progress return Err(UsbError::WouldBlock); } else { assert!(dqh.get_next_dtd().is_some()); } } // this is the first time this endpoint is being used let dtd = unsafe { dqh.get_next_dtd().expect("UNREACHABLE") }; let addr = if let Some(addr) = dqh.get_address() { addr } else { let addr = unsafe { NonNull::new_unchecked(if max_packet_size <= 64 { self.b64s.pop().expect("OOM").as_mut_ptr() } else if max_packet_size <= 512 { self.b512s.pop().expect("OOM").as_mut_ptr() } else { unimplemented!() }) }; dqh.set_address(addr); addr }; unsafe { let mut token = Token::empty(); token.set_total_bytes(n); token.set_status(Status::active()); token.set_ioc(); dtd.set_token(token); // copy data into static buffer ptr::copy_nonoverlapping(bytes.as_ptr(), addr.as_ptr(), n); dtd.set_pages(addr); dqh.set_address(addr); } // force all previous memory operations to complete before // priming atomic::fence(Ordering::Release); // "Prime endpoint by writing 1 to correct bit position in ENDPTPRIME" let mask = util::epaddr2endptmask(ep_addr); self.usb.ENDPTPRIME.rmw(|prime| prime | mask); // now the hardware can modify dQH and dTD memlog!("IN{} primed @ {:?}", ep_addr.index(), time::uptime()); Ok(n) } fn end_write(&mut self, idx: u8) { let ep_addr = EndpointAddress::from_parts(usize::from(idx), UsbDirection::In); let mask = util::epaddr2endptmask(ep_addr); let dqh = self.get_dqh(ep_addr).expect("UNREACHABLE"); let dtd = unsafe { dqh.get_current_dtd().expect("UNREACHABLE") }; // synchronize with DMA operations before reading dQH or dTD atomic::fence(Ordering::Acquire); // TODO invalidate the cache before reading `dtd` // clear complete bit self.usb.ENDPTCOMPLETE.write(mask); self.clear_interrupt(); let token = unsafe { dtd.get_token() }; let status = token.get_status(); if status.is_active() || status.has_errors() || status.is_halted() { memlog!("write: DMA transfer failed"); memlog_flush_and_reset!(); } self.set_ep_in_complete(ep_addr.index()); let mask = (mask >> 16) as u16; if self.pre_status_out & mask != 0 { self.status_out |= mask; self.pre_status_out &= !mask; } memlog!("end_write(ep={}) @ {:?}", ep_addr.index(), time::uptime()); // leave the dTD in place for the next transfer unsafe { dqh.clear_current_dtd(); dqh.set_next_dtd(Some(dtd)); } } fn set_device_address(&mut self, addr: u8) { memlog!("set_device_address({})", addr); crate::memlog_try_flush(); // "instantaneous" address update self.usb.DEVICEADDR.write((addr as u32) << 25); // FIXME enabling an endpoint should be doing after receiving a // SET_CONFIGURATION control packet -- `usb_device` provides no hook for // that but we could do this in `read` for ep_addr in self.allocated_eps() { self.enable_ep(ep_addr) } } // # Helper functions /// Clears the USBSTS_UI bit fn clear_interrupt(&mut self) { /// USB Interrupt const USBSTS_UI: u32 = 1; self.usb.USBSTS.write(USBSTS_UI); } fn port_change(&mut self) { memlog!("port_change @ {:?}", time::uptime()); crate::memlog_try_flush(); // clear the 'Port Change Detect' bit self.usb.USBSTS.write(USBSTS_PCI); } fn get_dqh(&self, ep_addr: 
EndpointAddress) -> Option<Ref<dQH>> { let dqhidx = util::epaddr2dqhidx(ep_addr); // bounds check if dqhidx < ENDPOINTS { // NOTE(unsafe) `ENDPTLISTADDR` has already been initialized at this point Some(unsafe { Ref::new_unchecked((self.usb.ENDPTLISTADDR.read() as *const dQH).add(dqhidx)) }) } else { None } } fn is_ep_being_used(&self, ep_addr: EndpointAddress) -> bool { let mask = 1 << util::epaddr2dqhidx(ep_addr); self.used_dqhs & mask != 0 } fn mark_ep_as_used(&mut self, ep_addr: EndpointAddress) { let mask = 1 << util::epaddr2dqhidx(ep_addr); self.used_dqhs |= mask; } /// Returns an iterator over all the allocated endpoints fn allocated_eps(&self) -> impl Iterator<Item = EndpointAddress> { OneIndices::of(u32::from(self.used_dqhs)).map(|idx| util::dqhidx2epaddr(usize::from(idx))) } fn configure_ep(&mut self, ep_addr: EndpointAddress, ep_type: EndpointType) { assert_ne!( ep_addr.index(), 0, "endpoint 0 is always the control endpoint" ); let idx = ep_addr.index(); assert_eq!( idx, 1, "configuring endpoint {} is not supported at the moment", idx ); const ENDPTCTRL_TXT_OFFSET: u8 = 18; const ENDPTCTRL_TXT_MASK: u32 = 0b11; const ENDPTCTRL_RXT_OFFSET: u8 = 2; const ENDPTCTRL_RXT_MASK: u32 = 0b00; const BULKT: u32 = 0b10; let ty = match ep_type { EndpointType::Control => 0b00, EndpointType::Isochronous => 0b01, EndpointType::Bulk => BULKT, EndpointType::Interrupt => 0b11, }; // "If one endpoint direction is enabled and the paired endpoint // ofopposite direction is disabled then the unused direction // typemust be changed from the default control-type to any other // type (that is Bulk-type). leaving an unconfigured endpoint // controlcauses undefined behavior for the data pid tracking on // the active endpoint/direction." -- section 56.6.40 of ULLRM if ep_addr.is_out() { self.usb.ENDPTCTRL1.rmw(|ctrl| { (ctrl & !(ENDPTCTRL_RXT_MASK << ENDPTCTRL_RXT_OFFSET)) | (ty << ENDPTCTRL_RXT_OFFSET) }); if self.usb.ENDPTCTRL1.read() & ENDPTCTRL_TXE == 0 { self.usb.ENDPTCTRL1.rmw(|ctrl| { (ctrl & !(ENDPTCTRL_TXT_MASK << ENDPTCTRL_TXT_OFFSET)) | (BULKT << ENDPTCTRL_TXT_OFFSET) }); } } else { self.usb.ENDPTCTRL1.rmw(|ctrl| { (ctrl & !(ENDPTCTRL_TXT_MASK << ENDPTCTRL_TXT_OFFSET)) | (ty << ENDPTCTRL_TXT_OFFSET) }); if self.usb.ENDPTCTRL1.read() & ENDPTCTRL_RXE == 0 { self.usb.ENDPTCTRL1.rmw(|ctrl| { (ctrl & !(ENDPTCTRL_RXT_MASK << ENDPTCTRL_RXT_OFFSET)) | (BULKT << ENDPTCTRL_RXT_OFFSET) }); } } } fn enable_ep(&mut self, ep_addr: EndpointAddress) { // TODO generalize beyond endpoint 1 if ep_addr.is_out() { let idx = ep_addr.index(); if idx != 0 { // prime the endpoint let mask = util::epaddr2endptmask(ep_addr); self.usb.ENDPTPRIME.rmw(|prime| prime | mask); memlog!("primed OUT{} @ {:?}", idx, time::uptime()); } self.usb.ENDPTCTRL1.rmw(|ctrl| ctrl | ENDPTCTRL_RXE); } else { self.usb.ENDPTCTRL1.rmw(|ctrl| ctrl | ENDPTCTRL_TXE); } } /// Resets the endpoint PID sequence fn reset_ep(&mut self, ep_addr: EndpointAddress) { // TODO generalize beyond endpoint 1 if ep_addr.is_out() { // TODO turn into a debug assertion assert_ne!( self.usb.ENDPTCTRL1.read() & ENDPTCTRL_RXE, 0, "endpoint not enabled" ); self.usb.ENDPTCTRL1.rmw(|ctrl| ctrl | ENDPTCTRL_RXR); } else { // TODO turn into a debug assertion assert_ne!(self.usb.ENDPTCTRL1.read() & ENDPTCTRL_TXE, 0); self.usb.ENDPTCTRL1.rmw(|ctrl| ctrl | ENDPTCTRL_TXR); } } fn set_ep_in_complete(&mut self, index: usize) { assert!(index < ENDPOINTS / 2); if let Some(ep_in_complete) = self.ep_in_complete.as_mut() { *ep_in_complete |= 1 << index; } else { self.ep_in_complete = 
Some(1 << index); } } }
<reponame>bordoley/java-restlib /* * Copyright (C) 2012 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package restlib.serializable.atom; import restlib.net.IRI; import com.google.common.base.Optional; import com.google.common.base.Preconditions; public abstract class AtomFeedWrapper<T extends AtomEntry<?>> implements AtomFeed<T> { private final AtomFeed<T> delegate; protected AtomFeedWrapper(final AtomFeed<T> delegate) { Preconditions.checkNotNull(delegate); this.delegate = delegate; } protected AtomFeed<? extends AtomEntry<?>> delegate() { return delegate; } @Override public Iterable<AtomPerson> getAuthors() { return delegate.getAuthors(); } @Override public Iterable<AtomCategory> getCategories() { return delegate.getCategories(); } @Override public Iterable<AtomPerson> getContributors() { return delegate.getContributors(); } @Override public Iterable<T> getEntries() { return delegate.getEntries(); } @Override public Optional<AtomGenerator> getGenerator() { return delegate.getGenerator(); } @Override public Optional<IRI> getIcon() { return delegate.getIcon(); } @Override public IRI getId() { return delegate.getId(); } @Override public Iterable<AtomLink> getLinks() { return delegate.getLinks(); } @Override public Optional<IRI> getLogo() { return delegate.getLogo(); } @Override public Optional<String> getRights() { return delegate.getRights(); } @Override public Optional<String> getSubtitle() { return delegate.getSubtitle(); } @Override public String getTitle() { return delegate.getTitle(); } @Override public AtomDate getUpdated() { return delegate.getUpdated(); } }
Multiplex digital PCR with digital melting curve analysis on a self-partitioning SlipChip. Digital polymerase chain reaction (digital PCR) can provide absolute quantification of target nucleic acids with high sensitivity, excellent precision, and superior resolution. Digital PCR has broad applications in both life science research and clinical molecular diagnostics. However, limited by current fluorescence imaging methods, parallel quantification of multiple target molecules in a single digital PCR run remains challenging. Here, we present a multiplex digital PCR method using digital melting curve analysis (digital MCA) with a SlipChip microfluidic system. The self-partitioning SlipChip (sp-SlipChip) can generate an array of nanoliter microdroplets with trackable physical positions using a simple loading-and-slipping operation. A fluorescence imaging adaptor and an in situ thermal cycler can be used to perform digital PCR and digital MCA on the sp-SlipChip. The unique signature melting temperature (Tm) designed into each amplification product can be used as a fingerprint to further classify the positive amplification partitions into different subgroups. Amplicons with Tm differences as low as 1.5 °C were clearly separated, and multiple amplicons in the same partition could also be distinguished by digital MCA. We further demonstrated this digital MCA method with the simultaneous digital quantification of five common respiratory pathogens: Staphylococcus aureus, Acinetobacter baumannii, Streptococcus pneumoniae, Haemophilus influenzae, and Klebsiella pneumoniae. Since digital MCA requires only an intercalation dye rather than sequence-specific hydrolysis probes to perform multiplex digital PCR analysis, it can be less expensive and is not limited by the number of available fluorescence channels.
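For intuition, here is a minimal Python sketch (not the authors' pipeline; the target names, signature Tm values, melt peaks, and partition count are all invented for illustration) of how positive partitions could be binned by their melt peak and then converted to absolute copy numbers with the standard digital-PCR Poisson correction; the tolerance is half of the 1.5 °C separation mentioned in the abstract.

import math

# Illustrative signature Tm (°C) per target; not values from the paper.
TM_BINS = {"S. aureus": 78.5, "A. baumannii": 80.0, "S. pneumoniae": 81.5}

def classify(tm, tolerance=0.75):
    # Assign a positive partition to the target whose signature Tm is closest.
    target, ref_tm = min(TM_BINS.items(), key=lambda kv: abs(kv[1] - tm))
    return target if abs(ref_tm - tm) <= tolerance else "unresolved"

def total_copies(n_positive, n_total):
    # Poisson correction used in digital PCR: mean copies per partition = -ln(1 - p)
    p = n_positive / n_total
    return -math.log(1.0 - p) * n_total

melt_peaks = [78.4, 80.1, 78.6, 81.4, 80.0]   # melt peak of each positive partition (made up)
n_partitions = 10000                           # partitions on the chip (made up)
counts = {}
for tm in melt_peaks:
    label = classify(tm)
    counts[label] = counts.get(label, 0) + 1
for target, n_pos in counts.items():
    print(target, round(total_copies(n_pos, n_partitions), 2), "copies loaded")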
#include "room.h"
#undef EXTRA_RESET
#define EXTRA_RESET extra_reset();

object hobbit;

extra_reset() {
    if (!hobbit || !living(hobbit)) {
        hobbit = clone_object("/wizards/bulut/forl/monsters/hcitizen.c");
        move_object(hobbit, this_object());
    }
}

ONE_EXIT("wizards/bulut/forl/road27", "west",
    "In the village",
    "You are walking on a small path.\n" +
    "Road ends here.\n", 3)
def separa_e_une(texto: str):
    # split the text, using a space as the
    # delimiter between elements
    texto_separado = texto.split(' ')
    # the new text is the union of the words
    # found, now separated by a dash
    texto_corrigido = '-'.join(texto_separado)
    # return both results
    return texto_separado, texto_corrigido
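A small usage example (the input string is arbitrary):

palavras, com_traco = separa_e_une("um dois tres")
print(palavras)    # ['um', 'dois', 'tres']
print(com_traco)   # um-dois-tres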
/** * Load up word table. Note that there may be superfluous spaces throughout for formatting * reasons, and these are excised before being added to the table. * * @throws IOException if unable to find file */ public static void loadWordTable() throws IOException { table = new Hashtable<String,Boolean>(); Iterator<String> it = new StringFileIterator(new File (".", wordTable)); while (it.hasNext()) { String word = it.next(); word = word.trim(); table.put(word, Boolean.TRUE); } }
Nations for Mental Health : An Action Programme on Mental Health for Underserved Populations • To enhance the attention of people and governments of the world to the effects of mental health problems and substance abuse on the social well-being and physical health of the world's underserved populations. A first step is to increase awareness and concern of the importance of mental health through a series of key high profile regional and international events. Secondly, efforts will be devoted to building up the will of the key political authorities to participate. Thirdly, and finally, efforts are to be directed at securing political commitments by decision-makers. • To establish a number of demonstration projects in each of the six WHO regions of the world. They are meant to illustrate the potential of collaborative efforts at country level, with the view of leading on to projects of a larger scale. • To encourage technical support between countries for service development, research and training. The implementation of the programme depends on voluntary contributions from governments, foundations, individuals and others. It receives financial and technical support from the Eli This document is not a formal publication of the World Health Organization (WHO), and all rights are reserved by the Organization. The document may, however, be freely reviewed, abstracted, reproduced and translated, in part or in whole, but not for sale nor for use in conjunction with commercial purposes. The views expressed in documents by named authors are solely the responsibility of those authors.
def create_and_add_acl(self, specification):
    """Store the specification and build one lookup tree per ACL id, keyed in self.acls."""
    self.specifications.append(specification)
    for aclid in specification:
        _tree = tree.Tree()
        for acl in specification[aclid]:
            network = acl['src']
            # convert the Cisco-style netmask/wildcard via de_ciscoise
            mask = self.de_ciscoise(acl['netm'])
            action = acl['action']
            fullnet = network + "/" + mask
            _tree.Insert(fullnet, action + " in acl " + aclid)
        self.acls[aclid] = _tree
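The expected shape of `specification` can be read off the field accesses above. A hypothetical call might look like the following; the ACL id, networks, masks, and actions are invented, and `acl_store` stands in for an instance of the enclosing class.

spec = {
    "101": [
        {"src": "10.0.0.0", "netm": "0.0.0.255", "action": "permit"},
        {"src": "192.168.1.0", "netm": "0.0.0.255", "action": "deny"},
    ],
}
acl_store.create_and_add_acl(spec)
# afterwards acl_store.acls["101"] holds a tree mapping each prefix to e.g. "permit in acl 101"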
/* * Copyright (c) "Neo4j" * Neo4j Sweden AB [http://neo4j.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.kernel.impl.store; import java.util.Arrays; import org.neo4j.kernel.impl.store.format.standard.PropertyRecordFormat; import org.neo4j.kernel.impl.store.record.PropertyBlock; import org.neo4j.storageengine.api.cursor.StoreCursors; import org.neo4j.values.storable.Value; import org.neo4j.values.storable.Values; /** * Defines valid property types. */ public enum PropertyType { BOOL(1) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.booleanValue(getValue(block.getSingleValueLong())); } private boolean getValue(long propBlock) { return (propBlock & 0x1) == 1; } }, BYTE(2) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.byteValue(block.getSingleValueByte()); } }, SHORT(3) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.shortValue(block.getSingleValueShort()); } }, CHAR(4) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.charValue((char) block.getSingleValueShort()); } }, INT(5) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.intValue(block.getSingleValueInt()); } }, LONG(6) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { long firstBlock = block.getSingleValueBlock(); long value = valueIsInlined(firstBlock) ? (block.getSingleValueLong() >>> 1) : block.getValueBlocks()[1]; return Values.longValue(value); } private boolean valueIsInlined(long firstBlock) { // [][][][][ i,tttt][kkkk,kkkk][kkkk,kkkk][kkkk,kkkk] return (firstBlock & 0x10000000L) > 0; } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return valueIsInlined(firstBlock) ? 
1 : 2; } }, FLOAT(7) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.floatValue(Float.intBitsToFloat(block.getSingleValueInt())); } }, DOUBLE(8) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return Values.doubleValue(Double.longBitsToDouble(block.getValueBlocks()[1])); } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return 2; } }, STRING(9) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors storeCursors) { return store.getTextValueFor(block, storeCursors); } @Override public byte[] readDynamicRecordHeader(byte[] recordBytes) { return EMPTY_BYTE_ARRAY; } }, ARRAY(10) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return store.getArrayFor(block, cursors); } @Override public byte[] readDynamicRecordHeader(byte[] recordBytes) { byte itemType = recordBytes[0]; if (itemType == STRING.byteValue()) { return headOf(recordBytes, DynamicArrayStore.STRING_HEADER_SIZE); } else if (itemType <= DOUBLE.byteValue()) { return headOf(recordBytes, DynamicArrayStore.NUMBER_HEADER_SIZE); } else if (itemType == GEOMETRY.byteValue()) { return headOf(recordBytes, DynamicArrayStore.GEOMETRY_HEADER_SIZE); } else if (itemType == TEMPORAL.byteValue()) { return headOf(recordBytes, DynamicArrayStore.TEMPORAL_HEADER_SIZE); } throw new IllegalArgumentException("Unknown array type " + itemType); } private byte[] headOf(byte[] bytes, int length) { return Arrays.copyOf(bytes, length); } }, SHORT_STRING(11) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return LongerShortString.decode(block); } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return LongerShortString.calculateNumberOfBlocksUsed(firstBlock); } }, SHORT_ARRAY(12) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return ShortArray.decode(block); } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return ShortArray.calculateNumberOfBlocksUsed(firstBlock); } }, GEOMETRY(13) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return GeometryType.decode(block); } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return GeometryType.calculateNumberOfBlocksUsed(firstBlock); } }, TEMPORAL(14) { @Override public Value value(PropertyBlock block, PropertyStore store, StoreCursors cursors) { return TemporalType.decode(block); } @Override public int calculateNumberOfBlocksUsed(long firstBlock) { return TemporalType.calculateNumberOfBlocksUsed(firstBlock); } }; public static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; public static final int BLOCKS_USED_FOR_BAD_TYPE_OR_ENCODING = -1; // TODO In wait of a better place private static final int PAYLOAD_SIZE = PropertyRecordFormat.DEFAULT_PAYLOAD_SIZE; private final int type; PropertyType(int type) { this.type = type; } /** * Returns an int value representing the type. * * @return The int value for this property type */ public int intValue() { return type; } /** * Returns a byte value representing the type. As long as there are * &lt 128 PropertyTypes, this should be equal to intValue(). When this * statement no longer holds, this method should be removed. 
* * @return The byte value for this property type */ public byte byteValue() { return (byte) type; } public abstract Value value(PropertyBlock block, PropertyStore store, StoreCursors storeCursors); public static PropertyType getPropertyTypeOrNull(long propBlock) { // [][][][][ ,tttt][kkkk,kkkk][kkkk,kkkk][kkkk,kkkk] int type = typeIdentifier(propBlock); switch (type) { case 1: return BOOL; case 2: return BYTE; case 3: return SHORT; case 4: return CHAR; case 5: return INT; case 6: return LONG; case 7: return FLOAT; case 8: return DOUBLE; case 9: return STRING; case 10: return ARRAY; case 11: return SHORT_STRING; case 12: return SHORT_ARRAY; case 13: return GEOMETRY; case 14: return TEMPORAL; default: return null; } } private static int typeIdentifier(long propBlock) { return (int) ((propBlock & 0x000000000F000000L) >> 24); } public static PropertyType getPropertyTypeOrThrow(long propBlock) { PropertyType type = getPropertyTypeOrNull(propBlock); if (type == null) { throw new InvalidRecordException("Unknown property type: " + typeIdentifier(propBlock) + " (from property block " + Long.toHexString(propBlock) + ")."); } return type; } // TODO In wait of a better place public static int getPayloadSize() { return PAYLOAD_SIZE; } // TODO In wait of a better place public static int getPayloadSizeLongs() { return PAYLOAD_SIZE >>> 3; } public int calculateNumberOfBlocksUsed(long firstBlock) { return 1; } public byte[] readDynamicRecordHeader(byte[] recordBytes) { throw new UnsupportedOperationException(); } }
(CBS News) CHICAGO - A fast food chain known for putting Christian principles ahead of profits is facing a culture war over same-sex marriage. The controversy is prompting politicians and activist groups on both sides of the issue to organize events where like-minded people can demonstrate their discontent. The Chick-fil-A menu offers sandwiches, nuggets and strips, but critics say it is all served with a side-order of bigotry. The company's boss has angered gay rights advocates, some politicians - even the Muppets - with his stand against same-sex marriage. In an interview on The Ken Coleman Show last month, Chick-fil-A President and COO Dan Cathy said, "I think we're inviting God's judgment on our nation when we shake our fist at him and say, you know, 'We know better than you as to what constitutes marriage."' Cathy added that his company is "very much supporting of the family, the biblical definition of the family unit." Within days, the Internet was on fire with criticism. Some stores were picketed. The Henson Company pulled its Muppets out of a marketing deal with Chick-fil-A. And the mayors of Boston and Chicago sounded off. Chicago Mayor Rahm Emanuel told reporters, "What the COO has said as it relates to gay marriage and gay couples is not what I believe. But more importantly, it's not what the people of the city of Chicago believe." Chick-fil-A has 1,600 locations in 39 states and Washington, D.C. and had more than $4 billion in sales last year. Chicago has one Chick-fil-A franchise and there were plans afoot to build a second. But city Alderman Joe Moreno says they can forget about getting a permit. "I'm not gonna sit on the sidelines and allow them to come in when I know in my heart that they believe in discriminating against gay people," Moreno says. Chicago Chick-fil-A protesters plan "kiss-in" Chicago leaders slam "discriminatory" Chick-fil-A Huckabee calls for "Chick-fil-A appreciation day" The franchise owner of the lone Chicago Chick-fil-A, Lauren Silich, says hold on: Her store welcomes everyone and has gay employees. "We are open to any sort of lifestyle ... believe what ... you want to believe in and what you do, to me, it really doesn't matter," she says. Conservatives have rallied to the chain's side. Radio host and former Arkansas Gov. Mike Huckabee and former GOP presidential contender and former Sen. Rick Santorum are urging everyone to patronize their local Chick-fil-A on Wednesday. Still others wonder whether free speech is at issue here, and whether it's fair to consider action against a company because of the personal beliefs of its boss. But a group called Equality Illinois is urging gay and lesbian couples to go to their local Chick-fil-A restaurants on Friday, Aug. 3 for a "kiss-in" campaign the group is promoting along with other gay rights organizations nationwide, according to CBS station WBBM. Equality Illinois is also urging Chicagoans who support gay rights to participate in an "Eat for Love Day" on Wednesday. The organization is urging people to eat at a local restaurant that supports LGBT rights and "is unafraid to show its support for equality for everyone." To see the Dean Reynolds report, click on the video in the player above.
// ListApplicationsPages is a custom Kinesis Analytics paginated listing helper,
// formatted similarly to other service-generated code.
func ListApplicationsPages(conn *kinesisanalytics.KinesisAnalytics, input *kinesisanalytics.ListApplicationsInput, fn func(*kinesisanalytics.ListApplicationsOutput, bool) bool) error {
	for {
		output, err := conn.ListApplications(input)
		if err != nil {
			return err
		}

		lastPage := !aws.BoolValue(output.HasMoreApplications)
		if !fn(output, lastPage) || lastPage {
			break
		}

		input.ExclusiveStartApplicationName = output.ApplicationSummaries[len(output.ApplicationSummaries)-1].ApplicationName
	}

	return nil
}
Slewing maneuver and vibration control of tethered space solar power satellite. A control approach is presented for vibration suppression of a tethered space solar power satellite (SSPS) during a slewing maneuver, combining attitude control with active vibration control based on tether tension. A mathematical description of the slewing motion of the tethered SSPS is proposed. The Mission Function (MF) control algorithm is applied in the design of a PD controller, which is able not only to stabilize the satellite attitude but also to suppress vibration of the flexible solar panel. An additional active vibration control system, acting on the flexible parts for further micro-vibration suppression, is designed by employing the MF control algorithm, with the nonlinearity of the flexible tether taken into account in the controller design. In the design process, the stability of the vibration control system is proved. Simulation results demonstrate that the proposed approach can significantly reduce the vibration of the flexible solar panel during and after the maneuver operation.
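The abstract gives no equations. Purely to illustrate the PD portion of such a scheme (this is not the Mission Function algorithm, and it ignores the tether and flexible-panel dynamics entirely), a toy single-axis rigid-body slew under a PD torque law u = Kp*err - Kd*omega can be sketched in Python; the inertia, gains, slew angle, and time step are all invented.

import math

I = 5.0e3            # moment of inertia, kg*m^2 (arbitrary)
Kp, Kd = 40.0, 600.0 # PD gains (arbitrary, chosen to be well damped)
theta_ref = math.radians(30.0)   # commanded slew angle
theta, omega, dt = 0.0, 0.0, 0.1
for _ in range(3000):            # simulate 300 s with explicit Euler steps
    err = theta_ref - theta
    u = Kp * err - Kd * omega    # PD torque command
    omega += (u / I) * dt
    theta += omega * dt
print(round(math.degrees(theta), 2), "deg after slew")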
/**
 * Asks for the player's input and uses it in a switch case to drive the other animal-selling methods.
 * It first checks whether the player owns any animals; if not, the player cannot sell.
 *
 * @param player the player who chooses to sell an animal.
 */
public void saleStart(Player player) {
    if (player.getAnimalList().size() == 0) {
        System.out.println("You don't own any animal.");
        System.out.println("Return to main menu...");
        backToMenu = true;
        FormatHelp.threadSleep();
        FormatHelp.emptyScreen();
    } else {
        System.out.println("Welcome to the Store!");
        System.out.println("Do you want to sell animals?");
        System.out.println("1. Yes. 2. No");
        try {
            userInput = Integer.parseInt(console.nextLine());
        } catch (Exception e) {
            System.out.println("Incorrect input. Enter a number.");
        }
        switch (userInput) {
            case 1:
                printAnimalsOwn(player);
                System.out.println("Return to main menu...");
                backToMenu = false;
                FormatHelp.threadSleep();
                FormatHelp.emptyScreen();
                break;
            case 2:
                System.out.println("Return to main menu...");
                backToMenu = true;
                FormatHelp.threadSleep();
                FormatHelp.emptyScreen();
                break;
            default:
                FormatHelp.threadSleep();
                FormatHelp.emptyScreen();
                saleStart(player);
                break;
        }
    }
}
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
PASTEBIN FOR COMMON SNIPPETS OF CODE
"""

'''OLD ENEMY INITIALIZATION'''
# Enemy - race, name, level, main job, support job, gold, inventory
battle(Enemy(elf, "Goblin", 25, white_mage, "Warrior", 52, inventory=[('Matted Fur', 70), ('High Quality Fur', 10)]))
/* PRINT ARTICLE
 * Inputs: none
 * Outputs: none
 * Description: only prints the article's data
 */
void Article::imprimir(){
    qDebug() << "ID Article: " << this->id;
    qDebug() << "Category: " << this->category;
    qDebug() << "In Stock: " << this->stock;
    qDebug() << "Location: " << this->position;
    qDebug() << "Time: " << this->time;
    qDebug() << "";
}
module Command.CreateNewProject ( createNewProject ) where import Control.Monad.Except (throwError) import Control.Monad.IO.Class (liftIO) import qualified Path as P import System.Directory (createDirectory, getCurrentDirectory) import qualified System.Directory import qualified System.FilePath as FP import Text.Printf (printf) import Command (Command, CommandError (..)) import qualified Command.Common import qualified Common import qualified Data import ExternalCode (SourceExternalCodeDir) import StrongPath (Abs, Dir, File, Path, Rel, (</>)) import qualified StrongPath as SP import qualified Util.Terminal as Term createNewProject :: String -> Command () createNewProject projectName = do absCwd <- liftIO getCurrentDirectory waspProjectDir <- case SP.parseAbsDir $ absCwd FP.</> projectName of Left err -> throwError $ CommandError ("Failed to parse absolute path to wasp project dir: " ++ show err) Right sp -> return sp liftIO $ do createDirectorySP waspProjectDir writeFileSP (waspProjectDir </> mainWaspFileInWaspProjectDir) mainWaspFileContent writeFileSP (waspProjectDir </> gitignoreFileInWaspProjectDir) gitignoreFileContent writeFileSP (waspProjectDir </> Common.dotWaspRootFileInWaspProjectDir) "File marking the root of Wasp project." let extCodeDir = waspProjectDir </> Common.extCodeDirInWaspProjectDir liftIO $ do createDirectorySP extCodeDir dataDir <- Data.getAbsDataDirPath let copyTemplateFile' = copyTemplateFile dataDir extCodeDir copyTemplateFile' (SP.fromPathRelFile [P.relfile|new/ext/MainPage.js|]) mainPageJsFileInExtCodeDir copyTemplateFile' (SP.fromPathRelFile [P.relfile|new/ext/Main.css|]) mainCssFileInExtCodeDir copyTemplateFile' (SP.fromPathRelFile [P.relfile|new/ext/waspLogo.png|]) waspLogoFileInExtCodeDir liftIO $ do putStrLn $ Term.applyStyles [Term.Green] ("Created new Wasp app in ./" ++ projectName ++ " directory!") putStrLn "To run it, do:" putStrLn "" putStrLn $ Term.applyStyles [Term.Bold] (" cd " ++ projectName) putStrLn $ Term.applyStyles [Term.Bold] " wasp start" putStrLn "" putStrLn Command.Common.alphaWarningMessage where copyTemplateFile :: Path Abs (Dir Data.DataDir) -> Path Abs (Dir SourceExternalCodeDir) -> Path (Rel Common.CliTemplatesDir) File -> Path (Rel SourceExternalCodeDir) File -> IO () copyTemplateFile dataDir extCodeDir srcTmplFile dstExtDirFile = System.Directory.copyFile (SP.toFilePath (dataDir </> cliTemplatesDirInDataDir </> srcTmplFile)) (SP.toFilePath (extCodeDir </> dstExtDirFile)) cliTemplatesDirInDataDir :: Path (Rel Data.DataDir) (Dir Common.CliTemplatesDir) cliTemplatesDirInDataDir = SP.fromPathRelDir [P.reldir|Cli/templates|] mainWaspFileInWaspProjectDir :: Path (Rel Common.WaspProjectDir) File mainWaspFileInWaspProjectDir = SP.fromPathRelFile [P.relfile|main.wasp|] mainWaspFileContent = unlines [ "app %s {" `printf` projectName , " title: \"%s\"" `printf` projectName , "}" , "" , "route \"/\" -> page Main" , "page Main {" , " component: import Main from \"@ext/MainPage.js\"" , "}" ] gitignoreFileInWaspProjectDir :: Path (Rel Common.WaspProjectDir) File gitignoreFileInWaspProjectDir = SP.fromPathRelFile [P.relfile|.gitignore|] gitignoreFileContent = unlines [ "/.wasp/" ] mainPageJsFileInExtCodeDir :: Path (Rel SourceExternalCodeDir) File mainPageJsFileInExtCodeDir = SP.fromPathRelFile [P.relfile|MainPage.js|] mainCssFileInExtCodeDir :: Path (Rel SourceExternalCodeDir) File mainCssFileInExtCodeDir = SP.fromPathRelFile [P.relfile|Main.css|] waspLogoFileInExtCodeDir :: Path (Rel SourceExternalCodeDir) File waspLogoFileInExtCodeDir = 
SP.fromPathRelFile [P.relfile|waspLogo.png|] writeFileSP = writeFile . SP.toFilePath createDirectorySP = createDirectory . SP.toFilePath
async def _get_num_open_trades(self) -> int: num = 0 for pair in self.trades: num += len(self.trades[pair]['open']) return num
def send_cmd(self, name, iterations=5): packet = cmd(name) for i in range(iterations): for device in self.devices: device.send_data(packet)
// EncodeBoolean appends the given bool to the buffer as a "true" or "false" string.
func (e *Encoder) EncodeBoolean(b bool) {
	if b {
		e.Buf = append(e.Buf, "true"...)
	} else {
		e.Buf = append(e.Buf, "false"...)
	}
}
a = input().split() ost = int(a[0]) gost = int(a[1]) komplekt = gost pribs = list(map(int, input().split())) pribs.sort() bilo = [] alpha = set() while pribs: hah = pribs[0] heh = pribs.count(hah) bilo.append(heh) while hah in pribs: pribs.remove(hah) a = max(bilo) if a > gost: if a % gost == 0: komplekt *= (a // gost) else: komplekt += (a // gost)*komplekt summa = 0 for a in bilo: summa += (komplekt - a) print(summa)
//! Simple USB Audio example for PIC32MX270 (28 pins) //! //! Simulates a microphone that emits a 1 kHz tone and a dummy audio output and //! prints the payload length of each thousand received audio frame an reports //! changes of the alternate settings. //! #![no_std] #![no_main] #![feature(alloc_error_handler)] use alloc_pic32::Pic32Heap; use core::fmt::Write; use embedded_hal::{blocking::delay::DelayMs, digital::v2::*}; use mips_rt::entry; use panic_halt as _; use pic32_config_sector::pic32mx2xx::*; use pic32_hal::{ clock::Osc, coretimer::Delay, gpio::GpioExt, pac, pps::{MapPin, NoPin, PpsExt}, pps_no_pin, time::U32Ext, uart::Uart, usb::UsbBus, }; use usb_device::prelude::*; use usbd_audio::{AudioClassBuilder, Format, StreamConfig, TerminalType}; // PIC32 configuration registers for PIC32MX2xx #[link_section = ".configsfrs"] #[used] pub static CONFIGSFRS: ConfigSector = ConfigSector::default() // DEVCFG3 .FVBUSONIO(FVBUSONIO::OFF) .FUSBIDIO(FUSBIDIO::OFF) // DEVCFG2 .FPLLODIV(FPLLODIV::DIV_2) .UPLLEN(UPLLEN::ON) .UPLLIDIV(UPLLIDIV::DIV_2) .FPLLMUL(FPLLMUL::MUL_20) .FPLLIDIV(FPLLIDIV::DIV_2) // DEVCFG 1 .FWDTEN(FWDTEN::OFF) .WDTPS(WDTPS::PS1048576) .FPBDIV(FPBDIV::DIV_1) .POSCMOD(POSCMOD::XT) .FSOSCEN(FSOSCEN::OFF) .FNOSC(FNOSC::PRIPLL) // DEVCFG 0 .JTAGEN(JTAGEN::OFF) .build(); #[global_allocator] static ALLOCATOR: Pic32Heap = Pic32Heap::empty(); #[entry] fn main() -> ! { // Initialize the allocator BEFORE you use it let start = mips_rt::heap_start() as usize; let size = 8192; // in bytes unsafe { ALLOCATOR.init(start, size) } let p = pac::Peripherals::take().unwrap(); let parts = p.PORTB.split(); let vpins = p.PPS.split(); let sysclock = 40_000_000_u32.hz(); let clock = Osc::new(p.OSC, sysclock); let mut timer = Delay::new(sysclock); let txd = parts .rb0 .into_push_pull_output() .map_pin(vpins.outputs.u2tx); //let rxd = NoPin::new().map_pin(vpins.inputs.u2rx); let rxd = pps_no_pin!(vpins.inputs.u2rx); let uart = Uart::uart2(p.UART2, &clock, 115200, rxd, txd); timer.delay_ms(10u32); let (mut tx, _) = uart.split(); writeln!(tx, "USB audio test").unwrap(); let mut led = parts.rb5.into_push_pull_output(); led.set_high().unwrap(); let usb_bus = UsbBus::new(p.USB); let mut usb_audio = AudioClassBuilder::new() .input( StreamConfig::new_discrete( Format::S16le, 1, &[48000], TerminalType::InMicrophone).unwrap()) .output( StreamConfig::new_discrete( Format::S24le, 2, &[44100, 48000, 96000], TerminalType::OutSpeaker).unwrap()) .build(&usb_bus) .unwrap(); let mut usb_dev = UsbDeviceBuilder::new(&usb_bus, UsbVidPid(0x16c0, 0x27dd)) .max_packet_size_0(64) .manufacturer("Kiffie Labs") .product("Audio port") .serial_number("42") .build(); let sinetab = [ 0i16, 4276, 8480, 12539, 16383, 19947, 23169, 25995, 28377, 30272, 31650, 32486, 32767, 32486, 31650, 30272, 28377, 25995, 23169, 19947, 16383, 12539, 8480, 4276, 0, -4276, -8480, -12539, -16383, -19947, -23169, -25995, -28377, -30272, -31650, -32486, -32767, -32486, -31650, -30272, -28377, -25995, -23169, -19947, -16383, -12539, -8480, -4276, ]; let sinetab_le = unsafe { &*(&sinetab as *const _ as *const [u8; 96]) }; let mut ctr = 0; let mut input_alt_setting = 0; let mut output_alt_setting = 0; loop { if usb_dev.poll(&mut [&mut usb_audio]) { let mut buf = [0u8; 1024]; if let Ok(len) = usb_audio.read(&mut buf) { ctr += 1; if ctr >= 1000 { ctr = 0; writeln!(tx, "RX len = {}", len).unwrap(); } } } if input_alt_setting != usb_audio.input_alt_setting().unwrap() || output_alt_setting != usb_audio.output_alt_setting().unwrap() { input_alt_setting = 
usb_audio.input_alt_setting().unwrap(); output_alt_setting = usb_audio.output_alt_setting().unwrap(); writeln!(tx, "Alt. set. {} {}", input_alt_setting, output_alt_setting).unwrap(); } usb_audio.write(sinetab_le).ok(); } } #[alloc_error_handler] fn alloc_error(layout: core::alloc::Layout) -> ! { panic!("Cannot allocate heap memory: {:?}", layout); }
/*************************************************************************** * Copyright (c) 2016, Johan Mabille, Sylvain Corlay, Martin Renou * * Copyright (c) 2016, QuantStack * * * * Distributed under the terms of the BSD 3-Clause License. * * * * The full license is in the file LICENSE, distributed with this software. * ****************************************************************************/ #include "xmock_server.hpp" namespace xeus { xmock_messenger::xmock_messenger(xmock_server* server) : p_server(server) { } nl::json xmock_messenger::send_to_shell_impl(const nl::json& message) { return p_server->notify_internal_listener(message); } xmock_server::xmock_server() : m_messenger(this) { } void xmock_server::receive_shell(xmessage message) { m_shell_messages.push(std::move(message)); } void xmock_server::receive_control(xmessage message) { m_control_messages.push(std::move(message)); } void xmock_server::receive_stdin(xmessage message) { m_stdin_messages.push(std::move(message)); } std::size_t xmock_server::shell_size() const { return m_shell_messages.size(); } xmessage xmock_server::read_shell() { return read_impl(m_shell_messages); } std::size_t xmock_server::control_size() const { return m_control_messages.size(); } xmessage xmock_server::read_control() { return read_impl(m_control_messages); } std::size_t xmock_server::stdin_size() const { return m_stdin_messages.size(); } xmessage xmock_server::read_stdin() { return read_impl(m_stdin_messages); } std::size_t xmock_server::iopub_size() const { return m_iopub_messages.size(); } xpub_message xmock_server::read_iopub() { xpub_message res = std::move(m_iopub_messages.back()); m_iopub_messages.pop(); return res; } xmessage xmock_server::read_impl(message_queue& q) { xmessage res = std::move(q.back()); q.pop(); return res; } xcontrol_messenger& xmock_server::get_control_messenger_impl() { return m_messenger; } void xmock_server::send_shell_impl(xmessage message) { m_shell_messages.push(std::move(message)); } void xmock_server::send_control_impl(xmessage message) { m_control_messages.push(std::move(message)); } void xmock_server::send_stdin_impl(xmessage message) { m_stdin_messages.push(std::move(message)); } void xmock_server::publish_impl(xpub_message message, channel) { m_iopub_messages.push(std::move(message)); } void xmock_server::start_impl(xpub_message message) { m_iopub_messages.push(std::move(message)); } void xmock_server::abort_queue_impl(const listener&, long) { } void xmock_server::stop_impl() { } void xmock_server::update_config_impl(xconfiguration&) const { } std::unique_ptr<xserver> make_mock_server(xcontext&, const xconfiguration&, nl::json::error_handler_t) { return std::make_unique<xmock_server>(); } }
// GetMeteredState freshly checks the state, contacting the licensing server. func GetMeteredState() { state, err := unilicense.GetMeteredState() if err != nil { fmt.Printf("ERROR getting metered state: %+v\n", err) return } fmt.Printf("State: %+v\n", state) if state.OK { fmt.Printf("State is OK\n") } else { fmt.Printf("State is not OK\n") } fmt.Printf("Credits: %v\n", state.Credits) fmt.Printf("Used credits: %v\n", state.Used) return }
/* * Query all variations. This method stores all config and build names. */ private void internalQueryAllVariations(String configPattern) { if (this.fSQL == null) return; if (BUILDS != null) return; long start = System.currentTimeMillis(); if (DEBUG) { DEBUG_WRITER.print(" - DB query all variations for configuration pattern: "+configPattern); DEBUG_WRITER.print("..."); } ResultSet result = null; try { CONFIGS = null; BUILDS = null; BUILDS_LENGTH = 0; result = this.fSQL.queryAllVariations(configPattern); while (result.next()) { String variation = result.getString(1); StringTokenizer tokenizer = new StringTokenizer(variation, "=|"); tokenizer.nextToken(); storeBuildName(tokenizer.nextToken()); tokenizer.nextToken(); storeConfig(tokenizer.nextToken()); tokenizer.nextToken(); storeVm(tokenizer.nextToken()); } if (BUILDS_LENGTH == 0) { BUILDS = EMPTY_LIST; } } catch (SQLException e) { PerformanceTestPlugin.log(e); } finally { if (result != null) { try { result.close(); } catch (SQLException e1) { } } if (DEBUG) DEBUG_WRITER.println("done in " + (System.currentTimeMillis() - start) + "ms]"); //$NON-NLS-2$ } }
/** * Initialize our internal MessageResources bundle. * * @exception ServletException if we cannot initialize these resources */ protected void initResources() throws ServletException { try { resources = MessageResources.getMessageResources(resourceName); } catch (MissingResourceException e) { sLog.error("Cannot load internal resources from '" + resourceName + "'", e); throw new UnavailableException("Cannot load internal resources from '" + resourceName + "'"); } }
// transactionsLink returns the horizon endpoint to get transaction information. func (a *Account) transactionsLink() (string, error) { if a.internal == nil { if err := a.load(); err != nil { return "", err } } return a.linkHref(a.internal.Links.Transactions), nil }
// validateWrite determines if a particular chunk can be written. // If the size of the on disk chunk is smaller than the request // chunk then that chunk is incomplete and we allow a write to it. func (r *fileRequestWriter) validateWrite(dir, path string, req *flow.Request) error { if err := os.MkdirAll(dir, 0777); err != nil { return err } finfo, err := os.Stat(path) switch { case os.IsNotExist(err): return nil case err != nil: return app.ErrInvalid case finfo.Size() < int64(req.FlowChunkSize): return nil case finfo.Size() == int64(req.FlowChunkSize): return app.ErrExists default: return app.ErrInvalid } }
import boto3
import requests
from requests_aws4auth import AWS4Auth

region = 'us-east-1'  # e.g. us-east-1
service = 'es'
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)

# the Amazon ES domain, including the https:// scheme (requests rejects a bare host name)
host = 'https://vpc-healthevents-12cd1b220bcb-hthtzfkufqm6qmpgmboffwwum4.us-east-1.es.amazonaws.com'
# index = 'lambda-index'
index = 'my-index'
doc_type = '_doc'
url = host + '/' + index + '/' + doc_type + '/'

payload = {"settings": {"number_of_shards": 7, "number_of_replicas": 2}}
headers = {"Content-Type": "application/json"}


def lambda_handler(event, context):
    print(host)
    try:
        r = requests.get(host, auth=awsauth, headers=headers)
        print(r)
    except Exception as e:
        print(e)
    # NOTE: debug output only; avoid printing credentials anywhere beyond local testing
    print(awsauth)
    print(credentials.access_key)
    print(credentials.secret_key)
    print(credentials.token)
    count = 0
    print(event)
    try:
        for record in event['Records']:
            # Get the primary key for use as the Elasticsearch ID
            id = record['dynamodb']['Keys']['id']['S']
            print("The id obtained")
            print(id)
            if record['eventName'] == 'REMOVE':
                r = requests.delete(url + id, auth=awsauth)
            else:
                try:
                    print("trying to send request")
                    print("URL: " + url + id)
                    document = record['dynamodb']['NewImage']
                    print(document)
                    # r = requests.put(url, auth=awsauth, json=document, headers=headers)
                    r = requests.put(url + id, auth=awsauth, json=document, headers=headers)
                    # r = requests.put(url, auth=awsauth, json=payload)
                    print(r.text)
                    print(r.content)
                    print(r)
                    print("request sent")
                except Exception as e:
                    print(e)
            count += 1
        print(str(count) + ' records processed.')
        print("done")
    except Exception as e:
        print("error: ")
        print(e)
        return "processed with errors"
    return str(count) + ' records processed.'
// // NSDictionary+KLNetworkModule.h // HttpManager // // Created by kalan on 2018/1/4. // Copyright © 2018年 kalan. All rights reserved. // #import <Foundation/Foundation.h> @interface NSDictionary (KLNetworkModule) - (NSString *)toJsonString; @end
/** * A {@link uk.org.webcompere.systemstubs.resource.TestResource} which provides the exit code called when it was active. * Gives access to the {@link NoExitSecurityManager} object inside via {@link SecurityManagerStub#getSecurityManager()}. * When the {@link NoExitSecurityManager} is in use, any calls to {@link System#exit(int)} are converted * to an {@link AbortExecutionException} which the surrounding test can catch. * @since 1.0.0 */ public class SystemExit extends SecurityManagerStub<NoExitSecurityManager> { /** * What was the exit code provided if System.exit was called. * @return exit code or <code>null</code> if no exit called */ @SuppressFBWarnings("BC_UNCONFIRMED_CAST_OF_RETURN_VALUE") public Integer getExitCode() { return getSecurityManager() == null ? null : getSecurityManager().getExitCode(); } @Override protected NoExitSecurityManager createSecurityManager() { return new NoExitSecurityManager(System.getSecurityManager()); } @Override protected void doSetup() throws Exception { // clear any previous security managers clearSecurityManager(); super.doSetup(); } }
<reponame>tradle/typeforce export type Raw = (value: any, strict?: boolean) => any export type RawMatcher <T> = (value: any, strict?: boolean) => value is T export interface Match <T> extends RawMatcher<T> { error: Error | null | undefined } export type Assert <T> = (value: any, strict?: boolean) => asserts value is T export type AnyRaw <T> = Raw | Match<T> | Assert<T> export type AssertType <T> = (type: AnyRaw<T>, value: T, strict?: boolean) => asserts value is T export interface Check<T> extends Raw { match: Match<T> assert: Assert<T> toJSON: () => any } export type DerivedCheck <T extends Raw, Value> = T extends (value: any, strict?: boolean) => value is any ? MatchCheck<Value> : T extends (value: any, strict?: boolean) => asserts value is any ? AssertCheck<Value> : Check<unknown> export interface MatchCheck <T> extends Match<T>, Check<T> {} export interface AssertCheck <T> extends Assert<T>, Check<T> {} export type CheckForRaw <T extends Raw> = T extends (value: any, strict?: boolean) => value is infer U ? MatchCheck<U> : T extends (value: any, strict?: boolean) => asserts value is infer U ? AssertCheck<U> : Check<unknown> export type TypeForCheck <T extends Raw> = T extends MatchCheck<infer U> ? U : T extends AssertCheck<infer U> ? U : T extends (value: any, strict?: boolean) => value is infer U ? U : T extends (value: any, strict?: boolean) => asserts value is infer U ? U : unknown export type Maybe <T> = T | null | undefined export interface ArrayOfOptions{ length?: number minLength?: number maxLength?: number } export type JITType <Input> = Input extends RawMatcher<infer T> ? T : Input extends Assert<infer T> ? T : TypeForCheck<Compiled<Input>> export type FlattenOr <T extends any[]> = T extends [infer A] ? TypeForAny<A> : T extends [infer A, ...infer R] ? TypeForAny<A> | FlattenOr<R> : unknown export type CompiledOr <T extends any[]> = T extends [infer A] ? TypeForCheck<Compiled<A>> : T extends [infer A, ...infer R] ? TypeForCheck<Compiled<A>> | CompiledOr<R> : unknown export interface ObjInput { [key: string]: Raw } export type ObjectTypes <Type extends ObjInput> = { [Property in keyof Type]: TypeForCheck<Type[Property]> } export interface Mapped <Value> { [key: string | number ]: Value } export type FlattenAnd <T extends any[]> = T extends [infer A] ? TypeForAny<A> : T extends [infer A, ...infer R] ? TypeForAny<A> & FlattenAnd<R> : unknown export type CompiledAnd <T extends any[]> = T extends [infer A] ? TypeForCheck<Compiled<A>> : T extends [infer A, ...infer R] ? TypeForCheck<Compiled<A>> & CompiledOr<R> : unknown export type TypeForAny <T> = T extends Raw ? TypeForCheck<T> : unknown export type Tuple <T extends any[]> = T extends [infer A] ? [TypeForAny<A>] : T extends [infer A, ...infer R] ? [TypeForAny<A>, ...Tuple<R>] : [] export type MaybeCompiled <T> = DerivedCheck<Compiled<T>, Maybe<TypeForCheck<Compiled<T>>>> export interface ObjectInput { [key: string]: any } export type ObjectCompiled <T extends ObjectInput> = Check<{ [Property in keyof T]: TypeForCheck<Compiled<T[Property]>> }> export type TupleCompiled<T extends any[]> = T extends [infer A] ? [Compiled<A>] : T extends [infer A, ...infer R] ? 
[Compiled<A>, ...TupleCompiled<R>] : [] export interface NativeTypes { String: MatchCheck<string> Number: MatchCheck<number> Validator: MatchCheck<Check<any>> Array: MatchCheck<any[]> Boolean: MatchCheck<boolean> Function: MatchCheck<Function> Nil: MatchCheck<null | undefined> Object: MatchCheck<Object> } export type TypeNameCompiled <Name extends string> = Name extends keyof NativeTypes ? NativeTypes[Name] : Check<Object> export type StringCompiled <T extends string> = T extends `?${infer A}` ? Maybe<TypeNameCompiled<A>> : TypeNameCompiled<T> // From: https://stackoverflow.com/a/50375286/62076 type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never // If T is `any` a union of both side of the condition is returned. type UnionForAny<T> = T extends never ? 'A' : 'B' // Returns true if type is any, or false for any other type. type IsStrictlyAny<T> = UnionToIntersection<UnionForAny<T>> extends never ? true : false type FunctionCompiled <T extends Function> = Check<TypeForAny<T>> export type Compiled <T> = IsStrictlyAny<T> extends true ? Check<any> : T extends null | undefined ? Check<null | undefined> : T extends string ? StringCompiled<T> : T extends Check<any> ? T : T extends Function ? FunctionCompiled<T> : T extends ObjectInput ? ObjectCompiled<T> : Check<T> export type AnyOfCompiled <T extends any[]> = Check<CompiledOr<T>> export type AllOfCompiled <T extends any[]> = Check<CompiledAnd<T>> export type ArrayCompiled <T> = Check<Array<TypeForCheck<Compiled<T>>>>
/** * Plays a sound with various effects applied to it. */ private static void playbackTest() throws Exception { setupEfx(); final int source = AL10.alGenSources(); final int buffer = AL10.alGenBuffers(); WaveData waveFile = WaveData.create("Footsteps.wav"); if (waveFile == null) { System.out.println("Failed to load Footsteps.wav! Skipping playback test."); AL.destroy(); return; } AL10.alBufferData(buffer, waveFile.format, waveFile.data, waveFile.samplerate); waveFile.dispose(); AL10.alSourcei(source, AL10.AL_BUFFER, buffer); AL10.alSourcei(source, AL10.AL_LOOPING, AL10.AL_TRUE); System.out.println("Playing sound unaffected by EFX ..."); AL10.alSourcePlay(source); Thread.sleep(7500); final int effectSlot = EFX10.alGenAuxiliaryEffectSlots(); final int reverbEffect = EFX10.alGenEffects(); EFX10.alEffecti(reverbEffect, EFX10.AL_EFFECT_TYPE, EFX10.AL_EFFECT_REVERB); EFX10.alEffectf(reverbEffect, EFX10.AL_REVERB_DECAY_TIME, 5.0f); EFX10.alAuxiliaryEffectSloti(effectSlot, EFX10.AL_EFFECTSLOT_EFFECT, reverbEffect); AL11.alSource3i(source, EFX10.AL_AUXILIARY_SEND_FILTER, effectSlot, 0, EFX10.AL_FILTER_NULL); System.out.println("Playing sound with reverb ..."); AL10.alSourcePlay(source); Thread.sleep(7500); final int filter = EFX10.alGenFilters(); EFX10.alFilteri(filter, EFX10.AL_FILTER_TYPE, EFX10.AL_FILTER_LOWPASS); EFX10.alFilterf(filter, EFX10.AL_LOWPASS_GAIN, 0.5f); EFX10.alFilterf(filter, EFX10.AL_LOWPASS_GAINHF, 0.5f); AL10.alSourcei(source, EFX10.AL_DIRECT_FILTER, filter); System.out.println("Playing sound with reverb and direct low pass filter ..."); AL10.alSourcePlay(source); Thread.sleep(7500); AL10.alSourcei(source, EFX10.AL_DIRECT_FILTER, EFX10.AL_FILTER_NULL); AL11.alSource3i(source, EFX10.AL_AUXILIARY_SEND_FILTER, EFX10.AL_EFFECTSLOT_NULL, 0, EFX10.AL_FILTER_NULL); EFX10.alAuxiliaryEffectSloti(effectSlot, EFX10.AL_EFFECTSLOT_EFFECT, EFX10.AL_EFFECT_NULL); EFX10.alDeleteEffects(reverbEffect); EFX10.alDeleteFilters(filter); final int echoEffect = EFX10.alGenEffects(); EFX10.alEffecti(echoEffect, EFX10.AL_EFFECT_TYPE, EFX10.AL_EFFECT_ECHO); EFX10.alAuxiliaryEffectSloti(effectSlot, EFX10.AL_EFFECTSLOT_EFFECT, echoEffect); AL11.alSource3i(source, EFX10.AL_AUXILIARY_SEND_FILTER, effectSlot, 0, EFX10.AL_FILTER_NULL); System.out.println("Playing sound with echo effect ..."); AL10.alSourcePlay(source); Thread.sleep(7500); AL.destroy(); }
/** * Gestisce il salvataggio delle condizioni di rank * * @param sem gui */ public static void saveRank(SemGui sem) { FileOutputStream fout; try { String path = sem.getPercorsoIndice().getText() + "/evaluations.rank"; fout = new FileOutputStream(new File(path)); } catch (Exception e) { LogGui.printException(e); return; } try { RankEvaluations evs = sem.getEvaluations(); evs.getEvaluators().clear(); DefaultTableModel model = (DefaultTableModel) sem.getRankTable().getModel(); int rowCount = model.getRowCount(); for (int currentRow = 0; currentRow < rowCount; currentRow++) { String fieldName = (String) model.getValueAt(currentRow, 1); String fieldCondition = (String) model.getValueAt(currentRow, 2); String fieldRankValue = (String) model.getValueAt(currentRow, 3); String rankDuration = (String) model.getValueAt(currentRow, 4); String durationValue = (String) model.getValueAt(currentRow, 5); String startYear = (String) model.getValueAt(currentRow, 6); String endYear = (String) model.getValueAt(currentRow, 7); String rank = (String) model.getValueAt(currentRow, 8); try { RankEvaluator ev = new RankEvaluator(fieldName, fieldCondition, fieldRankValue, Double.parseDouble(rank)); if (durationValue != null && durationValue.length() > 0) { ev.setDurationCondition(rankDuration); ev.setDuration(Double.parseDouble(durationValue)); } if (startYear != null && startYear.length() > 0) { ev.setStartYear(Integer.parseInt(startYear)); } if (endYear != null && endYear.length() > 0) { ev.setEndYear(Integer.parseInt(endYear)); } evs.addRule(ev); } catch (Exception e) { LogGui.printException(e); } } ObjectOutputStream oos = new ObjectOutputStream(fout); oos.writeObject(evs); } catch (Exception e) { LogGui.printException(e); return; } try { fout.close(); } catch (Exception e) { LogGui.printException(e); } }
The brickbats were flying even before President Obama convened his first official Cabinet meeting yesterday. At the session, Obama ordered his agency heads to identify and shave a collective $100 million in administrative costs from federal programs in a budget of well over $3 trillion. "At the same time they're looking for millions in savings, the president's budget calls for adding trillions to the debt," said Senate Minority Leader Mitch McConnell (R-Ky.). "The nation's debt is at its highest level ever, but under the administration's budget, the amount of public debt will double in five years and triple in 10." Framed by members of his Cabinet, the president himself acknowledged that the goal amounts to a drop in the bucket. "It is, and that's what I just said," he told reporters. "None of these things alone are going to make a difference. But cumulatively they make an extraordinary difference because they start setting a tone. And so what we are going to do is, line by line, page by page, $100 million there, $100 million here, pretty soon, even in Washington, it adds up to real money." In a frenetic first three months in office, Obama has seen his $787 billion economic stimulus plan enacted and the outlines of his $3.5 trillion budget passed, while overseeing hundreds of billions of dollars in outlays to stabilize the nation's teetering financial system and its imploding housing market. But that may prove to be the easy part. With Congress back from its spring recess and many of the big, expensive pieces of Obama's plan for turning the economy around now in place, the president is pivoting to the nitty-gritty details of implementing his plans to expand health care, encourage production of renewable energy and improve education -- all while demonstrating he is serious about cutting the federal deficit. With that in mind, Obama called his first official meeting of the Cabinet, which for modern presidents serves as less a policymaking session than a forum for conveying presidential authority. This is particularly true for Obama, whose White House has multiple policymaking "czars" coordinating activities on issues from climate change to health care. During his years in office, President George W. Bush was known for seeding his Cabinet with people who were personally close to him, while running policy mostly through the White House, leaving agencies as purveyors of those ideas. Obama, meanwhile, has assembled a governing team notable for its independence and star power, but until yesterday he had never met with its members officially as a group. "The Cabinet is for pictures and stories and publicity," said Bradley H. Patterson Jr., who has worked for three administrations and has written several books on the inner workings of the White House. Yesterday was such an occasion. Surrounded by the top administration officials, Obama said his team is aware of the need to cut spending over the long haul. "One of the things that everybody here is mindful of as we move forward, dealing with this extraordinary economic crisis, we also have a deficit, a confidence gap, when it comes to the American people," Obama said. "And we've got to earn their trust. They've got to feel confident that their dollars are being spent wisely." As a start, the president set the $100 million goal for cutting administrative costs across the government. The White House said that process already has yielded some savings: The Department of Veterans Affairs canceled or delayed 26 conferences. 
The Education Department is no longer allowing employees to have both laptop and desktop computers. The Agriculture Department is terminating leases and doing more to verify the income of recipients of farm subsidies. And the Department of Homeland Security is going to start buying its office supplies in bulk. The relatively small savings from those measures have drawn ridicule from Obama's conservative critics, many of whom have been critical of his spending plans. "To put those numbers in perspective, imagine that the head of a household with annual spending of $100,000 called everyone in the family together to deal with a $34,000 budget shortfall," Harvard University economist N. Greg Mankiw, a Bush administration official, wrote on his blog. "How much would he or she announce that spending [be] cut? By $3 over the course of the year -- approximately the cost of one latte at Starbucks. The other $33,997? We can put that on the family credit card and worry about it next year." Meanwhile, the administration is learning that those small savings will come easier than the larger ones officials are eyeing. Administration plans to have the government directly administer all federal students loans, cutting out banks and saving $94 billion over the next decade, have run into bipartisan opposition on Capitol Hill. Defense Secretary Robert M. Gates's blueprint to shift billions in defense spending has also met with a mixed reaction from lawmakers. Obama's proposal to end automatic subsidy payments for big farmers and capping subsidy payments at $250,000 has been derided by some farm state lawmakers. Today, the Senate Finance Committee will hold the first of three roundtable discussions on improving health-care services and improving efficiency, another step in Democratic leaders' plans to pass a health-care reform bill by the summer. In the near future, the House committees will begin work on cap-and-trade proposals to reduce carbon emissions. And Congress will also be working to fill in details of Obama's budget outline. All of this will come against a backdrop of opposition from Republicans, who accuse the president of spending too freely -- a perception Obama hopes to dash. "None of these savings by themselves are going to solve our long-term fiscal problem," Obama said. "But taken together they can make a difference, and they send a signal that we are serious about changing how government operates."
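For scale, Mankiw's household analogy tracks the article's own round figures: $100 million of administrative savings measured against roughly $3.5 trillion of spending is 100,000,000 / 3,500,000,000,000, or about 0.003 percent, which applied to a $100,000 household budget comes to roughly $2.86 a year, about the single latte in his example.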
import { Component, OnInit } from '@angular/core'; import {ToastComponent} from '../../../shared/toast/toast.component'; import {Playlist} from '../../../shared/models/playlist.model'; import {PlaylistService} from '../../../services/playlist.service'; import {ActivatedRoute} from '@angular/router'; import {YoutubeService} from '../../../services/youtube.service'; import {SongService} from '../../../services/song.service'; import * as url from 'url'; import {Song} from '../../../shared/models/song.model'; import {Location} from '@angular/common'; @Component({ selector: 'app-dashboard-add-song', templateUrl: './dashboard-add-song.component.html', styleUrls: ['./dashboard-add-song.component.scss'] }) export class DashboardAddSongComponent implements OnInit { playlist: Playlist; results: any = null; keywords: String = null; constructor( public toast: ToastComponent, private playlistService: PlaylistService, private route: ActivatedRoute, private youtubeService: YoutubeService, private songService: SongService, private location: Location ) { } getPlaylist() { this.playlistService.getById({_id: this.route.snapshot.paramMap.get('id')}).subscribe( res => { this.playlist = res; }, err => { console.error(err); } ); } ngOnInit() { this.getPlaylist(); } back() { this.location.back(); } resetSearch() { this.results = null; this.keywords = null; } searchSong() { this.keywords = this.keywords.trim(); if (this.keywords !== '') { this.youtubeService.youtubeSearch({ query: this.keywords, maxResults: 10 }).subscribe( res => { this.results = res; this.toast.setMessage('Recherche terminée !', 'success'); }, err => { this.toast.setMessage('Une erreur est survenue !', 'danger'); } ); } } addSong(song: any) { this.resetSearch(); const songItem = new Song(); songItem.title = song.title; songItem.url = song.link; songItem.playlist = this.playlist; this.songService.add(songItem).subscribe( res => { this.toast.setMessage('Chanson ajoutée !', 'success'); this.back(); }, err => { this.toast.setMessage('Une erreur est survenue !', 'danger'); } ); } }
/** * The OperationExecution class is a wrapper for the results recorded from the execution of an operation. This wrapper * can be passed off to an IScoreboard to be recorded and presented at a later time. */ public class OperationExecution { final public String operationName; final public String operationRequest; final public LoadDefinition generatedDuring; final public boolean async; final public boolean failed; final public long timeStarted; final public long timeFinished; final public long profileStartTime; final public long actionsPerformed; private TraceLabels traceLabel = TraceLabels.NO_TRACE_LABEL; /** * Copy constructor */ public OperationExecution(Operation operation) { this.timeStarted = operation.getTimeStarted(); this.timeFinished = operation.getTimeFinished(); this.operationName = operation.getOperationName(); this.operationRequest = operation.getOperationRequest(); this.async = operation.isAsync(); this.failed = operation.isFailed(); this.generatedDuring = operation.getLoadDefinition(); this.profileStartTime = operation.getLoadDefinitionStartTime(); this.actionsPerformed = operation.getNumberOfActionsPerformed(); } public TraceLabels getTraceLabel() { return this.traceLabel; } public void setTraceLabel(TraceLabels label) { this.traceLabel = label; } public long getExecutionTime() { return timeFinished - timeStarted; } }
Speakers during the opening panel of Consensus 2017 agreed that blockchain is set to go global – but differed on how, exactly, the technology will get to that point. Vincent Wang, chief innovation officer for China Wanxiang Group, argued that the path to success means reaching out to the businesses and industries that haven’t yet begun any kind of meaningful experimentation. “If blockchain is to take off, which I believe it will, we need to address the silent majority that does not often speak up, that is the industries who will see value,” he said. He advocated that startups working with the tech aim for bold solutions that only blockchain can bring – while at the same time calling for new companies to work more closely with local regulators. Wanxiang launched its own startup accelerator, focused on applications like connected devices and next-generation cities, in March. “It is about harvesting the vast value that in many cases is enabled by blockchain only,” he stressed. Not everyone on the panel shared that view about regulation. Nick Giurietto, CEO of the Australian Digital Currency Commerce Association (ADCCA), argued that reducing regulatory barriers would spur further adoption. “Investments decisions won’t be made in an environment where there is regulatory uncertainly,” he argued, adding that the traditionally slow process of updating regulations produces additional barriers. Working together Collaboration was a major theme during the panel, with participants discussing the approaches taken by governments and regulatory changes – and how they need to come around to the idea that they cannot act alone. N.T. Arunkumar, special representative for IT & Innovation for the government of Andhra Pradesh, a state in India, said that opening up communication across borders is key. “There needs to be more cross-border orchestration,” he said, going to add: “We definitely believe after the internet, this is the next big thing that will change every aspect of person’s life.” Yet that change is likely to take time – a point recognized by the panelists. “China is known for strategic patience when it is known for economic polices,” said Wang, voicing his belief that China’s interest in blockchain isn’t going anywhere soon. Image by Amy Castor for CoinDesk
#include "bytestring.h"
#include <stdint.h>
#include <stdio.h>

struct bytestring pas2bytestring(char *pa, ...);
struct bytestring pa2bytestring(char *pa);
extern char paOrigin[];
char *paNextHop(uint32_t nexthop);
char *paMED(uint32_t med);
char *paLocalPref(uint32_t localpref);
char *paASPATH(uint32_t *asn);
char *rewriteASPATH(char *aspathattribute, uint32_t as, uint8_t index);
def create_account(self, name):
    """Create a new account with the given name and return the API response."""
    params = {
        'name': name,
    }
    response = self.json_api_call('POST', '/accounts/v1/account/create', params)
    return response
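A minimal usage sketch follows. The surrounding client class and its transport are not part of the snippet above, so the harness below is hypothetical: it stubs out json_api_call only to show how create_account would typically be called.

# Hypothetical harness; the real client class and its HTTP layer are assumptions, not shown above.
class _FakeAccountsClient:
    def json_api_call(self, method, path, params):
        # Stand-in for the real HTTP call; echoes what would be sent.
        return {"method": method, "path": path, "params": params}

# Attach the method defined above so it can be called as a bound method.
_FakeAccountsClient.create_account = create_account

client = _FakeAccountsClient()
print(client.create_account("Acme Corp"))
# {'method': 'POST', 'path': '/accounts/v1/account/create', 'params': {'name': 'Acme Corp'}}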
import { Configuration } from 'webpack';
import { SpawnSyncOptions } from 'child_process';
import spawn from 'cross-spawn';

export type Config = {
  port?: number,
  npmClient?: NpmClient,
  framework?: Framework,
  rootPath?: string,
  buildDirectory?: string,
  entrypointDirectory?: string,
  entrypointWebpack?: (context: ConfigWebpackContext) => Configuration,
  rendererDirectory?: string,
  rendererBuildPaths?: string[],
  rendererBuildCommands?: (context: ConfigCommandContext) => any | undefined,
  rendererDevCommands?: (context: ConfigCommandContext) => any | undefined,
  rendererWebpack?: (context: ConfigWebpackContext) => Configuration
};

export type ConfigCommandContext = {
  env: Environment,
  rootPath: string,
  exec: typeof spawn,
  execSync: typeof spawn.sync,
  execOptions: SpawnSyncOptions
};

export type ConfigWebpackContext = {
  webpackConfig: Configuration,
  rootConfig: Config,
  env: Environment
};

export type NpmClient = 'npm' | 'yarn';

export type Framework = 'nuxt' | 'next';

export type Environment = 'production' | 'development' | 'test';

export type RegisterRenderedFilesContext = {
  isCorsEnabled?: boolean,
  scheme?: string,
  directory: string,
  partition?: string
};
<reponame>DonaldMcC/py4web<gh_stars>1-10 """ This file defines cache, session, and translator T object for the app These are fixtures that every app needs so probably you will not be editing this file """ import os import sys import logging from py4web import Session, Cache, Translator, Flash, DAL, Field, action from py4web.utils.mailer import Mailer from py4web.utils.auth import Auth from py4web.utils.downloader import downloader from py4web.utils.tags import Tags from py4web.utils.factories import ActionFactory from . import settings # ####################################################### # implement custom loggers form settings.LOGGERS # ####################################################### logger = logging.getLogger("py4web:" + settings.APP_NAME) formatter = logging.Formatter( "%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s" ) for item in settings.LOGGERS: level, filename = item.split(":", 1) if filename in ("stdout", "stderr"): handler = logging.StreamHandler(getattr(sys, filename)) else: handler = logging.FileHandler(filename) handler.setFormatter(formatter) logger.setLevel(getattr(logging, level.upper(), "DEBUG")) logger.addHandler(handler) # ####################################################### # connect to db # ####################################################### db = DAL( settings.DB_URI, folder=settings.DB_FOLDER, pool_size=settings.DB_POOL_SIZE, migrate=settings.DB_MIGRATE, fake_migrate=settings.DB_FAKE_MIGRATE, ) # ####################################################### # define global objects that may or may not be used by the actions # ####################################################### cache = Cache(size=1000) T = Translator(settings.T_FOLDER) flash = Flash() # ####################################################### # pick the session type that suits you best # ####################################################### if settings.SESSION_TYPE == "cookies": session = Session(secret=settings.SESSION_SECRET_KEY) elif settings.SESSION_TYPE == "redis": import redis host, port = settings.REDIS_SERVER.split(":") # for more options: https://github.com/andymccurdy/redis-py/blob/master/redis/client.py conn = redis.Redis(host=host, port=int(port)) conn.set = ( lambda k, v, e, cs=conn.set, ct=conn.ttl: cs(k, v, ct(k)) if ct(k) >= 0 else cs(k, v, e) ) session = Session(secret=settings.SESSION_SECRET_KEY, storage=conn) elif settings.SESSION_TYPE == "memcache": import memcache, time conn = memcache.Client(settings.MEMCACHE_CLIENTS, debug=0) session = Session(secret=settings.SESSION_SECRET_KEY, storage=conn) elif settings.SESSION_TYPE == "database": from py4web.utils.dbstore import DBStore session = Session(secret=settings.SESSION_SECRET_KEY, storage=DBStore(db)) # ####################################################### # Instantiate the object and actions that handle auth # ####################################################### auth = Auth(session, db, define_tables=False) auth.use_username = True auth.param.registration_requires_confirmation = settings.VERIFY_EMAIL auth.param.registration_requires_approval = settings.REQUIRES_APPROVAL auth.param.allowed_actions = settings.ALLOWED_ACTIONS auth.param.login_expiration_time = 3600 auth.param.password_complexity = {"entropy": 50} auth.param.block_previous_password_num = 3 auth.define_tables() # ####################################################### # Configure email sender for auth # ####################################################### if settings.SMTP_SERVER: auth.sender = Mailer( 
server=settings.SMTP_SERVER, sender=settings.SMTP_SENDER, login=settings.SMTP_LOGIN, tls=settings.SMTP_TLS, ssl=settings.SMTP_SSL, ) # ####################################################### # Create a table to tag users as group members # ####################################################### if auth.db: groups = Tags(db.auth_user, "groups") # ####################################################### # Enable optional auth plugin # ####################################################### if settings.USE_PAM: from py4web.utils.auth_plugins.pam_plugin import PamPlugin auth.register_plugin(PamPlugin()) if settings.USE_LDAP: from py4web.utils.auth_plugins.ldap_plugin import LDAPPlugin auth.register_plugin(LDAPPlugin(db=db, groups=groups, **settings.LDAP_SETTINGS)) if settings.OAUTH2GOOGLE_CLIENT_ID: from py4web.utils.auth_plugins.oauth2google import OAuth2Google # TESTED auth.register_plugin( OAuth2Google( client_id=settings.OAUTH2GOOGLE_CLIENT_ID, client_secret=settings.OAUTH2GOOGLE_CLIENT_SECRET, callback_url="auth/plugin/oauth2google/callback", ) ) if settings.OAUTH2FACEBOOK_CLIENT_ID: from py4web.utils.auth_plugins.oauth2facebook import OAuth2Facebook # UNTESTED auth.register_plugin( OAuth2Facebook( client_id=settings.OAUTH2FACEBOOK_CLIENT_ID, client_secret=settings.OAUTH2FACEBOOK_CLIENT_SECRET, callback_url="auth/plugin/oauth2facebook/callback", ) ) if settings.OAUTH2OKTA_CLIENT_ID: from py4web.utils.auth_plugins.oauth2okta import OAuth2Okta # TESTED auth.register_plugin( OAuth2Okta( client_id=settings.OAUTH2OKTA_CLIENT_ID, client_secret=settings.OAUTH2OKTA_CLIENT_SECRET, callback_url="auth/plugin/oauth2okta/callback", ) ) # ####################################################### # Define a convenience action to allow users to download # files uploaded and reference by Field(type='upload') # ####################################################### if settings.UPLOAD_FOLDER: @action('download/<filename>') @action.uses(db) def download(filename): return downloader(db, settings.UPLOAD_FOLDER, filename) # To take advantage of this in Form(s) # for every field of type upload you MUST specify: # # field.upload_path = settings.UPLOAD_FOLDER # field.download_url = lambda filename: URL('download/%s' % filename) # ####################################################### # Optionally configure celery # ####################################################### if settings.USE_CELERY: from celery import Celery # to use "from .common import scheduler" and then use it according # to celery docs, examples in tasks.py scheduler = Celery( "apps.%s.tasks" % settings.APP_NAME, broker=settings.CELERY_BROKER ) # ####################################################### # Enable authentication # ####################################################### auth.enable(uses=(session, T, db), env=dict(T=T)) # ####################################################### # Define convenience decorators # ####################################################### unauthenticated = ActionFactory(db, session, T, flash, auth) authenticated = ActionFactory(db, session, T, flash, auth.user)
// DeleteRuleCtx will delete the given rule func (c *Client) DeleteRuleCtx(ctx context.Context, ruleID string) (Rule, error) { req := graphql.NewRequest(`mutation($ruleID: ID!) { deleteRule(ruleID: $ruleID) {` + allRuleFields + ` } }`) req.Var("ruleID", ruleID) var res struct { DeleteRule Rule `json:"deleteRule"` } if err := c.makeRequest(ctx, req, &res); err != nil { return Rule{}, err } return res.DeleteRule, nil }
/** * Returns a global reference to the class matching the name */ jclass bindJavaClass(JNIEnv * env, const char * name) { jclass tempClass; tempClass = env->FindClass(name); if (tempClass == NULL) { return NULL; } else { jclass classRef = (jclass) env->NewGlobalRef(tempClass); if (classRef == NULL) { return NULL; } else { return classRef; } } }
/** * Copyright 2021-2022 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nnacl/fp32/mul_fp32.h" #include "nnacl/fp32/arithmetic_fp32.h" #include "nnacl/mul_fp32_simd.h" #include "nnacl/errorcode.h" int BroadcastMul(const float *in0, const float *in1, float *tile_in0, float *tile_in1, float *out, int size, ArithmeticParameter *param) { TileDimensionsFp32(in0, in1, tile_in0, tile_in1, param); return ElementMul(tile_in0, tile_in1, out, size); } int ElementMul(const float *in0, const float *in1, float *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMul, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[index] * in1[index]; } return NNACL_OK; } int ElementMulRelu(const float *in0, const float *in1, float *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMulRelu, index, in0, in1, out, size); for (; index < size; index++) { float res = in0[index] * in1[index]; out[index] = res > 0 ? res : 0; } return NNACL_OK; } int ElementMulRelu6(const float *in0, const float *in1, float *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMulRelu6, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[index] * in1[index], 0), 6); } return NNACL_OK; } int ElementMulInt(const int32_t *in0, const int32_t *in1, int32_t *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMulInt, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[index] * in1[index]; } return NNACL_OK; } int ElementMulReluInt(const int32_t *in0, const int32_t *in1, int32_t *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMulReluInt, index, in0, in1, out, size); for (; index < size; index++) { int res = in0[index] * in1[index]; out[index] = res > 0 ? 
res : 0; } return NNACL_OK; } int ElementMulRelu6Int(const int32_t *in0, const int32_t *in1, int32_t *out, int size) { int index = 0; SIMD_RUN_NO_SCALAR(ElementMulRelu6Int, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[index] * in1[index], 0), 6); } return NNACL_OK; } int ElementOptMul(const float *in0, const float *in1, float *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulNum0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[0] * in1[index]; } } else { SIMD_RUN_NO_SCALAR(ElementOptMulNum1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[index] * in1[0]; } } return NNACL_OK; } int ElementOptMulRelu(const float *in0, const float *in1, float *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulReluNum0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMAX(in0[0] * in1[index], 0); } } else { SIMD_RUN_NO_SCALAR(ElementOptMulReluNum1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMAX(in0[index] * in1[0], 0); } } return NNACL_OK; } int ElementOptMulRelu6(const float *in0, const float *in1, float *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulRelu6Num0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[0] * in1[index], 0), 6); } } else { SIMD_RUN_NO_SCALAR(ElementOptMulRelu6Num1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[index] * in1[0], 0), 6); } } return NNACL_OK; } int ElementOptMulInt(const int32_t *in0, const int32_t *in1, int32_t *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulIntNum0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[0] * in1[index]; } } else { SIMD_RUN_NO_SCALAR(ElementOptMulIntNum1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = in0[index] * in1[0]; } } return NNACL_OK; } int ElementOptMulReluInt(const int32_t *in0, const int32_t *in1, int32_t *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulReluIntNum0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMAX(in0[0] * in1[index], 0); } } else { SIMD_RUN_NO_SCALAR(ElementOptMulReluIntNum1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMAX(in0[index] * in1[0], 0); } } return NNACL_OK; } int ElementOptMulRelu6Int(const int32_t *in0, const int32_t *in1, int32_t *out, int size, bool first_scalar) { int index = 0; if (first_scalar) { SIMD_RUN_NO_SCALAR(ElementOptMulRelu6IntNum0, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[0] * in1[index], 0), 6); } } else { SIMD_RUN_NO_SCALAR(ElementOptMulRelu6IntNum1, index, in0, in1, out, size); for (; index < size; index++) { out[index] = MSMIN(MSMAX(in0[index] * in1[0], 0), 6); } } return NNACL_OK; }
//Import required namespaces
import express, { Request, Response, NextFunction } from "express";
import { UserDto } from "./DTOs/UserDto";

//Add required controller
import UsersController from "./Controllers/UsersController";

//Construct our single express instance and define the port we want to use
const app = express();
const port = 3000;

//Initialise our controller
var usersController = new UsersController();

//Register our routes using the controller and path
app.use(usersController.path, usersController.router);

//Create endpoint to return static data
const serverRunningMessage = (req: Request, res: Response, next: NextFunction) => {
  res.status(200).send("The server is running!");
};

//Define get endpoint to return static data using our function above
app.get("/", serverRunningMessage);

//Start the application on our desired port
app.listen(port, () => {
  console.log(`Currently listening on port ${port}`);
});
// get ip string from ipv4 addr inline fastring ip_str(struct sockaddr_in* addr) { char s[INET_ADDRSTRLEN] = { 0 }; inet_ntop(AF_INET, &addr->sin_addr, s, sizeof(s)); return fastring(s); }
// ipNetToLabel turns a CIDR into a Label object which can be used to create // EndpointSelector objects. func ipNetToLabel(cidr *net.IPNet) labels.Label { ones, _ := cidr.Mask.Size() lblStr := maskedIPToLabelString(&cidr.IP, ones) return labels.ParseLabel(lblStr) }
Novel mutations in SLC6A5 with benign course in hyperekplexia Infants suffering from life-threatening apnea, stridor, cyanosis, and increased muscle tone may often be misdiagnosed with infantile seizures and inappropriately treated because of lack and delay in genetic diagnosis. Here, we report a patient with increased muscle tone after birth and hypertonic attacks with life-threatening apnea but no epileptiform patterns in EEG recordings. We identified novel compound heterozygous variants in SLC6A5 (NM_004211.4:c. ; ) by trio whole-exome sequencing, containing a base deletion inherited by the asymptomatic mother leading to a frameshift (c.1430delC, p.Ser477PhefsTer9) and a de novo base exchange leading to an amino acid change (c.1429T > C, p.Ser477Pro). To date, there are four known disease-associated genes for primary hyperekplexia, all of which are involved in the functioning of glycinergic synapses. SLC6A5 encodes the sodium- and chloride-dependent glycine transporter 2 (GlyT2), which recaptures glycine, a major inhibitory transmitter in the brainstem and spinal cord. The diagnosis altered the patient's medical care to his benefit because SLC6A5 mutations with rather benign courses of hyperekplexia may be spared of needless pharmacotherapy. Symptoms eventually decreased in frequency until about once in 2 mo at 2 yr age. We present the first report of halting hyperekplexia episodes by maternal soothing in multiple instances. We highlight the importance of clarifying the genetic diagnosis by rapid next-generation sequencing techniques in this group of infantile apneic attacks with hyperekplexia due to the broad differential diagnoses. INTRODUCTION Patients with sudden apneic attacks often present with diffuse patient histories and challenge pediatric physicians. Infant apnea may stem from anatomical obstructive or various central causes, oftentimes puzzling on-call physicians. Environmental or acquired etiologies of apneic attacks such as infections, premature birth, body temperature, sleep status, body position, and nicotine exposure play a major role in the differential diagnosis (Gao et al. 2017;Oishi et al. 2018). However, physicians are also oftentimes confronted with inborn genetic errors of the central nervous system-for example, congenital central hypoventilation with PHO2XB or LBX1 mutations (Hernandez-Miranda et al. 2018;Zaidi et al. 2018) or congenital myasthenic syndromes with episodic apnea due to pathogenic variants in CHAT (Mallory et al. 2009). not observed any pathological reflexes, delay of motor development, nor radiological signs in transfontanellar sonography that would suggest a beginning cerebral palsy. Of note, clubfeet may be a sign of an increased muscle tone . In our patient, the clubfeet were treated with an orthopedic redression in plaster and left no remaining defect on feet posture or gait. Genomic Analyses To uncover the genetic cause of the attacks, we first performed Mendeliome sequencing (see Methods and Supplemental Table S4; Fazeli et al. 2016). After initial filtering, we had only observed one variant in SLC6A5 and initially no other conclusive results that could have explained the clinical features sufficiently. In search for the genetic diagnosis, we performed trio whole-exome sequencing (WES) (see Methods and Supplemental Tables S5-S7), which revealed a compound heterozygous variant in SLC6A5 (NM_004211.4:c. 
; ), consisting of a deletion inherited by the mother (c.1430delC, p.Ser477PhefsTer9) and a (likely) de novo base exchange (c.1429T > C, p.Ser477Pro) in the patient (see Fig. 1). The patient carries a novel variant that had not previously been reported in ClinVar or gnomAD. However, the mother's variant has a reported frequency of 2/246158 in gnomAD. Both variants have now been listed in ClinVar (SCV000897641 and SCV000897642). Bioinformatic reanalysis and annotation of the Mendeliome next-generation sequencing data confirmed the SLC6A5 variants found by trio WES, as shown in Supplemental Tables S5-S7. To double-check the sequences and the phasing, we subcloned PCR products of the peripheral blood DNA of the index patient and his mother in order to split the SLC6A5 alleles into separate plasmids. We used TOPO-TA cloning and subsequently performed a plasmid purification and dideoxy sequencing of the alleles in the plasmid separately (see Methods).

Figure 1. (A) Pedigree and chromatogram of the two variants in SLC6A5. The mother is a heterozygous carrier of a deletion (c.1430delC, p.Ser477PhefsTer9). The patient carries the deletion inherited by the mother and additionally showed another heterozygous de novo mutation (c.1429T > C, p.Ser477Pro). (B) Review of alignments in the patient's compound heterozygous variant from whole-exome sequencing (WES) with the two mutations located next to each other (varbank; https://varbank.ccg.uni-koeln.de).

The dideoxy sequencing results confirmed these variants in the patient and his mother (see Fig. 2). We also performed kinship analysis to confirm that the patient is indeed the biological offspring of these parents (see Supplemental Methods; Supplemental Table S3). SLC6A5 encodes a sodium- and chloride-dependent glycine neurotransmitter transporter (GlyT2) consisting of 12 transmembrane regions. The mutations on position p.Ser477 are located in transmembrane region 6 (see Fig. 3), which has a role in transporter binding and transporter activity. Position p.Ser477 is expected to serve as one of eight sodium-binding sites by similarity to DAT (sodium-dependent dopamine transporter) according to a previously published analysis (Benito-Muñoz et al. 2018). To elaborate on the importance of the residue p.Ser477 for metal binding, a multiple sequence alignment was performed for SLC6A5 against the sodium-dependent dopamine transporter from Drosophila melanogaster (DAT_DROME) as well as members of the human SLC6 family of sodium- and chloride-dependent neurotransmitter transporters (see Supplemental Fig. S1). The alignment was generated using the NCBI HomoloGene Protein Multiple Alignment platform, analyzed with the MUSCLE algorithm (Edgar 2004), provided by the Jalview web service, and visualized with the Jalview online tool (version 2.11.0) (Waterhouse et al. 2009). The secondary structure was predicted with the JPred Secondary Structure Prediction tool (Drozdetskiy and Cole 2015). Amino acid color labels were selected for the block substitution matrix 62. In addition, a copy-number variant (CNV) analysis was performed to ensure we had not overlooked any other pathogenic variant. However, none of the detected CNVs shows any relevance to the phenotype (see Supplemental Tables S8-S10).
DISCUSSION

We report a 2-yr-old boy from non-consanguineous German parents with tonic-jittery attacks with tachycardia and intermittently aggravated hypertonic phases in extremities that led to life-threatening apnea with cyanosis immediately after birth. In deciphering the underlying etiology, we identified novel compound heterozygous variants in SLC6A5 (NM_004211.4: , containing a deletion inherited by the asymptomatic mother (c.1430delC, p.Ser477PhefsTer9) and a likely de novo base exchange (c.1429T > C, p.Ser477Pro). A plausible alternative mode of inheritance may be germline mosaicism of the patient's father.

Congenital hyperekplexia is a rare, potentially treatable neurogenetic disorder, and the diagnosis has been largely based on clinical findings (Thomas 2015). Affected individuals typically show generalized stiffness immediately or soon after birth. An excessive startle reflex to unexpected stimuli is the second main feature in hyperekplexia. Startle episodes are reported in some cases before birth (Thomas 2015). Although the symptoms are clearly defined, hyperekplexia can be confused with neonatal epilepsy, thus delaying diagnosis. Patients with hyperekplexia stay conscious during the tonic-jittery attacks, which distinguishes the disease from epileptic seizures. Although the symptoms often diminish during the first years of life, the excessive startle response can persist well into adulthood, leading to serious injuries from unprotected falls (Bode and Lynch 2014).

Although no epileptiform discharges were observed, the patient was treated with antiepileptic medication (levetiracetam) for 6 mo because the use of levetiracetam in a child with hyperekplexia was reported to have dramatically decreased the frequency of attacks (Hussain et al. 2013).

Figure 3. Visualization of SLC6A5 as a transmembrane protein with 12 transmembrane regions, and the mutation in our patient on position p.Ser477 in region 6 is marked in red with a red arrow, as well as the missense mutations (p.Trp482Arg, p.Asn509Ser), nonsense mutation (p.Tyr377Ter), and frameshift mutation (p.Val432PhefsTer99) (Rees et al. 2001; Benito-Muñoz et al. 2018). Position 477 is known as a Na+-binding site. Each amino acid is abbreviated by its first-letter code.

After the introduction of levetiracetam in our patient, we have observed a relaxation of initially increased muscle tone, but there were no changes in the hyperekplexia attacks. Thus, we suggest that a rather benign course of hyperekplexia with SLC6A5 mutations may render a continuous pharmacotherapeutic management dispensable. However, it should be debated on the basis of the individual case if the patient may benefit from a preventive pharmacotherapy in the first 3 mo of life because of severe apnea attacks or a pro re nata (PRN) medication (e.g., buccal midazolam). Of note, we present the first report of halting hyperekplexia attacks with maternal soothing and avoiding surrounding auditory stimuli. In GlyT2-KO mice, glycinergic inhibition, such as in hyperekplexia, has proven to be mainly relevant in feedback regulation of respiratory reflexes. GlyT2 inhibition leads to depletion of intracellular glycine storage and limitation of glycine accumulation in synaptic vesicles (Morrow et al. 1998). In recurring cases without clinical clues, genetic workup might be useful to shed some light on the nature and course of the disease.
For instance, if patients present with startle reflexes, it may allude to a rather benign variant such as hyperekplexia. A forced flexion of head and legs over the trunk ("Vigevano maneuver") has been reported to stop sudden attacks of hyperekplexia in infancy (Vigevano et al. 1989). Therapeutic management in hyperekplexia might include medication with an allosteric potentiator of the inhibitory GABA A receptor clonazepam in patients with variants in GLRA1 (Tijssen et al. 1997) and SLC6A5 (Bakker et al. 2009;Thomas 2015). The stimulation of P2X purinergic receptors with βγ-methylene adenosine 5 ′ -triphosphate has also been shown to induce the up-regulation of GlyT2 transport activity by increasing total and plasma membrane expression and reducing transporter ubiquitination (Villarejo-López et al. 2017). As a nonharmful therapeutic strategy in our case, maternal soothing was observed as halting hyperekplexia attacks in repeated instances. We report two variants, which are located directly next to each other (see Fig. 1), and confirmed them by subcloning and subsequent plasmid sequencing. Our results confirm that the healthy mother is a carrier of a deletion on position c.1430delC-a heterozygous frameshift variant that has been predicted to be subject to nonsense-mediated decay (NMD) by the NMDEsc Predictor. Because the mother remains asymptomatic, we may argue that the deficient SLC6A5 is partially cleared away by the NMD pathway or does not cause any other dominant negative effect. In a previous report, asymptomatic parents of multiple patients with SLC6A5-related hyperekplexia have been observed with truncating variants (Rees et al. 2006), which is in line with our observation. In addition to this frameshift variant, the patient had a missense mutation (p.Ser477Pro) with a change from serine to proline. Please see Figure 3 for a comparison of our patient's mutation site as well as the missense, nonsense, and frameshift mutations that are discussed in the following. In a previous report of a severely affected individual (Rees et al. 2006), confocal microscopy of transfected HEK293 cells showed that nonmutated EGFP-hGlyT2 was readily expressed at the cell surface, whereas the mutants (p.Tyr377Ter and p.Val432PhefsTer99 among others) appeared to be cytoplasmic and showed no distinct expression at the cell surface with reduced glycine uptake. Furthermore, Rees and colleagues have also shown in two-electrode voltage clamp analysis in Xenopus oocytes that a mutation in p.Trp482Arg did not respond to glycine (up to 10 mM), but was present at the cell surface as demonstrated by sodium-dependent and glycine-insensitive transient currents (see Fig. 3)-that is, the mutants p.Trp482Arg (transmembrane region 6) were functionally inert. In another report on the sodium-binding activity in SLC6A5, the mutational site in our patient, p.Ser477 in transmembrane region 6, was observed to serve as one of eight sodium-binding sites based on an homology model of GlyT2 dDAT (sodium-dependent dopamine transporter, Q7K4Y6, DAT_DROME) and was confirmed experimentally by electrophysiologcal examinations (Benito-Muñoz et al. 2018). In Supplemental Figure S1, we show a multiple sequence alignment of human SLC6A5 between the sodium-dependent dopamine transporter from Drosophila melanogaster (DAT_DROME) and paralog members of the human SLC6 family. 
This alignment indicates that the residue p.Ser477 is strictly conserved through sodium-and chloride-dependent neurotransmitter transporters, highlighting its importance in sodium binding. Overall secondary structure prediction (jnetpred) demonstrates the location of p.Ser477 in an α-helical section, consistent with its transmembrane location. As expected, no coiled-coil structure was predicted for this section (Lupas et al. 1991). Prediction of solvent accessibility (Jnet Burial) shows a medium exposure of residue p.Ser477. Thus, we conjecture that the missense variant in our patient (c.1429C > T, p.Ser477Pro) serves as a pathogenic mutation on the protein level because of the strict conservation throughout species, the damaging effect of a proline introduction in the α-helix, and the aforementioned electrophysiological observations of deficient sodium binding (Benito-Muñoz et al. 2018). On the basis of these findings and the experimental observations by Rees and colleagues, we hypothesize that the missense mutation (p.Ser477Pro) may lead to deficient metal binding in GlyT2 at transmembrane region 6, which renders it functionally inert. and to reduced expression levels because of the disrupting effect of proline onto the α-helix. The patient's life-threatening apneic attacks first raised suspicions toward congenital hypoventilation as a severe differential diagnosis to hyperekplexia. LBX1 and PHOX2B mutations have previously been shown to impair the development of a small subpopulation of neurons in the medulla oblongata that are essential for respiratory control (Hernandez-Miranda et al. 2018). Other differential diagnoses of neuromuscular symptoms with an infantile onset may stem from "channelopathies" (i.e., a heterogeneous group of disorders resulting from the dysfunction of transmembrane ion channels). These more severe cases comprise patients with mutations in genes that are components of the nonselective sodium leak channel complex (NALCN channelosome) and-depending on the inheritance pattern -either present with muscular hypertonia and distal contractures or hypotonia, psychomotor retardation, and dysmorphic features (Bramswig et al. 2018), as well as patients with mutations in the sodium voltage-gated channel α subunit 4 gene (SCN4A), which may present with congenital myopathy or as congenital myasthenic syndrome (Sloth et al. 2018;Elia et al. 2019). Hyperekplexia patients with a benign phenotype and variants in SLC6A5 are significantly less likely to have recurrent infantile apnea than those with GLRA1 variants (Thomas 2015). Moreover, patients with variants in GLRB and SLC6A5 are more likely to have a developmental delay than those with GLRA1 variants. Thus, an early genetic workup helps in recognizing the patient's symptoms, providing parents with genetic counseling, and avoiding unnecessary medication and its accompanying adverse effects in early postnatal development. Nextgeneration sequencing (NGS) to uncover the underlying cause is indicated because of the broad spectrum of genetic differential diagnosis for apneic attacks and epilepsy-like clinical presentations. Diagnosing rare diseases with NGS in perinatal settings has become highly rapid, economical, and efficient, but it comes with careful consideration of parental consent, ethical framework, and sparing trauma for patients and parents (Daoud et al. 2016;Fazeli et al. 2016;Poulsen et al. 2016;Borghesi et al. 2017;Kuehne et al. 2019). 
Importantly, receiving a genetic diagnosis might enable physicians to administer specific therapy or at least deter unnecessary drug exposure. In conclusion, we report a novel compound heterozygous variant in SLC6A5 with already well-established symptoms that may have been overlooked initially because of the broad differential diagnoses of apneic attacks. If-as in this case-a variant in SLC6A5 is revealed to cause the disease, the patient is expected to have a benign form of hyperekplexia. We report the first instances of stopping hyperekplexia attacks with maternal soothing, thus the patient could be spared anti-epileptic medication. Because the patients are conscious during attacks, the family members could be informed of strategies to halt the attacks without using medication, such as soothing the patient. Lastly, we may offer genetic counseling to the patient's family regarding the expected ordinary thriving and sensomotoric development in this benign disease course. METHODS Written informed consent was obtained from the parents for genetic investigations and recording and publishing of the disease-related information. The study was approved by the institutional review board of the Ethics Committee of the University Hospital of Cologne. To uncover the genetic cause in this family, we performed Mendeliome sequencing, a commercial gene panel (Illumina TruSight One, Illumina) including 4.813 genes responsible for rare diseases (see Supplemental Tables S1 and S4; Fazeli et al. 2016;Alawbathani et al. 2018). The sequencing was performed on a MiSeq sequencer (Illumina) using the TruSight One chemistry for target extraction (Illumina). Because it was inconclusive in the first analysis, we performed trio whole-exome sequencing, which led to the diagnosis. Reanalysis of the Mendeliome confirmed the results. Genomic DNA samples isolated from peripheral blood of the index patient and his parents were enriched with the NimbleGen SeqCap EZ Human Exome Library v2.0 (Roche) following the manufacturer's instructions. The trio was sequenced on a HiSeq 2000 sequencer (Illumina) with 2 × 101-bp reads, producing a mean coverage of the target regions of 94× for the index patient, 89× for the father, and 147× for the mother (see Supplemental Tables S1 and S5-S7). To confirm the variants in SLC6A5 and to validate the cosegregation within the family, we performed dideoxy sequencing. Using early versions of the Cologne Center for Genomics exome pipeline, the sequencing data of the Mendeliome sequencing was analyzed with version 2.10, and the WES trio and the reanalysis of Mendeliome sequencing data were analyzed with version 2.14, only with differences in technical fixes-for example, activating Ion Torrent and Illumina gene panels, disabling downsampling in variant callers, and various bug fixes in parameter parsing or disk space usage (Kawalia et al. 2015). For further bioinformatics analysis of NGS data, refer to the Methods section in our Supplemental Material. The variants were filtered for a de novo and compound heterozygous inheritance model without consanguine familiar background; with an allele read frequency window of 25%-75%. Variants were considered with a minor allele frequency of 0.1% or less. From a total number of 16 rare functional variants (see Supplemental Table S2), we checked the variants for a quality of >100, nonsynonymous coding, polymorphism predictions. 
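To make these filter thresholds concrete, the sketch below expresses them in code. It is illustrative only: the actual analysis used the Cologne Center for Genomics exome pipeline and varbank, not this code, and the variant field names (qual, maf, alt_fraction, effect) are hypothetical.

# Illustrative sketch of the filtering criteria described above; not the pipeline actually used.
def passes_filters(variant):
    return (
        variant["qual"] > 100                        # variant quality > 100
        and variant["maf"] <= 0.001                  # minor allele frequency <= 0.1%
        and 0.25 <= variant["alt_fraction"] <= 0.75  # allele read frequency window of 25%-75%
        and variant["effect"] == "nonsynonymous"     # nonsynonymous coding change
    )

# Toy input standing in for an annotated variant table (values are made up).
variants = [
    {"gene": "SLC6A5", "qual": 512, "maf": 0.00001, "alt_fraction": 0.48, "effect": "nonsynonymous"},
    {"gene": "ABCC6", "qual": 90, "maf": 0.0005, "alt_fraction": 0.51, "effect": "nonsynonymous"},
]
print([v["gene"] for v in variants if passes_filters(v)])  # ['SLC6A5']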
We classified the remaining variants according to the American College of Medical Genetics and Genomics-Association for Molecular Pathology (ACMG-AMP) guidelines and refined Sherloc criteria (Richards et al. 2015;Nykamp et al. 2017). Solely the mentioned variants in SLC6A5 fitted to all of the filter criteria, were classified as pathogenic in both variant classifications, and could be matched to the phenotype of our patient. Table 1 shows the gene variants, according to the ACMG-AMP criteria, and refined Sherloc criteria (Nykamp et al. 2017) in the Richards et al. 2015;Nykamp et al. 2017. The ACMG and Sherloc scores were called for each variant, the ACMG-AMP and Sherloc classification was called together for compound heterozygous variants. Only the SLC6A5 variant was clearly classified as pathogenic. The variants in ABCC6 locate to the nucleotide-binding fold 1 (p.Val787Ile) and the seventh cytoplasmic loop (p.Arg1030Gln), whereas p.Val787Ile has been published in a patient with Pseudoxanthoma elasticum (PXE) leading to strokes due to vascular mineralization. However, ACMG criteria imply uncertain significance for variants and there were no other neurological manifestations, which renders the ABCC6 variants highly unlikely to be the primary cause of the patient's disease. Variants in KIAA0513 have been associated with schizophrenia; however, as they were not reported in combination with seizure-like episodes, a modifying role cannot be excluded. For SLC6A5, the identified homozygous frameshift variant explains the phenotype of hyperekplexia adequately. remaining variants, including four compound-heterozygous variants of uncertain significance in the ATP Binding Cassette Subfamily C Member 6 (ABCC6) gene and KIAA0513 gene, and both SLC6A5 variants we present here, which are highlighted in Supplemental Table S11. To evaluate if a variant was subjected to NMD, we used the NMDEsc Predictor online tool (https ://nmdprediction.shinyapps.io/nmdescpredictor/). In addition to the above-mentioned resources, the variants were also checked in gnomAD (https://gnomad.broadinstitute.org/) for reports in exome-or genome-wide population studies and ClinVar (https://www.ncbi .nlm.nih.gov/clinvar/) for supporting evidence and clinical significance. To confirm the relations of patient and parents, we also performed kinship analysis with varbank by analyzing the proportion of shared rare alleles (see Supplemental Methods and Supplemental Table S3 for further details). The pipeline performed CNV calling for the patient, father, and mother individually, using three different callers: ExomeDepth (Plagnol et al. 2012), XHMM (Fromer et al. 2012), and CoNIFER (Krumm et al. 2012). For more details on CNV calling, please see Supplemental Tables S8-S10 for called de novo CNVs and details about callers. In a next step, we wanted to confirm the allele specificity of the SLC6A5 variants. After a standard PCR procedure to child and mother's samples with HotStar HiFidelity DNA polymerase (QIAGEN) with a proofreading 3 ′ to 5 ′ exonuclease activity, the PCR products were purified to remove proofreading enyzme with the PCR clean-up Gel extraction kit (Macherey-Nagel) according to the protocol. After purification of the PCR product, a 3 ′ A-overhang was added by incubation of the PCR product with a nonproofreading MyTaq DNA polymerase, dNTPs, and MyTaq 1× PCR buffer (BIOLINE) for 10 min at 72°C. 
The product was ligated into the pCR4-TOPO TA Vector (Invitrogen) according to the manufacturer's protocol for the "TOPO TA Cloning Kit for sequencing." The ligation product was transformed into "One Shot Mach1 Phage-Resistant" Chemically Competent E. coli (Thermo Fisher Scientific). Cells were plated and incubated overnight at 37°C. Several colonies were picked to inoculate 5-mL cultures and incubated overnight at 37°C. Plasmids were purified with the NucleoSpin Plasmid kit (Machery-Nagel). Subsequent dideoxy sequencing of the plasmids insert confirmed the above-mentioned variants in the index patient and his mother (see Fig. 2). ADDITIONAL INFORMATION Data Deposition and Access The pathogenic SLC6A5 variants have been submitted to ClinVar (http://www.ncbi.nlm.nih .gov/clinvar/) and can be found under accession numbers SCV000897641 and SCV000897642. We have no further data to be deposited, because we are not allowed to publish the full exome variant data set based on IRB approval and patient consent. Ethics Statement Informed consent was obtained from the patient and parents for genetic investigations and recording and publishing of the disease-related information. The study was approved by the institutional review board of the Ethics Committee of the University Hospital of Cologne. the Gerok program of the Faculty of Medicine, University of Cologne. We also thank the Regional Computing Center of the University of Cologne (RRZK) for providing computing time for the bioinformatics analyses on the DFG-funded High Performance Computing (HPC) system CHEOPS, as well as for support. Author Contributions H.S.D. analyzed clinical, genetic, and bioinformatic data and wrote the manuscript. A.M. summarized clinical findings. P.H. contributed to the clinical diagnosis, description, and management of the patient. M.K. contributed to the genetic workup and data analysis and revised the manuscript. A.K., R.S., A.M., P.N., S.M., and H.T. contributed the genetic and bioinformatic analysis of the data and revised the manuscript. S.C. obtained funding, analyzed the data, coordinated and supervised the work, and revised the manuscript. All authors approved the manuscript before submission.
import openpyxl


def census_data():
    cen_data = openpyxl.load_workbook("massachusetts_population_1980-2010.xlsx")
    cen_sheet = cen_data.get_active_sheet()
    mass_data = openpyxl.load_workbook("MAEmplyomentData.xlsx")
    mass_sheet = mass_data.get_active_sheet()
    for marow in mass_sheet.iter_rows(min_row=2):
        for cenrow in cen_sheet.iter_rows(min_row=12):
            ma_city = ""
            if marow[0].value:
                ma_city = marow[0].value
            cen_city = ""
            if cenrow[3].value:
                cen_city = cenrow[3].value
            if ma_city.strip() == cen_city.strip():
                nolabor = cenrow[8].value - marow[1].value
                print(f"{ma_city} has {cenrow[8].value} citizens, with {marow[1].value} "
                      f"laborers, and {nolabor} citizens currently not in the work force")


census_data()
package com.deltadna.android.sdk.triggers;

import com.deltadna.android.sdk.EventTriggeredCampaignMetricStore;

abstract class ExecutionCountBasedTriggerCondition implements TriggerCondition {

    private long variantId;
    private EventTriggeredCampaignMetricStore metricStore;

    ExecutionCountBasedTriggerCondition(long variantId, EventTriggeredCampaignMetricStore metricStore) {
        this.variantId = variantId;
        this.metricStore = metricStore;
    }

    long getCurrentExecutionCount() {
        return metricStore.getETCExecutionCount(variantId);
    }
}
def clean(self): for chart in self.charts.values(): chart.clear() self.update()
// Finds identifier-string at RIGHT side of string. Stops at whitespace. // "1234()=<whatever> IDENTIFIER": "1234()=<whatever>", "IDENTIFIER" std::size_t split_at_tail_identifier(const std::string& s){ auto i = s.size(); while(i > 0 && whitespace_chars.find(s[i - 1]) != std::string::npos){ i--; } while(i > 0 && identifier_chars.find(s[i - 1]) != std::string::npos){ i--; } return i; }
package com.practice.algorithm.other;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

/**
 * Find the elements of an unordered array that are greater than every element to their left
 * and less than every element to their right.
 * <p>
 * Requirement: complexity below O(n^2)
 *
 * @author Jimmy
 * @version 1.0, 2021/04/07
 * @since practice 1.0.0
 */
public class GreaterThanLeftAllAndLessThanRightAll {

    // Two passes over the array are enough.
    public static List<Integer> greaterThanLeftAllAndLessThanRightAll(int[] nums) {
        List<Integer> result = new ArrayList<>();
        // 1. First pass: collect indices satisfying condition one: every element on the left < current element.
        //    "every element on the left < current element" == "maximum of the left part < current element"
        Queue<Integer> leftMax = new LinkedList<>();
        int max = Integer.MIN_VALUE;
        for (int i = 0; i < nums.length; i++) {
            if (nums[i] > max) {
                leftMax.add(i);
            }
            max = Math.max(max, nums[i]);
        }
        // 2. Second pass (right to left): collect indices satisfying condition two: current element < every element on the right.
        //    "current element < every element on the right" == "current element < minimum of the right part"
        Queue<Integer> rightMin = new LinkedList<>();
        int min = Integer.MAX_VALUE;
        for (int i = nums.length - 1; i >= 0; i--) {
            if (nums[i] < min) {
                rightMin.add(i);
            }
            min = Math.min(min, nums[i]);
        }
        // 3. Intersect the two index sets.
        while (!leftMax.isEmpty()) {
            Integer pop = leftMax.poll();
            if (rightMin.contains(pop)) {
                result.add(nums[pop]);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(greaterThanLeftAllAndLessThanRightAll(new int[]{9, 8, 7, 3, 4, 2, 1}).toString());
        System.out.println(greaterThanLeftAllAndLessThanRightAll(new int[]{3, 3, 1}).toString());
    }
}
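For reference, the same two-pass idea as a compact Python sketch (illustrative only; not part of the original repository):

# Illustrative Python version of the two-pass approach described above.
def greater_than_left_less_than_right(nums):
    n = len(nums)
    ok_left = [False] * n   # nums[i] is greater than every element to its left
    ok_right = [False] * n  # nums[i] is smaller than every element to its right
    left_max = float("-inf")
    for i, x in enumerate(nums):
        ok_left[i] = x > left_max
        left_max = max(left_max, x)
    right_min = float("inf")
    for i in range(n - 1, -1, -1):
        ok_right[i] = nums[i] < right_min
        right_min = min(right_min, nums[i])
    return [nums[i] for i in range(n) if ok_left[i] and ok_right[i]]

# Example: in [2, 1, 3, 5, 4, 9] only 3 and 9 qualify.
print(greater_than_left_less_than_right([2, 1, 3, 5, 4, 9]))  # [3, 9]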
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import React from 'react';
import { SendBox } from './SendBox';
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import { mountWithLocalization, createTestLocale } from './utils/testUtils';
import { TextField } from '@fluentui/react';

Enzyme.configure({ adapter: new Adapter() });

describe('SendBox strings should be localizable and overridable', () => {
  test('Should localize placeholder text', async () => {
    const testLocale = createTestLocale({ sendBox: { placeholderText: Math.random().toString() } });
    const component = mountWithLocalization(<SendBox />, testLocale);
    expect(component.find(TextField).props().placeholder).toBe(testLocale.strings.sendBox.placeholderText);
  });

  test('Should override button label with `strings` prop', async () => {
    const testLocale = createTestLocale({ sendBox: { placeholderText: Math.random().toString() } });
    const sendBoxStrings = { placeholderText: Math.random().toString() };
    const component = mountWithLocalization(<SendBox strings={sendBoxStrings} />, testLocale);
    expect(component.find(TextField).props().placeholder).toBe(sendBoxStrings.placeholderText);
  });
});
CLEVELAND, Ohio -- The Cleveland Browns are looking at taking their starting quarterback of the future in the upcoming 2014 NFL draft. While names like Texas A&M's Johnny Manziel, Louisville's Teddy Bridgewater and Central Florida's Blake Bortles have been thrown out as possible picks, it is still unknown which way the Browns are leaning. Hall of Fame quarterback Warren Moon said one advantage for any rookie quarterback coming to Cleveland is the talent that team has on the offensive side of the ball. "It is always important for a young quarterback to come in and have some talent around him to take the pressure off him," Moon said. "With the talent they have in Gordon and Cameron, those guys can make the catches and make it easier for him. "With a solid running game as well for a young quarterback, and all of those guys are very important. Guys like Russell Wilson and Colin Kaepernick had some good teams around them, and they did not feel like they had to go out and throw for 300 yards every week." Moon had quite the playing career during his time, a nine-time Pro Bowler, as well as being named the 1997 Pro Bowl MVP. He threw for 49,325 yards during his time as quarterback for the Houston Oilers, Minnesota Vikings, Seattle Seahawks and the Kansas City Chiefs. He also threw for another 21,228 yards as the quarterback of the Canadian Football League's Edmonton Eskimos. The quarterback getting most of the talk leading up to the draft is Johnny Manziel, who is coming off a great two years as the quarterback for Texas A&M. While Moon loved watching him play in college, he does have some worries for his transition to the NFL. "Johnny Manziel is one of the most exciting college quarterbacks that I have ever watched," Moon said. "The things he was able to do over the past two years were just amazing to watch. I think in the NFL game, some of the things he got away with in college is going to be harder to do because you are playing against a lot better and faster players. "He is going to have to learn the game from the pocket, and still use that ability to get out in the pocket, but not rely on that." Regardless on where any of these future draft picks go, Moon does have one piece of advice for them. "Just work," Moon said. "It is all about working. You have to be the hardest worker on the team. You have to put in more time than anyone else as a young player because you want to earn the respect of your teammates. If you aren't working as hard as they are or if not even harder, you won't get that respect."
"""Commands module for MINECRAFT-RELATED UTILITIES. Access: Server Operators"""

import importlib
import sys

from petal.commands import core
from petal.util.minecraft import Minecraft

LoadModules = [
    "mc_admin",
    "mc_mod",
    "mc_public",
]

for module in LoadModules:
    # Import everything in the list above.
    importlib.import_module("." + module, package=__name__)


class CommandsMinecraft(core.Commands):
    def __init__(self, *a, **kw):
        super().__init__(*a, **kw)
        self.minecraft: Minecraft = Minecraft(self.client)

        self.engines = []
        # Load all command engines.
        for MODULE in LoadModules:
            # Get the module.
            self.log.info("Loading {} commands...".format(MODULE.title()))
            mod = sys.modules.get(__name__ + "." + MODULE, None)
            if mod:
                # Instantiate its command engine.
                cmod = mod.CommandModule(*a, **kw)
                self.engines.append(cmod)
                setattr(self, MODULE, cmod)
                self.log.ready("{} commands loaded.".format(MODULE.title()))
            else:
                self.log.warn("FAILED to load {} commands.".format(MODULE.title()))

    def get_command(self, kword: str):
        for mod in self.engines:
            func, submod = mod.get_command(kword)
            if not func:
                continue
            else:
                return func, (submod or mod)
        return None, None

    def get_all(self) -> list:
        full = []
        for mod in self.engines:
            full += mod.get_all()
        return full


# Keep the actual classname unique from this common identifier
# Might make debugging nicer
CommandModule = CommandsMinecraft
/** * Create login response with filled token and authorities. * * @param loginDto login request * @param token cidmst token * @return login response */ private LoginDto login(LoginDto loginDto, IdmTokenDto token) { IdmJwtAuthentication authentication = jwtTokenMapper.fromDto(token); oauthAuthenticationManager.authenticate(authentication); LoginDto result = new LoginDto(); result.setUsername(loginDto.getUsername()); result.setSkipMustChange(loginDto.isSkipMustChange()); result.setPassword(loginDto.getPassword()); result.setAuthenticationModule(token.getModuleId()); IdmJwtAuthenticationDto authenticationDto = jwtTokenMapper.toDto(token); result.setAuthentication(authenticationDto); result.setToken(jwtTokenMapper.writeToken(authenticationDto)); result.setAuthorities(jwtTokenMapper.getDtoAuthorities(token)); return result; }
def _read(self, addr, size, onDone=None): bus = self._bus._ag burstsize = 1 bus.req.append((READ, addr, burstsize, None, None)) if onDone: raise NotImplementedError() if self._read_listener is None: self._read_listener = HandshakedReadListener(self._bus.r._ag) self._read_listener.register(self._r_planed_words_cnt, onDone)
The Jerusalem police arrested six men on Monday on suspicion of involvement in the brutal attack on 21-year-old Druze student Tommy Hassoun last week. The suspects were brought to the Jerusalem Magistrate's Court for a hearing on the extension of their remand.

Tommy Hassoun was brutally attacked by a group of Jewish men in Jerusalem on Thursday – reportedly after they heard him speaking Arabic. What Tommy Hassoun's assailants did not know was that they were attacking a young man who had completed his IDF service three months earlier and had recently moved to Jerusalem to study music.

Tommy Hassoun after the attack.

The men, who were wearing skullcaps, hit the 21-year-old Druze student and broke a glass bottle on him. Hassoun was hospitalized suffering from bruises to his face and the back of his head. Hassoun, a musician who grew up in the Druze town of Daliyat al-Karmel near Haifa, had moved to Jerusalem to study at a music academy. For his military service, Hassoun had served in a combat unit and later at the President's Residence. On Sunday, Hassoun posted a message on his Facebook preaching peace and tolerance – despite his ordeal. “Many people who have never met me asked forgiveness," wrote Hassoun. “I am in a lot of pain, but my head is held high."

President Reuven Rivlin, who knew Hassoun from his IDF service, called the young man's father to express support for the family.

Hassoun with Rivlin during IDF service. (Photo: President's Residence Spokesman)

"On a personal level I believed up until now that this is one nation – I never saw a difference between a Jew and a Druze," said Hassoun's father. "I believed and I will continue to believe in the future that this is the land of the Jewish nation – it has a right to live here." Tommy's brother, Julian, said: "A month ago two Druze police officers were murdered during terror attacks and now a Druze gets hit by Jews."
import { useAxios } from '@/hooks/web/useAxios'
import type { TableData } from './types'

const { request } = useAxios()

export const getTableListApi = ({ params }: AxiosConfig) => {
  return request<{
    total: number
    list: TableData[]
  }>({
    url: '/example/list',
    method: 'get',
    params
  })
}

export const saveTableApi = ({ data }: AxiosConfig<Recordable, TableData>) => {
  return request({
    url: '/example/save',
    method: 'post',
    data
  })
}

export const getTableDetApi = ({ params }: AxiosConfig<{ id: string }, Recordable>) => {
  return request<TableData>({
    url: '/example/detail',
    method: 'get',
    params
  })
}

export const delTableListApi = ({ data }: AxiosConfig<Recordable, { id: string[] | number[] }>) => {
  return request({
    url: '/example/delete',
    method: 'post',
    data
  })
}
Stability of lipid vesicles in tissues of the mouse: a gamma-ray perturbed angular correlation study. The rate of phospholipid vesicle disruption in specific tissues of the mouse was followed by gamma-ray perturbed angular correlation (PAC) spectroscopy. In these studies, high levels of 111In-nitrilotriacetic acid complex are contained in unilamellar vesicles consisting of distearoyl phosphatidylcholine, cholesterol, and small amounts of other lipids which modify the surface properties. The PAC technique monitors the extent of vesicle breakup by measuring a time-integrated perturbation factor, ⟨G22(∞)⟩. As the vesicles are broken open in vivo, the released 111In3+ ions quickly bind to macromolecules and the ⟨G22(∞)⟩ value decreases substantially. After administration of vesicles by various routes (intravenous, intraperitoneal, subcutaneous, and oral), the radioactivity and ⟨G22(∞)⟩ values were determined for several tissues at intervals up to 24 hr. We conclude from these data that (i) the PAC technique in conjunction with standard gamma counting methods provides unique information on the condition and location of vesicles in specific tissues, (ii) significant differences in vesicle integrity are found in various tissues, and (iii) both the means of administration and the presence of surface charge affect the vesicle stability and distribution. The carbohydrate analogues of cholesterol affect vesicle stability but not distribution.
Flow at the Tip of a Forward Curved Centrifugal Fan The velocity profiles, radial and circumferential components, were measured at the tip of a forward curved centrifugal fan. Three sets of measurements are presented. Two at peak efficiency for different rotational speeds and a third at the lower rotational speed and for a reduced flow rate.A reverse flow region was formed near the hub, and almost in the middle, between pressure and suction sides of the blade. Near the shroud a high velocity region was observed and a low one near the suction side, picture similar to the jet-wake structure found in the literature.At the lower rotational speed and low flow rate the flow was affected mainly by the system rotation. A “wake” was formed along the suction side of the blade. Increasing the flow rate blade curvature effects became more dominant.Increasing the rotational speed and for the same flow coefficient the system of two vortices observed in the previous case disappears and a single vortex takes its place. In this case the wake is positioned on the hub.Corner vortices also affect the main flow by changing the turbulence intensities. A corner vortex observed on the pressure side reduced the turbulence intensities in the region and a wake was formed locally. However, another corner vortex on the suction side caused an increase to the local turbulence intensities and consequently a high local velocity.Copyright © 1984 by ASME
import numpy as np


def bin_avg(t, s, nbins=None, norm=True, normy=False):
    """Bin-average signal(s) s over time axis/axes t.

    t and s may each be a single array or a list of arrays (e.g. several
    trials); all samples are pooled and averaged on a common set of time bins.
    If norm is True, t is first passed through normalize_t (assumed to be
    defined elsewhere in this module). normy selects an optional y-axis
    normalization: 'max' rescales to [0, 1], 'area' divides by the trapezoidal
    integral, anything falsy leaves the bin averages untouched.
    """
    if norm:
        t = normalize_t(t)
    # Pool all time points and samples into flat arrays.
    if isinstance(t, list):
        f_t = np.hstack(t).flatten()
    else:
        f_t = t.flatten()
    if isinstance(s, list):
        f_s = np.hstack(s).flatten()
    else:
        f_s = s.flatten()
    # Default number of bins: the length of the shortest individual trial.
    if nbins is None:
        nbins = np.min([len(_t) for _t in t])
        t_bin = np.linspace(np.min(f_t), np.max(f_t), nbins + 1)
        s_bin = np.zeros(t_bin.shape)
    else:
        t_bin = np.linspace(np.min(f_t), np.max(f_t), nbins + 1)
        s_bin = np.zeros(nbins + 1)
    # Average the samples falling into each bin; the last entry of s_bin
    # corresponds to the rightmost bin edge and is left at zero.
    for i in range(nbins):
        in_bin_i = np.nonzero(np.logical_and(t_bin[i] <= f_t, f_t < t_bin[i + 1]))
        s_bin[i] = np.mean(f_s[in_bin_i])
    if normy == 'max':
        return t_bin, (s_bin - np.min(s_bin)) / (np.max(s_bin) - np.min(s_bin))
    elif normy == 'area':
        return t_bin, (s_bin) / np.trapz(s_bin, t_bin)
    else:
        return t_bin, s_bin
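A quick usage sketch (hypothetical data, not from the original code; norm=False is used so the external normalize_t helper is not needed):

import numpy as np

# Two hypothetical trials sampled on slightly different time grids.
t = [np.linspace(0, 1, 100), np.linspace(0, 1, 120)]
s = [np.sin(2 * np.pi * _t) for _t in t]

# Average both trials onto a common 50-bin time axis.
t_bin, s_bin = bin_avg(t, s, nbins=50, norm=False)
print(t_bin.shape, s_bin.shape)  # (51,) (51,)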
package main import ( "bufio" "chittyChatpb/chittyChatpb" "context" "flag" "fmt" "io" "log" "os" "strconv" "sync" "github.com/thecodeteam/goodbye" "google.golang.org/grpc" ) var channelName = flag.String("channel", "default", "Channel name") var senderName = flag.String("sender", "default", "Sender name") var tcpServer = flag.String("server", ":8080", "TCP server") var LamportTime lamportTime type lamportTime struct { time int *sync.Mutex } func (lt *lamportTime) max(otherValue int) int { if lt.time > otherValue { return lt.time } return otherValue } func (lt *lamportTime) update(otherValue int) { lt.Lock() lt.time = lt.max(otherValue) + 1 lt.Unlock() } func (lt *lamportTime) incrementWithOne() { lt.Lock() lt.time++ lt.Unlock() } func joinChannel(ctx context.Context, client chittyChatpb.ChittyChatClient) { channel := chittyChatpb.Channel{Name: *channelName, SendersName: *senderName} stream, err := client.JoinChannel(ctx, &channel) if err != nil { log.Fatalf("client could not join channel: %v", err) } fmt.Printf("Joined channel: %v \n", *channelName) sendMessage(ctx, client, "Participant "+*senderName+" joined Chitty-Chat") waitc := make(chan struct{}) go func() { for { in, err := stream.Recv() if err == io.EOF { close(waitc) return } if err != nil { log.Fatalf("Failed to recieve message: %v \n", err) } LamportTime.update(int(in.Time)) log.Printf("I %v got message: %v \n", *senderName, in) fmt.Printf("(%v): %v - Lamport time: "+strconv.Itoa(LamportTime.time)+"\n", in.Sender, in.Message) } }() <-waitc } func sendMessage(ctx context.Context, client chittyChatpb.ChittyChatClient, message string) { stream, err := client.SendMessage(ctx) if err != nil { log.Printf("Cant send message: %v", err) } LamportTime.incrementWithOne() msg := chittyChatpb.Message{ Channel: &chittyChatpb.Channel{ Name: *channelName, SendersName: *senderName, }, Message: message, Time: int32(LamportTime.time), Sender: *senderName, } log.Printf("I %v sent a message: %v \n", *senderName,&msg) stream.Send(&msg) stream.CloseAndRecv() } func clearCurrentLine() { fmt.Print("\n\033[1A\033[K") } func main() { LOG_FILE := "./logfile" logFile, err := os.OpenFile(LOG_FILE, os.O_APPEND|os.O_RDWR|os.O_CREATE, 0644) if err != nil { log.Panic(err) } // Set log out put and enjoy :) log.SetOutput(logFile) log.SetFlags(log.Lmicroseconds) LamportTime = lamportTime{0, new(sync.Mutex)} flag.Parse() var opts []grpc.DialOption opts = append(opts, grpc.WithBlock(), grpc.WithInsecure()) conn, err := grpc.Dial(*tcpServer, opts...) if err != nil { log.Fatalf("Fail to dial(connect): %v", err) } ctx := context.Background() client := chittyChatpb.NewChittyChatClient(conn) defer goodbye.Exit(ctx, -1) goodbye.Notify(ctx) goodbye.RegisterWithPriority(func(ctx context.Context, sig os.Signal) { sendMessage(ctx, client, "Participant "+*senderName+" left Chitty-Chat") }, 1) goodbye.RegisterWithPriority(func(ctx context.Context, sig os.Signal) { conn.Close() }, 5) goodbye.RegisterWithPriority(func(ctx context.Context, sig os.Signal) { logFile.Close() }, 4) go joinChannel(ctx, client) scanner := bufio.NewScanner(os.Stdin) for scanner.Scan() { if len(scanner.Text()) > 0 && len(scanner.Text()) < 128 { go sendMessage(ctx, client, scanner.Text()) } else { log.Printf("I %v denied a message, due to it being to long or no message at all. \n", *senderName) fmt.Println("Message has to be between 1 and 128 chars") continue } clearCurrentLine() } }
import boto3
import botocore
import os
import sys


def get_client(access_key_id=None, secret_access_key=None, region=None):
    session = boto3.session.Session(
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key,
        region_name=region
    )
    return session.client('ecs')


def exit_with_error(message, *args):
    print('Something went wrong:', message.format(*args), file=sys.stderr)
    sys.exit(1)


def update_service(client, cluster, service):
    client.update_service(cluster=cluster, service=service, forceNewDeployment=True)


def wait_service_stable(client, cluster, service):
    waiter = client.get_waiter('services_stable')
    waiter.wait(cluster=cluster, services=[service], WaiterConfig={'Delay': 15, 'MaxAttempts': 40})


def deploy_service():
    access_key_id = os.getenv('PLUGIN_ACCESS_KEY_ID')
    secret_access_key = os.getenv('PLUGIN_SECRET_ACCESS_KEY')
    region = os.getenv('PLUGIN_REGION')
    cluster = os.getenv('PLUGIN_CLUSTER')
    service = os.getenv('PLUGIN_SERVICE')
    client = get_client(access_key_id, secret_access_key, region)
    try:
        print('Updating service "{0}" on cluster "{1}"...'.format(service, cluster))
        update_service(client, cluster, service)
        print('Service updated. Waiting for service to be stable...')
        wait_service_stable(client, cluster, service)
        print('Service is stable now. All done.')
    except client.exceptions.ClusterNotFoundException:
        exit_with_error('The cluster "{0}" does not exist.', cluster)
    except client.exceptions.ServiceNotFoundException:
        exit_with_error('The service "{0}" does not exist on cluster "{1}".', service, cluster)
    except botocore.exceptions.WaiterError as error:
        exit_with_error('Cannot confirm service stability. {0}.', error)


if __name__ == '__main__':
    deploy_service()
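For context, a minimal sketch of the configuration contract this script expects. The environment variable names come from the code above; the placeholder values and the commented import path are hypothetical:

import os

# Drone exposes plugin settings as PLUGIN_* environment variables;
# these are the ones deploy_service() reads.
os.environ.update({
    'PLUGIN_ACCESS_KEY_ID': 'AKIA-placeholder',
    'PLUGIN_SECRET_ACCESS_KEY': 'secret-placeholder',
    'PLUGIN_REGION': 'us-east-1',
    'PLUGIN_CLUSTER': 'my-cluster',
    'PLUGIN_SERVICE': 'my-service',
})

# from plugin import deploy_service  # hypothetical module name
# deploy_service()  # forces a new deployment and waits for the service to stabilize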
package speedtest import ( "crypto/rand" "fmt" "github.com/dustin/go-humanize" "log" "math" "net" "time" ) type BytesPerTime struct { Bytes uint64 Duration time.Duration } func SpeedMeter(input chan BytesPerTime, bytesPerSec chan BytesPerTime) { go func() { bpt := BytesPerTime{} for { select { case newBpt := <-input: bpt.Bytes += newBpt.Bytes bpt.Duration += newBpt.Duration case bytesPerSec <- bpt: bpt = BytesPerTime{} } } }() } func SpeedReporter(input chan BytesPerTime, interval time.Duration) { go func() { for { select { case <-time.After(interval): bpt, ok := <-input if !ok { log.Println("Reporting stopped") return } if bpt.Duration.Seconds() != 0 { log.Printf("Throughput: %s/s", humanize.IBytes(uint64(math.Ceil(float64(bpt.Bytes)/bpt.Duration.Seconds())))) } else { log.Println("No throughput") } } } }() } func SendData(conn net.Conn, buffersize int, reportCh chan BytesPerTime) error { buffer := make([]byte, buffersize) read, err := rand.Read(buffer) if err != nil { return fmt.Errorf("Error while initialising buffer: %s", err) } if read != buffersize { return fmt.Errorf("Could not init buffer: %d read", read) } for { startTime := time.Now() w, err := conn.Write(buffer) if err != nil { return fmt.Errorf("Error while writing: %s", err) } reportCh <- BytesPerTime{ Bytes: uint64(w), Duration: time.Since(startTime), } } } func ReceiveData(conn net.Conn, buffersize int, reportCh chan BytesPerTime) error { b := make([]byte, buffersize) defer conn.Close() for { startTime := time.Now() w, err := conn.Read(b) if err != nil { return fmt.Errorf("Read: %d, Error: %s\n", w, err) } reportCh <- BytesPerTime{ Bytes: uint64(w), Duration: time.Since(startTime), } } }
/**
 * Constructor
 */
export type Constructor<T> = (new (...args: any[]) => T);

/**
 * Makes every key in T, and every key of its nested values, optional
 */
export type gPartial<T> = {
    [P in keyof T]?: gPartial<T[P]>;
};

/**
 * Gets all keys of T except those whose values are of type KT
 *
 * ```
 * class A
 * {
 *      a = 1;
 *      f(){}
 * }
 *
 * var a: NonTypePropertyNames<A, number>; //var a:"f"
 * var a1: NonTypePropertyNames<A, Function>; //var a1:"a"
 *
 * ```
 */
export type NonTypePropertyNames<T, KT> = { [K in keyof T]: T[K] extends KT ? never : K }[keyof T];

/**
 * Removes the keys of T whose values are of type KT
 * ```
 * class A
 * {
 *      a = 1;
 *      f(){}
 * }
 *
 * var a: NonTypePropertys<A, number>; //var a: Pick<A, "f">
 * var a1: NonTypePropertys<A, Function>; //var a1: Pick<A, "a">
 * ```
 */
export type NonTypePropertys<T, KT> = Pick<T, NonTypePropertyNames<T, KT>>;

/**
 * Selects all keys of T whose values are of type KT
 *
 * ```
 * class A
 * {
 *      a = 1;
 *      f(){}
 * }
 *
 * var a: TypePropertyNames<A, number>; //var a: "a"
 * var a1: TypePropertyNames<A, Function>; //var a1: "f"
 * ```
 */
export type TypePropertyNames<T, KT> = { [K in keyof T]: T[K] extends KT ? K : never }[keyof T];

/**
 * Selects all keys of T whose values are not functions
 */
export type PropertyNames<T> = NonTypePropertyNames<T, Function>;

/**
 * Selects all keys of T whose values are functions
 *
 * ```
 * class A
 * {
 *      a = 1;
 *      f(){}
 * }
 *
 * var a: FunctionPropertyNames<A>; //var a: "f"
 * ```
 */
export type FunctionPropertyNames<T> = TypePropertyNames<T, Function>;

/**
 * Selects the keys of T whose values are of type KT
 *
 * ```
 * class A
 * {
 *      a = 1;
 *      f() { }
 * }
 *
 * var a: TypePropertys<A, number>; //var a: Pick<A, "a">
 * var a1: TypePropertys<A, Function>; //var a1: Pick<A, "f">
 * ```
 */
export type TypePropertys<T, KT> = Pick<T, TypePropertyNames<T, KT>>;

export type Lazy<T> = T | (() => T);

export type LazyObject<T> = { [P in keyof T]: Lazy<T[P]>; };

export const lazy = {
    getvalue: function <T>(lazyItem: Lazy<T>): T
    {
        if (typeof lazyItem === "function") return (lazyItem as any)();
        return lazyItem;
    }
};

/**
 * A disposable object
 */
export interface IDisposable
{
    /**
     * Whether the object has been disposed
     */
    readonly disposed: boolean;

    /**
     * Dispose of the object
     */
    dispose(): void;
}
/** * Returns the smallest integer box that can contain the given box. * * @param b The box to contain. * @return A new box. */ public static final Box3i contain(Box3 b) { Box3i result = new Box3i(); contain(b, result); return result; }
#include <bits/stdc++.h> using namespace std; typedef long long ll; typedef long long int lli; typedef long double ld; typedef unsigned long long int ull; typedef vector<int> vi; typedef vector<pair<int, int>> vii; typedef vector<ll> vll; typedef pair<int, int> pii; #define fast \ ios_base::sync_with_stdio(false); \ cin.tie(NULL); \ cout.tie(NULL) #define pb push_back #define all(v) v.begin(), v.end() #define tr(i, c) for (auto i : c) #define f(i, a, n) for (ll i = a; i < (n); i++) #define fm(i, a, b) for (ll i = (a); i >= b; i--) #define read(arr, a, n) f(i, a, n) cin >> arr[i]; #define show(arr, a, n) f(i, a, n) cout << arr[i] << " "; #define tcsolve() \ int tcs; \ cin >> tcs; \ f(i, 0, tcs) solve(); #define ordered_set tree<int, null_type, less<int>, rb_tree_tag, tree_order_statistics_node_update> lli min(lli a, lli b) { return (a > b) ? b : a; } lli max(lli a, lli b) { return (a > b) ? a : b; } ll mod = 1000000007; void solve() { ll n; cin>>n; string s; int p=1; n++; while(p==1) { s=to_string(n); if(s[0]!=s[1] and s[1]!=s[2] and s[2]!=s[3] and s[0]!=s[2] and s[0]!=s[3] and s[1]!=s[3]) p=0; else n++; } cout<<n<<"\n"; } int main() { fast; solve(); #ifndef ONLINE_JUDGE cout << "Time Elapsed: " << 0.01 * clock() / CLOCKS_PER_SEC << "sec"; #endif return 0; }
Early Temporal Variation of Cerebral Metabolites After Human Stroke: A Proton Magnetic Resonance Spectroscopy Study Background and Purpose Proton magnetic resonance spectroscopy has documented declines in normal metabolites and long-term elevation of lactate signal after stroke in humans. Within days of stroke, leukocytes infiltrating the infarct zone may produce much of the lactate seen in the subacute and chronic periods. Methods We examined 10 patients by localized proton magnetic resonance spectroscopy with one-dimensional spectroscopic imaging within the first 60 hours after acute nonhemorrhagic cerebral infarction, a period before abundant leukocyte infiltration. Follow-up studies on day 8 to 17 after stroke were performed on 7 of these patients. Results Initially, the lactate magnetic resonance signal was elevated in all patients. The N-acetyl-aspartate peak within the lesion was reduced below contralateral normal brain in all but two. At subsequent examination, significant declines had occurred in lesion maximum lactate and N-acetyl-aspartate signals, with average changes of −36 ± 11% per week and −29 ± 9% per week, respectively. Declines in lesion creatine/phosphocreatine and in choline-containing compound peaks occurred in some patients but did not attain statistical significance for the group as a whole. Estimated lesion volume correlated positively with both total (r=.75, P=.012) and lesion maximum (r=.74, P=.015) lactate signal. Conclusions Elevated lactate signal is reliably detectable by magnetic resonance spectroscopy after acute cerebral infarction in humans. Clearance of lactate occurs despite the potential contribution of lactate-producing leukocytes in the subacute stage. Delayed loss of N-acetyl-aspartate signal in second examinations suggests that late death of viable cells may occur within the first 2 weeks after cerebral infarction.
import { ModifierArg, ModifierState } from '../base'; import { RestrictOptions } from './pointer'; export interface RestrictEdgesOptions { inner: RestrictOptions['restriction']; outer: RestrictOptions['restriction']; offset?: RestrictOptions['offset']; endOnly: boolean; enabled?: boolean; } export declare type RestrictEdgesState = ModifierState<RestrictEdgesOptions, { inner: Interact.Rect; outer: Interact.Rect; offset: RestrictEdgesOptions['offset']; }>; declare function start({ interaction, startOffset, state }: ModifierArg<RestrictEdgesState>): void; declare function set({ coords, edges, interaction, state }: ModifierArg<RestrictEdgesState>): void; declare const restrictEdges: { noInner: { top: number; left: number; bottom: number; right: number; }; noOuter: { top: number; left: number; bottom: number; right: number; }; start: typeof start; set: typeof set; defaults: RestrictEdgesOptions; }; export default restrictEdges;
WASHINGTON — FBI Director James Comey wrote in a memo that President Donald Trump had asked him to shut down an FBI investigation into ousted national security adviser Michael Flynn, a person familiar with the situation told The Associated Press Tuesday. The person had seen the memo but was not authorized to discuss it by name and spoke on condition of anonymity. The existence of the memo was first reported Tuesday by The New York Times. The White House denied the report. “While the President has repeatedly expressed his view that General Flynn is a decent man who served and protected our country, the President has never asked Mr. Comey or anyone else to end any investigation, including any investigation involving General Flynn,” the White House said in a statement. Trump abruptly fired Comey last week, saying he did so based on his very public handling of the Hillary Clinton email probe. But the White House has provided differing accounts of the firing. And lawmakers have alleged that the sudden ouster was an attempt to stifle the bureau’s investigation into Trump associates’ ties to Russia’s alleged meddling in the 2016 presidential election. Comey’s memo detailing his conversation with Trump would be the clearest proof to date that the president has tried to influence that investigation. The Times said it was part of a paper trail Comey created documenting what he saw as Trump’s efforts to improperly interfere in the ongoing probe. The Justice Department declined to comment. According to the Times, Comey wrote in the February memo that Trump told him Flynn had done nothing wrong. But Comey did not say anything to Trump about limiting the investigation, replying, “I agree he is a good guy.” The newspaper said Comey was in the Oval Office that day with other national security officials for a terrorism threat briefing. When that ended, Trump asked everyone to leave except Comey, and he eventually turned the conversation to Flynn. On Tuesday, for the second night in a row, Senate Republicans and Democrats were caught off-guard as they entered the chamber for a scheduled vote. “I don’t know the facts, so I really want to wait until I find out what the facts are before commenting,” Sen. John Cornyn, R-Texas, told reporters. Asked if it would be obstructing justice for Trump to have asked Comey to drop the Flynn investigation, Cornyn said: “You know, that’s a very serious charge. I wouldn’t want to answer a hypothetical question.” Sen. Lindsey Graham, R-S.C., emphatically said he’s not commenting on news stories anymore. “Let’s get to the bottom of what happened with the director. And the best way to get to the bottom of it, is for him to testify. … I’m not going to take a memo, I want the guy to come in,” Graham told reporters, adding, “If he felt confident enough to write it down, he should come in and tell us about it.” Sen. Dianne Feinstein, D-Calif., said Comey needs to come to Capitol Hill and testify. Sen. Mark Warner of Virginia, the top Democrat on the Senate Intelligence Committee, said he will ask Comey for additional material as part of the panel’s investigation. “Memos, transcripts, tapes — the list keeps getting longer,” he said.
Democratic Sen. Chris Murphy of Connecticut tweeted: “Just leaving Senate floor. Lots of chatter from Ds and Rs about the exact definition of ‘obstruction of justice.'” There is no sign the FBI’s Russia investigation is closing. Acting FBI Director Andrew McCabe told Congress last week the investigation is “highly significant” and said Comey’s dismissal would do nothing to impede the probe.
// Parses the command line arguments and reads the config file if specified func ProcessConfig() (params Params, err error) { var iniConf IniConfig params = parseCommandLine() iniConf, err = parseConfigFile(&params) if err != nil { return } if params.ApiKey == "" { params.ApiKey = iniConf.Get("Auth", "key") } if params.ApiUser == "" { params.ApiUser = iniConf.Get("Auth", "username") } if params.Container == "" { params.Container = iniConf.Get("CloudFiles", "container") } if params.Region == "" { params.Region = iniConf.Get("CloudFiles", "region") } params.Region = strings.ToUpper(params.Region) if params.CnameHost == "" { params.CnameHost = iniConf.Get("CloudFiles", "cname") } err = checkConfig(&params) return }
import formatterService from './formatter.service'; describe('formatterService', () => { describe('formatExpirationDate', () => { it('should return an empty string when value is empty', () => { const actual = formatterService.formatExpirationDate(''); expect(actual).toBe(''); }); it('should format the expiration date as mm/yy when the given format is mmyy', () => { const actual = formatterService.formatExpirationDate('0823'); expect(actual).toBe('08/23'); }); it('should format the expiration date as mm/yy when the given format is yyyy-mm', () => { const actual = formatterService.formatExpirationDate('2023-08'); expect(actual).toBe('08/23'); }); it('should format the expiration date as mm/yy when the given format is mm-yyyy', () => { const actual = formatterService.formatExpirationDate('08-2023'); expect(actual).toBe('08/23'); }); it('should format the expiration date as mm/yy when the given format is mm-yy', () => { const actual = formatterService.formatExpirationDate('08-23'); expect(actual).toBe('08/23'); }); }); describe('formatPan', () => { it('should return an empty string when value is empty', () => { const actual = formatterService.formatPan(''); expect(actual).toBe(''); }); it('should format pan in 4 number groups', () => { const actual = formatterService.formatPan('4242424242424242'); expect(actual).toBe('4242 4242 4242 4242'); }); it('should remove any character other than numbers', () => { const actual = formatterService.formatPan('US42 4242 4242 4242 42&'); expect(actual).toBe('4242 4242 4242 4242'); }); }); describe('sanitize2FACode', () => { it('should return empty string if passed code is null', () => { const actual = formatterService.sanitize2FACode(null); expect(actual).toBe(''); }); it('should return empty string if passed code is empty string', () => { const actual = formatterService.sanitize2FACode(''); expect(actual).toBe(''); }); it('should remove dashes and other non-numerical characters from the string', () => { let actual = formatterService.sanitize2FACode('123-456'); expect(actual).toBe('123456'); actual = formatterService.sanitize2FACode('123 456'); expect(actual).toBe('123456'); actual = formatterService.sanitize2FACode('Gh&$12 3! 456'); expect(actual).toBe('123456'); }); }); });
/*
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr.h>
#include <device.h>
#include <devicetree.h>
#include <drivers/display.h>
#include <drivers/gpio.h>
#include <sys/printk.h>
#include <usb/usb_device.h>

#if DT_NODE_HAS_STATUS(DT_ALIAS(led0), okay)
#define LED_NODE DT_ALIAS(led0)
#define LED_LABEL DT_GPIO_LABEL(LED_NODE, gpios)
#define LED_PIN DT_GPIO_PIN(LED_NODE, gpios)
#define LED_FLAGS DT_GPIO_FLAGS(LED_NODE, gpios)
#else
#error "LED not defined :("
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(backlight0), okay)
#define BACKLIGHT_NODE DT_NODELABEL(backlight0)
#define BACKLIGHT_LABEL DT_GPIO_LABEL(BACKLIGHT_NODE, gpios)
#define BACKLIGHT_PIN DT_GPIO_PIN(BACKLIGHT_NODE, gpios)
#define BACKLIGHT_FLAGS DT_GPIO_FLAGS(BACKLIGHT_NODE, gpios)
#else
#error "Backlight not defined :("
#endif

#if DT_NODE_HAS_STATUS(DT_INST(0, sitronix_st7789v), okay)
#define DISPLAY_NODE DT_INST(0, sitronix_st7789v)
#define DISPLAY_LABEL DT_LABEL(DISPLAY_NODE)
#define DISPLAY_WIDTH DT_PROP(DISPLAY_NODE, width)
#define DISPLAY_HEIGHT DT_PROP(DISPLAY_NODE, height)
#define DISPLAY_PIXELFORMAT PIXEL_FORMAT_RGB_565 /* Determined by the 'colmod' property in the DTS. */
#else
#error "Display not defined :("
#endif

#if DISPLAY_PIXELFORMAT != PIXEL_FORMAT_RGB_565
#error "Code below assumes color format RGB 565"
#endif

enum {
	Color_Red = 0xF800,
	Color_Green = 0x03E0,
	Color_Blue = 0x001F,
	Color_White = 0xFFFF,
	Color_Black = 0x0000,
};

typedef uint16_t Color_t;

static Color_t _frameBuffer[DISPLAY_WIDTH*DISPLAY_HEIGHT];

static const struct display_buffer_descriptor _frameBufferDesc = {
	.buf_size = sizeof(_frameBuffer),
	.width = DISPLAY_WIDTH,
	.height = DISPLAY_HEIGHT,
	.pitch = DISPLAY_WIDTH,
};

static void _fill_rect(uint16_t x, uint16_t y, uint16_t width, uint16_t height, Color_t color)
{
	for (unsigned cy = y; cy < y + height; cy++) {
		for (unsigned cx = x; cx < x + width; cx++) {
			unsigned pos = (cy*DISPLAY_WIDTH + cx);
			_frameBuffer[pos] = color;
		}
	}
}

static void _update_display(const struct device *dev)
{
	display_write(dev, 0, 0, &_frameBufferDesc, _frameBuffer);
}

void main(void)
{
	const struct device *led_dev;
	const struct device *backlight_dev;
	const struct device *display_dev;
	int ret;

	if (usb_enable(NULL)) {
		return; /* usb_enable() failed */
	}
	led_dev = device_get_binding(LED_LABEL);
	if (led_dev == NULL) {
		printk("led not found\n");
		return;
	}
	backlight_dev = device_get_binding(BACKLIGHT_LABEL);
	if (backlight_dev == NULL) {
		printk("backlight not found\n");
		return;
	}
	display_dev = device_get_binding(DISPLAY_LABEL);
	if (display_dev == NULL) {
		printk("display not found\n");
		return;
	}
	ret = gpio_pin_configure(led_dev, LED_PIN, GPIO_OUTPUT_ACTIVE | LED_FLAGS);
	if (ret < 0) {
		return;
	}
	ret = gpio_pin_configure(backlight_dev, BACKLIGHT_PIN, GPIO_OUTPUT_ACTIVE | BACKLIGHT_FLAGS);
	if (ret < 0) {
		return;
	}

	/* Initialize the screen with some rectangles */
	_fill_rect(DISPLAY_WIDTH-50, 0, 50, 50, Color_Green); /* Top right */
	_fill_rect(0, DISPLAY_HEIGHT-50, 50, 50, Color_Red); /* Bottom left */
	_fill_rect(DISPLAY_WIDTH-50, DISPLAY_HEIGHT-50, 50, 50, Color_Blue); /* Bottom right */
	_update_display(display_dev);
	display_blanking_off(display_dev);
	gpio_pin_set(backlight_dev, BACKLIGHT_PIN, 1); /* Turn on backlight */

	bool led_on = false;
	while (1) {
		led_on = !led_on;
		gpio_pin_set(led_dev, LED_PIN, led_on);
		_fill_rect(0, 0, 50, 50, led_on ? Color_White : Color_Black);
		_update_display(display_dev);
		k_msleep(500);
	}
}
Study on the Output Low-Level Property of the SN74LS00N Chip

Generally, the load capacity of an integrated gate circuit depends on its output low-level property. This paper gives a theoretical and experimental study of the output low-level volt-ampere property of the currently popular TTL NAND gate SN74LS00N, and presents the actual output property model and reasonable parameter values, which prove to be inconsistent with those in the relevant classical textbooks. In addition, from measurements of the change in the power supply current of each gate under different logic states, the method for extracting, and the specific values of, the key internal circuit parameters (the input stage resistor R1 and the inverting stage pull-up resistor R2) are given. This should play an active role in understanding the TTL 74LS series of devices and their experimental teaching, in related electronic engineering design, and in the writing, publishing and teaching of textbooks.

Introduction

The TTL gate circuit is a standard and necessary part of the teaching of "Digital Electronic Technology" in Chinese universities, but most textbooks only explain the so-called "standard 74 TTL series" gates, which have long been obsolete, even though some also mention that the 74LS series has been in use for more than 50 years and is still widely used. The textbooks therefore rarely focus on the 74LS series, and detailed descriptions of 74LS gates are largely missing. This paper measures the output low-level volt-ampere property curve VOL-IOL of the TTL quad NAND gate SN74LS00N (manufactured by HLF, Huilyf, China) with high-level inputs when the output is unloaded, supplemented by the measured power supply current ICC of each gate under different logic states. The report gives a theoretical explanation of the VOL-IOL property curve and aims to enrich the treatment of the 74LS00 gate circuit in "Digital Electronic Technology" textbooks.

The Principle Circuit of the 74LS00 NAND Gate

The principle circuit of the 74LS00 NAND gate is shown in Figure 1, together with the theoretical design values of the resistors R1, R2, R4, R5, R6 and RB, RC. This article does not cover the full voltage transfer property VO-VI, the input volt-ampere property VI-II (which, at the input low-level state with no negative voltage, is equivalent to the input load property), the output high-level property VOH-IOH, fan-out performance, noise tolerance, etc. Only the internal circuit parameters (the input stage resistor R1 and the pull-up resistor R2 of the inverting stage) and the overall low-level output property are discussed, so the values of R1 and R2 are the most important quantities in this paper. It is assumed that the turn-on voltage VD of the Schottky diodes D1, D2 is constant at 0.3 V, and that the emitter junction of the anti-saturation Schottky transistors T2, T5 conducts fully with a bias voltage of VBE = 0.7 V.
Measurement and Analysis of the Power Supply Current ICC of the SN74LS00N

Usually, the power supply currents of a gate in its different logic states (ICCH / ICCL for one gate with its output at the high / low level) are used to evaluate the power consumption of the circuit. Here, however, the purpose of measuring ICCH / ICCL is to derive the internal parameters R1 and R2 of the integrated circuit, which cannot be measured directly, and to further confirm the working state of the gate. Since the four NAND gates of the SN74LS00N share one power supply VCC, the measured current ICC is the total current of the four NAND gates. In general, when all 8 inputs of the SN74LS00N are grounded, we obtain 4ICCH; if all 8 inputs are connected to VCC or left floating, 4ICCL is obtained. However, we also measured ICC in the three intermediate states in which the number l / h of gates whose outputs are at the low / high level is 1/3, 2/2 and 3/1, respectively, as shown in Figure 2, where h + l = 4. From the first and last data points in Fig. 2, ICCH and ICCL are measured as 0.265 mA and 0.820 mA, respectively. Combined with the other three data points, the linearity of the connecting line is very good, and all three intermediate points also satisfy

ICC = h·ICCH + l·ICCL.    (1)

This shows that the components determining ICCH and ICCL are very consistent across the four gates when the output is unloaded. However, both of these currents are larger than the theoretical values calculated from the principle circuit of Fig. 1.
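As a quick cross-check of the linear relation (1), which is not part of the original measurement work, the intermediate data points follow directly from the two endpoint currents quoted above if the per-gate currents simply add; a short Python sketch:

# Per-gate supply currents measured in the text (mA).
I_CCH = 0.265  # output at high level
I_CCL = 0.820  # output at low level

# Total supply current of the SN74LS00N when l of its 4 gates output low.
def total_icc(l, gates=4):
    h = gates - l
    return h * I_CCH + l * I_CCL

for l in range(5):
    print(l, round(total_icc(l), 3))  # 1.06, 1.615, 2.17, 2.725, 3.28 mA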
According to the conventional method of estimating the average gate power consumption P in the normal operating state, the average power consumption of each gate is

P = VCC(ICCH + ICCL)/2 = 5 V × (0.265 + 0.820)/2 mA ≈ 2.71 mW.

This is about 35 % larger than the 2 mW given in all the relevant manuals! When all the inputs of the four NAND gates of this integrated circuit are left floating, all the gates are in the turned-on state with VO = VOL, and the maximum total power consumption reaches 5 V × 3.28 mA = 16.4 mW, something teachers, students and device users should be well aware of.

In addition, the agreement between the measured input short-circuit current IIS of a single gate, 0.264 mA, and the average ICCH per gate, 0.265 mA, indicates that ICCH essentially flows through the input stage resistor R1, so the actual R1 can be derived as

R1 = (VCC − VD)/IIS = (5 − 0.3) V / 0.264 mA ≈ 17.8 kΩ.

This is 89 % of the published design value of 20 kΩ, which is still reasonable, because the precision of large resistances inside an integrated circuit is intrinsically limited.

Further, when input A/B is at the high level VIH, the base current IB2 flowing into T2 can be calculated fairly accurately to determine whether T2 is in the saturated state. First assume that T2 is not yet turned on but T5 is; then the voltage drop across R6 (using only the design value 12 kΩ) is (5 − 0.7) V × 12/(17.8 + 12) = 2.1 V, which guarantees the bias voltage VBE2 = 0.7 V required for T2 to turn on. The base current of T2 is then the current through R1 minus the part diverted through R6:

IB2 = (VCC − VBE2 − VBE5)/R1 − VBE5/R6 = (5 − 0.7 − 0.7)/17.8 − 0.7/12 ≈ 0.14 mA.

Because the current available to the collector of T2 through R2 is only

IR2 = ICCL − (VCC − VBE2 − VBE5)/R1 ≈ 0.820 − 0.20 ≈ 0.62 mA,

far less than the collector current that the base drive IB2 could support in the active region, it is obvious that T2 must be saturated. But T2 is an anti-saturation transistor and its collector potential VC2 is clamped at 1.4 V − 0.3 V = 1.1 V, so the value of R2 can be calculated as

R2 = (VCC − VC2)/IR2 = (5 − 1.1) V / 0.62 mA ≈ 6.29 kΩ.

This is 78.6 % of the published design value of 8 kΩ, and it is the main reason why T2 is in saturated conduction and why the average power consumption exceeds 2 mW. Since the discharge circuit composed of T6 and RB, RC is only lightly conducting at this time, the base current of T5 is IB5 ≈ IB2 + IC2 = 0.76 mA; taking the current amplification factor of T5 as β ≈ 20 and noting that the load sink current is limited to IOL < 8 mA, T5 must be in deep saturation.

Measurement and Analysis of the VOL-IOL Property of the SN74LS00N

The schematic diagram of the experimental test of the VOL-IOL property carried out in this paper is shown in Figure 3, where the current-sinking loads at the low-level output are adjustable resistors RL1 and RL2 of 100 kΩ and 10 kΩ, respectively. The tests used an EL-ELL-VI digital circuit teaching experiment instrument produced by Beijing Techshine Ltd. VCC was taken from the 5 V source on the instrument; during the tests VCC = 5.00 V to 5.04 V, and it was connected to the two inputs of the NAND gate under test, while all inputs of the other, untested gates were grounded in order to reduce the power consumption of the whole SN74LS00N. We tested all four NAND gates of the SN74LS00N thoroughly and found their consistency to be very good, so we present the VOL-IOL property curve of one of them, shown in Fig. 4. First, the directly measured output voltage VOL without a load is 0.040 V; because the actual leakage current ICEO4 of T4 makes the collector current IC5 of T5 nonzero, this VOL, which is very close to 0 V, is very stable.
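For readers who want to check the arithmetic in this section, a short Python sketch recomputes the derived parameters from the measured currents. This is only a numeric recap under the text's assumptions; in particular, the expression used for IR2 is this editor's reading of how the paper obtains the 0.62 mA figure:

V_CC, V_D, V_BE = 5.0, 0.3, 0.7            # supply, Schottky drop, B-E drop (V)
I_CCH, I_CCL, I_IS = 0.265, 0.820, 0.264   # measured per-gate currents (mA)

P = V_CC * (I_CCH + I_CCL) / 2             # average gate power, ~2.71 mW
R1 = (V_CC - V_D) / I_IS                   # input stage resistor, ~17.8 kohm
R6 = 12.0                                  # design value used in the text (kohm)
I_B2 = (V_CC - 2 * V_BE) / R1 - V_BE / R6  # ~0.14 mA
I_R2 = I_CCL - (V_CC - 2 * V_BE) / R1      # ~0.62 mA, collector current of T2
V_C2 = 1.4 - 0.3                           # anti-saturation clamp voltage (V)
R2 = (V_CC - V_C2) / I_R2                  # pull-up resistor, ~6.3 kohm
                                           # (the paper quotes 6.29 kohm after
                                           #  rounding I_R2 to 0.62 mA)
print(round(P, 2), round(R1, 1), round(I_B2, 2), round(R2, 2))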
This is intrinsically different from the 0.2 to 0.3 V collector clamp voltage of the anti-saturation transistor indicated in the textbooks, and it also meets the expectation that the lower the output voltage, the better. From this one may speculate that T5 and T4 are not, and do not need to be, anti-saturation Schottky transistors at all, but ordinary low-power transistors; this needs to be verified with the manufacturer in further research. Then we connected the 100 kΩ adjustable resistor RL1, set to about 50 kΩ. Carefully adjusting RL1 to make IOL = 0.1 mA, we found that VOL rises slightly to 0.048 V. In the next steps, five VOL values were measured up to IOL = 0.6 mA, in IOL increments of 0.1 mA per step. To ensure measurement accuracy, RL1 was then replaced with the 10 kΩ adjustable resistor RL2, and the measurement continued to IOL = 1.0 mA in increments of 0.1 mA per step. Next, measurements were taken in IOL increments of 0.5 mA up to IOL = 10 mA, and finally in increments of 1 mA up to IOL = 13.15 mA. All 32 data points are plotted in Fig. 4; the data points with IOL > 10 mA were only adjusted quickly to near integer mA values in order to protect the integrated circuit, but this does not affect the linearity of the overall property curve. From the linearity of the property curve, and neglecting the open-circuit/unloaded voltage VOL(min) = 0.040 V, the gate output can be treated as a linear resistance ROL of approximately 47 Ω. Thus, at IOL = 8 mA, VOL = 0.412 V, which is consistent with the IOL(max) = 16 mA specification of the standard TTL series 7400; this is the root of the reduction of IOL(max) to 8 mA as the typical sink current value for the LS TTL gate. Of course, because the TTL 7400 integrated circuit is no longer available, a comparative study is not possible. It is therefore debatable to continue teaching that "the low-level output resistance ROL of the LS TTL gate is as low as 10 Ω", as in the textbook. In particular, the starting point of the line in the VOL-IOL property map cannot be drawn above 0.1 V, although it can be approximated as passing through the origin. It is also not self-consistent to describe the curve in the text as starting from 0.3 V while drawing the starting point of the VOL-IOL line at the origin. In addition, when VOL reaches the specification value VOL(max) = 0.5 V, the corresponding IOL is already about 10 mA, so it is not appropriate for textbooks (including many materials explaining the standard TTL 74XX series) to place IOL(max) and VOL(max) in exact correspondence, whether in the text or in the figures.
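The equivalent output resistance can be read off as the slope of the VOL-IOL line. A small sketch, using only the few data points quoted in the text rather than the full 32-point data set, gives a value close to the 47 Ω reported above:

import numpy as np

# (I_OL in mA, V_OL in V) pairs quoted in the text.
i_ol = np.array([0.0, 0.1, 8.0, 10.0])
v_ol = np.array([0.040, 0.048, 0.412, 0.5])

slope, intercept = np.polyfit(i_ol, v_ol, 1)
print(round(slope * 1000, 1), "ohm")  # roughly 46-47 ohm
print(round(intercept, 3), "V")       # close to the unloaded V_OL of 0.040 V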
Summary

Through theoretical and experimental research on the power supply current ICC and the low-level output volt-ampere property VOL-IOL of the LS TTL NAND gate SN74LS00N, it is inferred that the key internal circuit parameters, the input stage resistor R1 and the inverting stage pull-up resistor R2, are 17.8 kΩ and 6.29 kΩ, respectively, and that the low-level output of the gate is equivalent to a 47 Ω linear resistance (ignoring the small residual voltage when the sink-current load is absent). This can be of constructive help for a deep understanding of LS TTL gate circuits and their experimental teaching, for related electronic engineering design, and for the preparation and teaching of textbooks.
import type { ComponentStory, ComponentMeta } from "@storybook/react";

import { EmptyStateNoTopicSelected } from "./EmptyStateNoTopicSelected";

export default {
  component: EmptyStateNoTopicSelected,
} as ComponentMeta<typeof EmptyStateNoTopicSelected>;

const Template: ComponentStory<typeof EmptyStateNoTopicSelected> = () => (
  <EmptyStateNoTopicSelected />
);

export const Story = Template.bind({});
Story.args = {};
Story.storyName = "No topic selected";
// matchPasswordAndHash runs checkHashAndPassword on a "password hash" line
// and formats the outcome for use in hashLines
func matchPasswordAndHash(hashLine string) (result string, err error) {
	items := strings.Split(hashLine, " ")
	hash, password := items[1], items[0]
	err = checkHashAndPassword(hashLine)
	if err != nil {
		result = fmt.Sprintf("MATCH: FAIL, password: %s, hash: %s", password, hash)
		return result, err
	}
	result = fmt.Sprintf("MATCH: PASS, password: %s, hash: %s", password, hash)
	return result, err
}
import discord from discord.ext import commands from discord.ext.commands.cooldowns import BucketType from .utils.message import get_nearest, get_avatar from PIL import Image, ImageDraw, ImageFont, ImageChops import io import random import math def save_image(img, *args, **kwargs) -> io.BytesIO: tmp = io.BytesIO() img.save(tmp, *args, **kwargs) tmp.seek(0) return tmp class Image_(commands.Cog, name='Image'): __slots__ = 'bot', def __init__(self, bot): self.bot = bot async def basic_image_command(self, ctx: commands.Context, pil_func, *args, filename='image.png'): async with ctx.typing(): # get_nearest defaults to nearest image image = await get_nearest(ctx) if image: img = await self.bot.loop.run_in_executor(None, pil_func, image, *args) await ctx.send(file=discord.File(img, filename=filename)) else: await ctx.send('No image found') def how_pil(self, image: bytes): how = Image.open('assets/how.jpg') image = Image.open(io.BytesIO(image)) image = image.resize((544, 529), Image.ANTIALIAS).convert('RGB') how.paste(image, (88, 0)) return save_image(how, format='JPEG', quality=50) @commands.command() async def how(self, ctx: commands.Context, *links): """HOW""" return await self.basic_image_command(ctx, self.how_pil, filename='HOW.jpeg') def networth_pil(self, pfp: bytes, name: str, user_id: int): networth = Image.new('RGBA', (1200, 421), color='WHITE') pfp = Image.open(io.BytesIO(pfp)) pfp = pfp.resize((276, 276), Image.ANTIALIAS) networth.paste(pfp, (907, 95)) draw = ImageDraw.Draw(networth) arial = ImageFont.truetype('arial.ttf', 35) text_w, _ = arial.getsize(name) draw.text((19, 21), name, font=arial, fill=(100, 100, 100)) draw.text((19 + text_w, 21), ' / Net worth', font=arial, fill=0) arial = ImageFont.truetype('arial.ttf', 61) rnd = random.Random(user_id) smooth = lambda x: x * x * (3 - 2 * x) money = round(smooth(rnd.random()) * 3000, 2) draw.text((33, 181), f'${money:,}', font=arial, fill=0) draw.line((0, 76, networth.width, 76), fill=(230, 230, 230), width=2) return save_image(networth, format='PNG') @commands.command() async def networth(self, ctx: commands.Context, user: discord.Member=None): """Shows net worth for given user""" user = user or ctx.author async with ctx.typing(): img = await self.bot.loop.run_in_executor(None, self.networth_pil, await get_avatar(user), user.name, user.id) await ctx.send(file=discord.File(img, filename='networth.png')) def google_pil(self, image: bytes): google = Image.open('assets/google.jpg') image = Image.open(io.BytesIO(image)) image = image.resize((526, 309), Image.ANTIALIAS).convert('RGB') google.paste(image, (0, 425)) return save_image(google, format='JPEG', quality=50) @commands.command() async def google(self, ctx: commands.Context, *links): """ Google https://google.com """ return await self.basic_image_command(ctx, self.google_pil, filename='google.jpeg') def byemom_pil(self, image: bytes): byemom = Image.open('assets/byemom.png') image = Image.open(io.BytesIO(image)) image = image.resize((340, 180), Image.ANTIALIAS).convert('RGB') byemom.paste(image, (0, 0)) return save_image(byemom, format='JPEG', quality=50) @commands.command() async def byemom(self, ctx: commands.Context, *links): """BYE MOM!!""" return await self.basic_image_command(ctx, self.byemom_pil, filename='BYEMOM!.jpeg') def reddit_pil(self, image: bytes, username: str): choice = random.choice(('wholesome', 'everyone', 'reddit', 'reddit-post', 'reddit-watermark', 'reddit-imin', 'reddit-killedher', 'reddit-tumblr', 'nobody')) if choice != 'nobody': overlay = 
Image.open(f'assets/{choice}.png') image = Image.open(io.BytesIO(image)) final = None if choice == 'reddit-post': image = image.resize((542, 512), Image.ANTIALIAS).convert('RGBA') overlay.paste(image, (78, 72)) final = overlay elif choice == 'reddit-watermark': overlay = overlay.resize((image.width // 3, image.height // 3), Image.ANTIALIAS) # wtf this is disgusting image.paste(overlay, (image.width // 3 * 2, image.height // 3 * 2), ImageChops.multiply(overlay, Image.new('RGBA', overlay.size, (255, 255, 255, 50)))) final = image elif choice == 'nobody': arial = ImageFont.truetype('arial.ttf', 25) _, text_h = arial.getsize('No one') nobody = Image.new('RGBA', (390, text_h * 9), color='WHITE') draw = ImageDraw.Draw(nobody) draw.text((10, 10), f'''Nobody: Not a single soul: Not even Keanu Reeves: Not even Big Chungus: Not even Redditors at Area 51: {username}:''', font=arial, fill='black') nobody = nobody.resize((image.width, int(image.width * nobody.height / nobody.width)), Image.ANTIALIAS) final = Image.new('RGBA', (image.width, image.height + nobody.height)) final.paste(nobody, (0, 0)) final.paste(image, (0, nobody.height)) else: overlay = overlay.resize((image.width, int(image.width * overlay.height / overlay.width)), Image.ANTIALIAS) final = Image.new('RGBA', (image.width, image.height + overlay.height)) final.paste(image, (0, 0)) final.paste(overlay, (0, image.height)) return save_image(final, format='PNG') @commands.command() async def reddit(self, ctx: commands.Context, *links): """Reddit post whoelsome""" return await self.basic_image_command(ctx, self.reddit_pil, ctx.author.name, filename='reddit.png') def clearly_pil(self, text): image = Image.open('assets/clearly.jpg') draw = ImageDraw.Draw(image) times = ImageFont.truetype('times.ttf', 21) s = draw.multiline_textsize(text, font=times) draw.multiline_text((image.size[0] / 2 - s[0] / 2, 330), text, fill='white', font=times, align='center') return save_image(image, format='JPEG', quality=50) @commands.command() @commands.cooldown(1, 5, BucketType.default) async def clearly(self, ctx: commands.Context, *, text): async with ctx.typing(): img = await self.bot.loop.run_in_executor(None, self.clearly_pil, text) await ctx.send(file=discord.File(img, filename='clearly.jpg')) def tucker_pil(self, image): image: Image.Image = Image.open(io.BytesIO(image)) tucker: Image.Image = Image.open('assets/tucker.png') size = image.size if size[0] < size[1]: w = size[0] // 4 h = (tucker.size[1] * w) // tucker.size[0] else: h = size[1] // 4 w = (tucker.size[0] * h) // tucker.size[1] image.paste(tucker.resize((w, h)), (size[0] - w, size[1] - h)) return save_image(image, format='PNG') @commands.command() @commands.cooldown(1, 5, BucketType.default) async def tucker(self, ctx: commands.Context, *_): return await self.basic_image_command(ctx, self.tucker_pil, filename='tucker.png') def setup(bot): bot.add_cog(Image_(bot))
Investigators say former PM would have been interviewed about claims he sexually abused children if he were still alive A police report has said there was reason to suspect that Sir Edward Heath, Britain’s seventh postwar prime minister, carried out a string of sex attacks over a span of decades. The report concludes that the allegations, including the rape of a male child sex worker aged 11 and sexual assault against four other children and two adults, would have met the legal threshold for police to interview Heath under criminal caution had he still been alive. The police said nothing should be inferred from their findings – after a total of 40 allegations – about the guilt or innocence of Heath, who died in 2005. A police chief who oversaw the inquiry told the Guardian that a series of 26 alleged crimes, many against children, would be formally recorded in official law enforcement databases with Heath named as the suspect. Because of the rules under which officers were operating, the conclusion that there was enough reason to suspect Heath to merit interviewing him over at least seven claims was as much as the alleged victims could have got, said assistant chief constable Paul Mills, of Wiltshire police. The police report says: “Further to a proportionate investigation, reasonable grounds exist that, if Sir Edward Heath had been alive today, he would have been interviewed under caution regarding his suspected involvement in an offence.” Of the seven alleged offences deemed most credible by police, one was alleged to have happened while he was a cabinet minister in 1964 and another in 1967 after he became leader of the Conservative party. The first claim, the rape of a boy aged 11 in London, is alleged to have happened in 1961. Another two claims are alleged 31 years later, around 1992, years after Heath’s time as prime minister from 1970 to 1974. It was also alleged that Heath carried out crimes in 1962, against a boy aged 10 in a public place in Kent after a chance encounter. In 1964, when Heath is alleged to have committed an offence against a boy aged 15, Heath was secretary of state for trade and industry. Police said no evidence was found of a coverup or failure to investigate Heath while he was still alive. Edward Heath giving a victory wave as he arrives at 10 Downing Street in 1970. Photograph: Frank Barratt/Getty Images There was some anger among senior politicians over the findings. Michael Heseltine, who knew Heath, said the report was devoid of evidence. “I am afraid that there isn’t any evidence; there is just speculation and allegations,” Lord Heseltine told Sky News. The two-year investigation was carried out by Wiltshire police, in whose area Heath’s Salisbury home lies, on behalf of 14 forces that received allegations about Heath. The Wiltshire chief constable, Mike Veale, who is facing calls for him to resign over the £1.5m pursuit of a dead man, said the investigation showed police would pursue serious allegations no matter how powerful the suspect. “This watershed moment regarding investigations of people connected to the establishment should not be underestimated,” he said. There has been an explosion in reporting of abuse claims to police after the Jimmy Savile scandal, in which the entertainer was found to have got away with hundreds of offences.
The government and the police have faced claims first of covering up alleged abuse and then of over-reaction in the pursuit of high-profile figures over baseless allegations – such as Scotland Yard’s Operation Midland, which was found by an inquiry in 2016 to have fallen for lies from an alleged fantasist. The Wiltshire investigation into Heath, after decades of rumours about the former prime minister’s activities, was an attempt to establish the facts. Debate will rage about whether it has and whether the state can investigate the most serious claims about those who have held its highest office. The Guardian understands that there is no corroborative evidence – forensic or DNA evidence – for the seven claims judged most credible by police. The findings are based on an assessment of the accounts from those who say Heath attacked them. It is not unusual for a sexual assault case, let alone one dating back decades, to come down to the word of the complainant, but the key decisions in the investigation were checked and approved by a panel of senior detectives and a panel of non-police experts. Veale said: “We can only go where the evidence takes us.” Mike Veale, the Wiltshire police chief constable. Photograph: Wiltshire police The report demolishes key defences used by Heath’s supporters which, they say, mean he could not have been an abuser. It says he was often without police protection and was not asexual. The report says: “Two witnesses, who have not disclosed abuse, provided evidence that he was sexually active with consenting adults during parts of his life.” James Gray, the MP for North Wiltshire and a Heath supporter, said: “My strong instinct is that he was entirely asexual, neither gay nor straight nor a paedophile. He was none of those things. “Most of the time he was escorted by close protection officers and he was a high-profile individual. I’m plain that these allegations are nonsensical, not true.” The Crown Prosecution Service will not give advice to police on whether the evidence they have gathered against a deceased suspect would be enough to prosecute. Wiltshire said they thus would not give an opinion on Heath’s guilt or innocence because that was not their role. A scrutiny panel that examined Wiltshire’s decision-making during the investigation included an ethicist and a human rights lawyer. It said: “The investigation was fair, sensitive and rigorous with regards to both victims and suspects.” Veale apologised for an appeal for complainants to come forward being made outside Heath’s house in August 2015, which critics said would encourage false claims. Veale said the inquiry found no evidence of a coverup or that a blind eye was turned to Heath’s activities by agents of the state. The inquiry team spoke to Heath’s police protection officers over the years and the security services. But Veale said police could not investigate thoroughly enough, because it was beyond their remit, to satisfy or rebut claims of a coverup. The report released on Thursday was for public consumption. A fuller version will go to the Independent Inquiry into Child Sexual Abuse, where it will most likely be considered as part of an investigation into whether abuse by prominent people was covered up. Ken MacDonald, the former director of public prosecutions, said the force should be ashamed.
“The bar for interview is low – in most investigations as low as the police want it to be, and in the case of a dead man virtually non-existent. They are covering their backs at the expense of a dead man. Shame on them.” Two other peers, David Hunt, who chairs the Sir Edward Heath Charitable Foundation, and Robert Armstrong, a former cabinet secretary and parliamentary private secretary to Heath while he was prime minister, said: “The Wiltshire police report is profoundly unsatisfactory because it neither justifies nor dispels the cloud of suspicion. As Sir Edward is dead, justice requires that there should be a quasi-judicial process as a substitute for the judicial process.”
from dataclasses import dataclass from typing import List from diamond_miner.defaults import UNIVERSE_SUBSET from diamond_miner.queries import GetInvalidPrefixes from diamond_miner.queries.query import LinksQuery, links_table from diamond_miner.typing import IPNetwork from diamond_miner.utilities import common_parameters @dataclass(frozen=True) class GetLinks(LinksQuery): """ Return the links pre-computed in the links table. >>> from diamond_miner.test import url >>> links = GetLinks(filter_invalid_prefixes=False).execute(url, 'test_invalid_prefixes') >>> len(links) 3 >>> links = GetLinks(filter_invalid_prefixes=True).execute(url, 'test_invalid_prefixes') >>> len(links) 1 >>> links = GetLinks(include_metadata=False).execute(url, 'test_nsdi_example') >>> len(links) 8 >>> links = GetLinks(include_metadata=True).execute(url, 'test_nsdi_example') >>> len(links) 8 >>> links = GetLinks(near_or_far_addr="172.16.58.3").execute(url, 'test_nsdi_example') >>> len(links) 3 """ filter_invalid_prefixes: bool = False "If true, exclude links from prefixes with amplification or loops." include_metadata: bool = False "If true, include the TTLs at which `near_addr` and `far_addr` were seen." def columns(self) -> List[str]: columns = ["near_addr", "far_addr"] if self.include_metadata: columns = ["near_ttl", "far_ttl", *columns] return columns def statement( self, measurement_id: str, subset: IPNetwork = UNIVERSE_SUBSET ) -> str: if self.filter_invalid_prefixes: invalid_prefixes_query = GetInvalidPrefixes( **common_parameters(self, GetInvalidPrefixes) ) prefix_filter = f""" probe_dst_prefix NOT IN ({invalid_prefixes_query.statement(measurement_id, subset)}) """ else: prefix_filter = "1" return f""" SELECT DISTINCT {','.join(self.columns())} FROM {links_table(measurement_id)} WHERE {self.filters(subset)} AND {prefix_filter} """
//expects a regular enum, not one that overrides the assignment
//e.g. enum Foo { bar = "bar" } is NOT okay here, but enum Foo {bar} is
//
//takes an enum and gives back an array of [Index, Name] pairs
//get_enum_pairs :: Enum -> Array (Number, String)
export const get_enum_pairs = (target:any):Array<[number, string]> =>
    Object.keys(target)
        .map(index => Number(index))
        .filter(index => !isNaN(index))
        .map(index => ([index, target[index]]));

//converts a list of pairs like Array<[number, string]> into a straight list of Array<string>
export const enum_pairs_to_list = (xs:Array<[number, string]>):Array<string> =>
    xs.reduce((acc, curr) => {
        acc[curr[0]] = curr[1];
        return acc;
    }, []);
/** * For now we need to manually construct our Configuration, because we need to * override the default one and it is currently not possible to use * dynamically set values. * * @return */ public static Configuration createConfiguration() { Configuration conf = new Configuration(); conf.addResource("nutch-default.xml"); conf.addResource("nutch-site.xml"); conf.addResource("crawl-tests.xml"); return conf; }
# IF/philip_pal/tests/test_philip_base_if.py
"""Tests for the basic PHiLIP interface"""
import pytest

from conftest import _regtest


def test_send_and_parse_cmd(phil_base, regtest):
    """Tests basic send and parse command"""
    _regtest(regtest, phil_base.send_and_parse_cmd("rr 0 10"))
    _regtest(regtest, phil_base.send_and_parse_cmd("rr 0 2"))
    _regtest(regtest, phil_base.send_and_parse_cmd("rr 0 2", True))
    _regtest(regtest, phil_base.send_and_parse_cmd("rr 0 2", True, 0.1))
    _regtest(regtest, phil_base.send_and_parse_cmd("help"))
    _regtest(regtest, phil_base.get_version())


def test_read_bytes_success(phil_base, regtest):
    """Tests basic read bytes command"""
    _regtest(regtest, phil_base.read_bytes(0))
    _regtest(regtest, phil_base.read_bytes("1"))
    _regtest(regtest, phil_base.read_bytes(3, 2))
    _regtest(regtest, phil_base.read_bytes(4, "0"))
    _regtest(regtest, phil_base.read_bytes(5, 10))
    _regtest(regtest, phil_base.read_bytes(5, 8))
    _regtest(regtest, phil_base.read_bytes(5, 8, True))
    _regtest(regtest, phil_base.read_bytes(5, 8, True, 0.1))


def test_read_bytes_fail(phil_base):
    """Tests failure cases of read bytes"""
    assert phil_base.read_bytes(99999)['result'] == phil_base.RESULT_ERROR
    assert phil_base.read_bytes(0, 99999)['result'] == phil_base.RESULT_ERROR


def test_write_bytes_success(phil_base, regtest):
    """Tests write bytes command"""
    _regtest(regtest, phil_base.write_bytes(0, [9, 8, 7]))
    _regtest(regtest, phil_base.write_bytes(3, 99))
    _regtest(regtest, phil_base.write_bytes(7, "88"))
    _regtest(regtest, phil_base.read_bytes(0, 8, True))
    _regtest(regtest, phil_base.read_bytes(0, 8, True, 0.1))


def test_write_bytes_fail(phil_base):
    """Tests failure cases of write bytes command"""
    assert phil_base.write_bytes(99999, 0)['result'] == phil_base.RESULT_ERROR
    errl = [0] * 512
    assert phil_base.write_bytes(0, errl)['result'] == phil_base.RESULT_ERROR
    with pytest.raises(ValueError):
        phil_base.write_bytes(0, 'xxx')


def test_read_bits_success(phil_base, regtest):
    """Tests various read bits and compares with regression"""
    phil_base.write_bytes(0, [0x91, 0x1F, 0, 0, 0, 0, 0, 1])
    _regtest(regtest, phil_base.read_bits(0, 0, 1))
    _regtest(regtest, phil_base.read_bits(0, 0, 1, 0.1))
    _regtest(regtest, phil_base.read_bits(0, 0, 5))
    _regtest(regtest, phil_base.read_bits(0, 4, 1))
    _regtest(regtest, phil_base.read_bits(0, 4, 4))
    _regtest(regtest, phil_base.read_bits(0, 7, 9))
    _regtest(regtest, phil_base.read_bits(0, 0, 7))
    _regtest(regtest, phil_base.read_bits(0, 0, 8))
    _regtest(regtest, phil_base.read_bits(0, 0, 9))
    _regtest(regtest, phil_base.read_bits(0, 8, 1))
    _regtest(regtest, phil_base.read_bits(0, 8, 8))
    _regtest(regtest, phil_base.read_bits(0, 0, 16))
    _regtest(regtest, phil_base.read_bits(0, 0, 64))


def _assert_bits(phil, offset, bits, data):
    w_data = phil.write_bits(0, offset, bits, data)
    r_data = phil.read_bits(0, offset, bits)
    assert w_data['result'] == phil.RESULT_SUCCESS
    assert r_data['result'] == phil.RESULT_SUCCESS
    assert r_data['data'] == data


def test_write_bits_success(phil_base, regtest):
    """Tests write bits by writing to the user register and comparing it to a
    read value
    """
    _regtest(regtest, phil_base.write_bits(0, 0, 9, 0x102, 0.1))
    _assert_bits(phil_base, 0, 1, 1)
    _assert_bits(phil_base, 1, 2, 2)
    _assert_bits(phil_base, 2, 2, 3)
    _assert_bits(phil_base, 3, 15, 32000)


def test_base_functions(phil_base):
    """Tests if misc commands pass"""
    assert phil_base.execute_changes()['result'] == phil_base.RESULT_SUCCESS
    assert phil_base.reset_mcu()['result'] == phil_base.RESULT_SUCCESS
    assert phil_base.get_version()['version']
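

# Illustrative usage of the round-trip helper above (a sketch only; it assumes
# a connected PHiLIP board exposed through the `phil_base` fixture from
# conftest, and the offset/bit-width/value below are arbitrary):
#
#   _assert_bits(phil_base, 4, 3, 5)
#   assert phil_base.read_bits(0, 4, 3)['data'] == 5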
from django.contrib import admin

from .models import Food


# Register your models here.
class FoodAdmin(admin.ModelAdmin):
    # Food name, total serving size, calories, carbohydrates, protein, fat
    list_display = ('DESC_KOR', 'SERVING_SIZE', 'NUTR_CONT1', 'NUTR_CONT2',
                    'NUTR_CONT3', 'NUTR_CONT4')


admin.site.register(Food, FoodAdmin)
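

# Hypothetical shape of the Food model referenced above, shown only to make
# the list_display columns readable; the real field types live in .models and
# may differ:
#
#   class Food(models.Model):
#       DESC_KOR = models.CharField(max_length=100)     # food name
#       SERVING_SIZE = models.CharField(max_length=20)  # total serving size
#       NUTR_CONT1 = models.FloatField()                # calories
#       NUTR_CONT2 = models.FloatField()                # carbohydrates
#       NUTR_CONT3 = models.FloatField()                # protein
#       NUTR_CONT4 = models.FloatField()                # fat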
package trivia

import (
	"fmt"
	"io/ioutil"

	jsoniter "github.com/json-iterator/go"
)

// SlideList represents a list of Slides
type SlideList struct {
	Slides []Slide `json:"slides"`
}

// Slide represents one image which may have a trivia question and answers
type Slide struct {
	Image         string   `json:"image"`
	Question      string   `json:"question,omitempty"`
	Answers       []string `json:"answers,omitempty"`
	CorrectAnswer int      `json:"correct,omitempty"`
}

// AnswerKey returns the correct answers for each trivia question
// 0 = no answer, 1+ = the correct answer
func (s *SlideList) AnswerKey() []int {
	answers := make([]int, len(s.Slides))
	for i, slide := range s.Slides {
		answers[i] = slide.CorrectAnswer
	}
	return answers
}

// GetRawSlideJSON returns the JSON as a string
func GetRawSlideJSON(filepath string) (string, error) {
	str, err := ioutil.ReadFile(filepath)
	if err != nil {
		return "", err
	}
	fmt.Println("Successfully opened ", filepath)
	return string(str), nil
}

// GetSlideJSON returns a SlideList built from the JSON
func GetSlideJSON(filepath string) (*SlideList, error) {
	slides := &SlideList{}
	str, err := ioutil.ReadFile(filepath)
	if err != nil {
		return slides, err
	}
	fmt.Println("Successfully opened ", filepath)
	json := jsoniter.ConfigCompatibleWithStandardLibrary
	if err := json.Unmarshal(str, slides); err != nil {
		return slides, err
	}
	return slides, nil
}
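
// Illustrative usage only (not part of this package); it assumes a
// "slides.json" file matching the SlideList schema above exists on disk:
//
//	slides, err := trivia.GetSlideJSON("slides.json")
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(slides.AnswerKey())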
/**
 * Retests a step request with a specific exclusion filter
 */
public void testJDIClassExclusionFilter2() {
    StepRequest request = getRequest();
    request.addClassExclusionFilter("org.eclipse.*");
    request.addClassExclusionFilter("java.lang.*");
    request.enable();

    StepEvent event = null;
    try {
        event = triggerStepAndWait(getThread(), request, 1000);
    } catch (Error e) {
        // no step event is expected here, so the wait may time out
    }
    if (event != null) {
        System.out.println(event.location().declaringType());
        assertTrue("1", false);
    }

    waitUntilReady();
    fVM.eventRequestManager().deleteEventRequest(request);
}
// AssignPropertiesFromPoliciesStatus populates our Policies_Status from the provided source Policies_Status
func (policies *Policies_Status) AssignPropertiesFromPoliciesStatus(source *v20210901s.Policies_Status) error {
	// Clone the existing property bag
	propertyBag := genruntime.NewPropertyBag(source.PropertyBag)

	// ExportPolicy
	if source.ExportPolicy != nil {
		var exportPolicy ExportPolicy_Status
		err := exportPolicy.AssignPropertiesFromExportPolicyStatus(source.ExportPolicy)
		if err != nil {
			return errors.Wrap(err, "calling AssignPropertiesFromExportPolicyStatus() to populate field ExportPolicy")
		}
		policies.ExportPolicy = &exportPolicy
	} else {
		policies.ExportPolicy = nil
	}

	// QuarantinePolicy
	if source.QuarantinePolicy != nil {
		var quarantinePolicy QuarantinePolicy_Status
		err := quarantinePolicy.AssignPropertiesFromQuarantinePolicyStatus(source.QuarantinePolicy)
		if err != nil {
			return errors.Wrap(err, "calling AssignPropertiesFromQuarantinePolicyStatus() to populate field QuarantinePolicy")
		}
		policies.QuarantinePolicy = &quarantinePolicy
	} else {
		policies.QuarantinePolicy = nil
	}

	// RetentionPolicy
	if source.RetentionPolicy != nil {
		var retentionPolicy RetentionPolicy_Status
		err := retentionPolicy.AssignPropertiesFromRetentionPolicyStatus(source.RetentionPolicy)
		if err != nil {
			return errors.Wrap(err, "calling AssignPropertiesFromRetentionPolicyStatus() to populate field RetentionPolicy")
		}
		policies.RetentionPolicy = &retentionPolicy
	} else {
		policies.RetentionPolicy = nil
	}

	// TrustPolicy
	if source.TrustPolicy != nil {
		var trustPolicy TrustPolicy_Status
		err := trustPolicy.AssignPropertiesFromTrustPolicyStatus(source.TrustPolicy)
		if err != nil {
			return errors.Wrap(err, "calling AssignPropertiesFromTrustPolicyStatus() to populate field TrustPolicy")
		}
		policies.TrustPolicy = &trustPolicy
	} else {
		policies.TrustPolicy = nil
	}

	// Preserve any remaining properties in the property bag
	if len(propertyBag) > 0 {
		policies.PropertyBag = propertyBag
	} else {
		policies.PropertyBag = nil
	}

	// No error
	return nil
}
async def _process_put(self, data: bytes):
    ttl, replication, _ = struct.unpack(">HBB", data[:4])
    key = data[4:36].hex()
    value = data[36:].hex()
    logger.info(
        f"Handling put message: Hex Key {key} [TTL {ttl}, replication {replication}] => Hex Value [{value}]"
    )
    await self.chord_node.put_key(key, value, int(ttl))
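

# Illustrative sketch of the message layout assumed by _process_put above,
# inferred from the struct format string and slicing; not an official spec:
#
#   >HBB   -> 2-byte TTL, 1-byte replication count, 1 unused byte
#   [4:36] -> 32-byte key
#   [36:]  -> arbitrary-length value
#
#   example = struct.pack(">HBB", 60, 3, 0) + b"\x00" * 32 + b"hello"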