/**
 * Builds implementation-specific cache.
 *
 * @return a Cache object
 */
private Cache<String, Object> buildCache() {
    return Caffeine.newBuilder()
            .expireAfterWrite(expiresAfter, timeUnit)
            .maximumSize(maxSize)
            .build();
}
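For context, here is a minimal, self-contained sketch of how a cache built this way might be wired up and exercised. The fields expiresAfter, timeUnit and maxSize belong to the enclosing class, which is not shown, so the values below (and the CacheDemo class itself) are illustrative assumptions rather than the project's actual configuration.

import java.util.concurrent.TimeUnit;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

public class CacheDemo {
    // Illustrative stand-ins for the enclosing class's configuration fields.
    private final long expiresAfter = 10;
    private final TimeUnit timeUnit = TimeUnit.MINUTES;
    private final long maxSize = 1_000;

    private Cache<String, Object> buildCache() {
        return Caffeine.newBuilder()
                .expireAfterWrite(expiresAfter, timeUnit)
                .maximumSize(maxSize)
                .build();
    }

    public static void main(String[] args) {
        Cache<String, Object> cache = new CacheDemo().buildCache();
        cache.put("greeting", "hello");
        // getIfPresent returns null once the entry has expired or been evicted.
        System.out.println(cache.getIfPresent("greeting"));
    }
}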
/**
 * [Hard] 1349. Maximum Students Taking Exam
 * https://leetcode.com/problems/maximum-students-taking-exam/
 *
 * Given an m * n matrix seats that represents the seat distribution in a classroom. If a seat is broken, it is
 * denoted by a '#' character, otherwise it is denoted by a '.' character.
 *
 * Students can see the answers of those sitting next to the left, right, upper left and upper right, but they cannot
 * see the answers of the student sitting directly in front of or behind them. Return the maximum number of students
 * that can take the exam together without any cheating being possible.
 *
 * Students must be placed in seats in good condition.
 *
 * Example 1:
 * <img src="./Q1349_PIC.png">
 * Input: seats = [["#",".","#","#",".","#"],
 *                 [".","#","#","#","#","."],
 *                 ["#",".","#","#",".","#"]]
 * Output: 4
 * Explanation: The teacher can place 4 students in available seats so they don't cheat on the exam.
 *
 * Example 2:
 *
 * Input: seats = [[".","#"],
 *                 ["#","#"],
 *                 ["#","."],
 *                 ["#","#"],
 *                 [".","#"]]
 * Output: 3
 * Explanation: Place all students in available seats.
 *
 * Example 3:
 *
 * Input: seats = [["#",".",".",".","#"],
 *                 [".","#",".","#","."],
 *                 [".",".","#",".","."],
 *                 [".","#",".","#","."],
 *                 ["#",".",".",".","#"]]
 * Output: 10
 * Explanation: Place students in available seats in columns 1, 3 and 5.
 *
 * Constraints:
 *
 * seats contains only characters '.' and '#'.
 * m == seats.length
 * n == seats[i].length
 * 1 <= m <= 8
 * 1 <= n <= 8
 */
@RunWith(LeetCodeRunner.class)
public class Q1349_MaximumStudentsTakingExam {

    /**
     * Following the hints, use a bit mask per row to represent how the seats in that row are assigned,
     * and derive each row's result from the previous row.
     */
    @Answer
    public int maxStudents(char[][] seats) {
        final int m = seats.length, n = seats[0].length;
        // dp[i+1][mask] is the best result when the occupied seats of row i are arranged as mask.
        int[][] dp = new int[m + 1][1 << n];
        // Sentinel: every stored result is offset by +1, so subtract 1 when returning.
        dp[0][0] = 1;
        // Data structure caching intermediate results for the BFS.
        int[] cache = new int[1 << n];
        int cacheLen;
        // Iterate row by row.
        for (int i = 0; i < m; i++) {
            for (int mask = 0; mask < (1 << n); mask++) {
                if (dp[i][mask] == 0) {
                    continue;
                }
                cache[0] = 0;
                cacheLen = 1;
                dp[i + 1][0] = Math.max(dp[i + 1][0], dp[i][mask]);
                // BFS over the possible bit combinations for this row.
                for (int j = 0; j < n; j++) {
                    if (seats[i][j] == '#' || ((mask >> j - 1) & 1) == 1 || ((mask >> j + 1) & 1) == 1) {
                        // No student can be seated here.
                        continue;
                    }
                    for (int k = cacheLen - 1; k >= 0; k--) {
                        if (((cache[k] >> j - 1) & 1) == 1) {
                            // The previous position is already occupied.
                            continue;
                        }
                        int newMask = cache[k] | (1 << j);
                        cache[cacheLen++] = newMask;
                        int count = 0;
                        for (int l = 0; l < n; l++) {
                            count += (newMask >> l) & 1;
                        }
                        dp[i + 1][newMask] = Math.max(dp[i + 1][newMask], count + dp[i][mask]);
                    }
                }
            }
        }
        int res = 0;
        for (int i = 0; i < (1 << n); i++) {
            res = Math.max(res, dp[m][i] - 1);
        }
        return res;
    }

    /**
     * A simplified version of the solution above. Same idea, but even slower.
     */
    @Answer
    public int maxStudents2(char[][] seats) {
        final int m = seats.length, n = seats[0].length;
        int[][] dp = new int[2][1 << n];
        dp[0][0] = 1;
        for (int i = 0; i < m; i++) {
            int[] prev = dp[i % 2];
            int[] curr = dp[(i + 1) % 2];
            int currMask = 0;
            for (int j = 0; j < n; j++) {
                if (seats[i][j] == '.') {
                    currMask |= 1 << j;
                }
            }
            for (int prevMask = 0; prevMask < (1 << n); prevMask++) {
                if (prev[prevMask] == 0) {
                    continue;
                }
                // Enumerate every possible mask for the current row and check whether it satisfies the constraints.
                for (int mask = 0; mask < (1 << n); mask++) {
                    if ((mask | currMask) == currMask && ((mask << 1) & prevMask) == 0 && ((mask >> 1) & prevMask) == 0) {
                        int count = 0;
                        for (int j = 0; j < n; j++) {
                            count += (mask >> j) & 1;
                            if (((mask >> j) & (mask >> j + 1) & 1) == 1) {
                                count = -1;
                                break;
                            }
                        }
                        if (count >= 0) {
                            curr[mask] = Math.max(curr[mask], count + prev[prevMask]);
                        }
                    }
                }
            }
        }
        int res = 0;
        for (int i = 0; i < (1 << n); i++) {
            res = Math.max(res, dp[m % 2][i] - 1);
        }
        return res;
    }

    @TestData
    public DataExpectation example1 = DataExpectation.create(new char[][]{
            {'#', '.', '#', '#', '.', '#'},
            {'.', '#', '#', '#', '#', '.'},
            {'#', '.', '#', '#', '.', '#'}
    }).expect(4);

    @TestData
    public DataExpectation example2 = DataExpectation.create(new char[][]{
            {'.', '#'},
            {'#', '#'},
            {'#', '.'},
            {'#', '#'},
            {'.', '#'}
    }).expect(3);

    @TestData
    public DataExpectation example3 = DataExpectation.create(new char[][]{
            {'#', '.', '.', '.', '#'},
            {'.', '#', '.', '#', '.'},
            {'.', '.', '#', '.', '.'},
            {'.', '#', '.', '#', '.'},
            {'#', '.', '.', '.', '#'}
    }).expect(10);

    @TestData
    public DataExpectation normal1 = DataExpectation.create(new char[][]{
            {'.', '#', '#', '.'},
            {'.', '.', '.', '#'},
            {'.', '.', '.', '.'},
            {'#', '.', '#', '#'}
    }).expect(5);

    @TestData
    public DataExpectation normal2 = DataExpectation.create(new char[][]{
            {'.', '#', '.', '#'},
            {'.', '.', '.', '#'},
            {'#', '#', '.', '.'},
            {'.', '#', '#', '#'}
    }).expect(6);
}
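To make the bit-mask conditions that both methods rely on easier to follow, here is a small standalone sketch (not part of the solution class; the class and method names are invented) that spells out the three checks: a row mask may only occupy seats that are in good condition, may not seat two students side by side, and may not place a student diagonally adjacent to one in the previous row.

public class SeatMaskChecks {

    /** True if mask only occupies seats that are available in availableMask. */
    static boolean fitsAvailableSeats(int mask, int availableMask) {
        return (mask | availableMask) == availableMask;
    }

    /** True if no two occupied seats in mask are horizontally adjacent. */
    static boolean noAdjacentStudents(int mask) {
        return (mask & (mask << 1)) == 0;
    }

    /** True if mask has no student diagonally adjacent to a student in prevMask. */
    static boolean noDiagonalCheating(int mask, int prevMask) {
        return ((mask << 1) & prevMask) == 0 && ((mask >> 1) & prevMask) == 0;
    }

    public static void main(String[] args) {
        // Row ".#.." -> seats in good condition at columns 0, 2, 3 -> bits 0b1101.
        int available = 0b1101;
        int candidate = 0b0101;   // students at columns 0 and 2
        int previousRow = 0b0010; // student at column 1 in the row above
        System.out.println(fitsAvailableSeats(candidate, available));   // true
        System.out.println(noAdjacentStudents(candidate));              // true
        System.out.println(noDiagonalCheating(candidate, previousRow)); // false: diagonal neighbours
    }
}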
def rename_item(self, item):
    renamed = {}
    objects = ["A", "B", "C", "D", "E"]
    cnt = 0
    for each in item.task:
        each[0] = "lefts"
        if each[1] in renamed:
            each[1] = renamed[each[1]]
        else:
            renamed[each[1]] = objects[cnt]
            each[1] = renamed[each[1]]
            cnt += 1
        if each[2] in renamed:
            each[2] = renamed[each[2]]
        else:
            renamed[each[2]] = objects[cnt]
            each[2] = renamed[each[2]]
            cnt += 1
    for each in item.choices[0]:
        each[0] = "lefts"
        if each[1] in renamed:
            each[1] = renamed[each[1]]
        if each[2] in renamed:
            each[2] = renamed[each[2]]
One expert says Israel's involvement in world affairs is limited by its focus on survival, but the precedent of saving Jews comes first. Should Israel expand its power? It is not an easy question to ask or an easy expectation to bear. Israel is a small country that has seen it as an imperative to punch above its weight and maintain a qualitiative strategic edge against its enemies. But as the country has prioritized self-preservation since its independence, the challenges are great and at times seemingly overwhelming. While the rise of General Abdel Fattah al-Sisi in Egypt may have somewhat changed things, Israel could have been said to have rivals in Cairo, Ankara and Tehran all at the same time before Sisi took power. At this same moment in history, a resurgence of anti-Semitism has extended pressure on the Jewish Diaspora for whom Israel is conceptually a mainstay and security guarantor. With the emergence of Islamic State (ISIS) and Iran, Israel is in search of friends - but might it be too weak to make them? There is precedent to teach Israelis how they might view their own international “power,” Professor Shmuel Sandler, researcher at the Begin-Sadat Center for Strategic Studies, told Arutz Sheva. “Ben-Gurion spoke of Israel as an or lagoyim – a light unto the nations – in a very spiritual way,” says Sandler, “but we have never had a chance to project that because we have been fighting for survival.” Regional Intervention Sandler illustrates that Jews over time have shown themselves to be more advocates for international peace and order, a realm that is “usually the lot of imperial powers” and not “small nations.” Yet, it would seem that in an ideal situation, Jewish Israel would support international institutionalism. He cites Michael Waltzer’s lecture Universalism and Jewish Values, the Biblical prophet Amos’ rebuke to the nations as well as prophecies by Yeshayahu (Isaiah) and Micah as illustrating support for an international system of order and law. This clearly presents issues for Israelis today, not because Israel is in violation of international law, but because of the widespread consensus among international law specialists in Israel and abroad that Israel could never get a fair hearing if it ever had to present itself to an international court like the International Criminal Court (ICC). But the striving for international order still weighs heavily on any sense of interventionism that might exist for Israelis. Plenty of essays about Passover illustrate there is a stream of liberation theology in Judaism. “Judaism is revolutionary only when the political order is based upon repression, not when it is based upon mutual obligation,” says Sandler in his essay Toward a Theory of World Jewish Politics. The multi-dimensional conflict between Syria and Iraq has solicited calls from other observers for Israel to be more assertive in the region - at least covertly - in order to aid the weak against the aggressive (i.e., numerous minorities in the region versus forces like ISIS). But many observers who might hope Israel would pursue a more assertive policy consistently comeback to needing to resolve the Palestinian question. In his book Regional Powers in the New Middle East, Robert Kappel states that "Israel is a lonely power. It needs a more assertive regional foreign policy." Is Israel capable of incorporating military intervention into its Middle Eastern strategy? 
“Yossi Beilin talked about intervention after peace with the Palestinians, then he said we should go for humanistic delegations as intervention but not military intervention; that is, not without a professional volunteer army.” Here, Sandler describes that the current mission of the Israeli Defense Forces does not envision foreign intervention as a form of defense, and hence would be hard to justify to ordinary Israelis. “When you have a draft you cannot ask some Jewish mother to send her son to fight some dictator in Somalia or some other place.” A Vanguard for Global Jewry “Whether or not Israel should prioritize Jews outside of Israel, that’s a good question,” Sandler told Arutz Sheva. “I bring some examples of that dilemma as a conflict between Israeli interests and Jewish communal interests like South Africa.” “In all those cases posing no actual existential threat to the State of Israel, the Jewish state took world Jewish interests into account,” says Sandler. “The very fact that the Jewish interest in Israel’s foreign policy was at least considered supports the thesis of a Jewish foreign policy on the normative level.” Sandler points to pidyon shvuim – the redeeming of captives – as having a strong influence on Israel’s foreign relations. Israel has spoken out on behalf of world Jewry consistently, whether those communities abroad welcome Israel’s intervention or not. French Jewish leaders were put off by Prime Minister Binyamin Netanyahu's assertive comments after the attack on the kosher Hyper Cacher market in January when he called on French Jews to make Aliyah and implicitly abandon France. Alternatively, it could be looked at that Netanyahu leveraged his own popularity among French Jews to create public pressure on French leaders to be more aggressive toward anti-Semitism in order to avoid further embarrassment at the words of Netanyahu. There are also numerous examples of Israel lobbying on behalf of Soviet Jewry and Algerian Jews, both displayed as situations where Israel had to balance its Jewish interests with its national interests. Sandler points to one example where Israel not only intervened diplomatically, but was willing to spend treasure to free Jews trapped behind one country’s border. “I’ve spoken with several diplomats influenced by Jewish-specific concerns. I know for a fact the Romanian Aliyah we paid money to the government - (the) Jewish foreign minister (communist) made a deal in the 1950s where Israel paid upwards of $100 a head.”
/// Switch to the next directory in the dirs list and apply a new wallpaper from it
fn toggle(config_str: &String, rng: &mut ThreadRng) -> Result<()> {
    let mut config = parse_config(&config_str)?;
    let active_dir = config.active_dir.unwrap_or(0);
    // Switch active dir to next in dirs list, wrapping around
    config.active_dir = if active_dir + 1 < config.dirs.len() {
        Some(active_dir + 1)
    } else {
        Some(0)
    };
    // Save the new config
    write_config(&config, &config_str)?;
    // Choose a new wallpaper from the new dir
    next(&config_str, rng)?;
    Ok(())
}
#include <stdio.h>

int main() {
    //444, 447, 474, 477, 744, 747, 774, 777, 47, 74, 44, 77, 4, 7
    int lucky[] = {4, 7, 44, 47, 74, 77, 444, 447, 474, 477, 744, 747, 774, 777};
    int input;
    scanf("%d", &input);
    int i, isAlmostLucky = 0;
    for (i = 0; i < 14; i++) {
        if (input % lucky[i] == 0) {
            printf("YES\n");
            isAlmostLucky = 1;
            break;
        }
    }
    if (!isAlmostLucky) {
        printf("NO\n");
    }
    return 0;
}
Unfortunately, the voice actors are the very last to know anything about new projects. There have been many rumors surrounding Grand Theft Auto 6, and fans have been hanging onto any information they can possibly find about the game. Now, the latest person to offer up some sort of insight for fans is none other than the voice actor for the eccentric sociopath named Trevor from Grand Theft Auto 5, Steven Ogg. In a recent interview with Comicbook.com, the topic of GTA 6 was of course brought up, and Ogg was asked if he knew anything about the development of the game or whether or not he would be involved. Much to the disappointment of fans, Ogg claimed that he has “no idea” about anything regarding a potential new GTA title, and as for whether he would have anything to do with it, he said that the actors are typically the “very last to know anything” about the project. “I’ve heard about that, and people keep asking me, but I have no idea. The actor is the last to know anything, honestly. Like in everything.” Ogg was, of course, also asked about his time working as Trevor, and he stated that he has very fond memories of his experience playing the character, explaining that the motion capture was the most fun and mostly what brought Trevor’s craziness to life. When asked if he thought that Trevor could possibly make a return in the next title in the series, Ogg explained that it is very unlikely, considering each new installment features a new story and a new set of characters. “He’s a great character, I don’t think they really follow … Those series don’t really follow the characters, they’re separate, so … GTA 6 is what it is and I wish whoever, whatever it is, all the best.” It’s not too much of a surprise that even a voice actor for one of the GTA titles doesn’t know much about the next project, considering how secretive Rockstar is about its work. Although the idea is fun, and a new title will more than likely happen within the next few years, fans shouldn’t get their hopes up too much for something soon, considering Rockstar is still supporting GTA 5 and GTA Online due to their continued success. While a new installment of Grand Theft Auto remains a mystery, fans have the highly anticipated Red Dead Redemption 2, scheduled for Spring 2018, to look forward to, as long as it doesn’t get delayed again. Would you like to see Trevor star in another GTA title? Let us know in our discussions and join us on Discord, on our Facebook page, or Twitter!
#ifndef _CONVOLVER_NODE_HPP_
#define _CONVOLVER_NODE_HPP_

#include "common.hpp"
#include "audio-node.hpp"


class ConvolverNode : public CommonNode {
DECLARE_ES5_CLASS(ConvolverNode, ConvolverNode);

public:
    static void init(Napi::Env env, Napi::Object exports);

    explicit ConvolverNode(const Napi::CallbackInfo &info);
    ~ConvolverNode();

    void _destroy();

private:
    Napi::ObjectReference _buffer;

    JS_DECLARE_METHOD(ConvolverNode, destroy);

    JS_DECLARE_GETTER(ConvolverNode, buffer);
    JS_DECLARE_SETTER(ConvolverNode, buffer);

    JS_DECLARE_GETTER(ConvolverNode, normalize);
    JS_DECLARE_SETTER(ConvolverNode, normalize);
};


#endif // _CONVOLVER_NODE_HPP_
/**
 * Method which should process the given statement of type {@link BreakpointStmt},
 * but is not implemented in the current version. If the method is called, an
 * exception is thrown.
 *
 * @param stmt
 *            Statement that should be processed to check for security violations.
 * @see soot.jimple.StmtSwitch#caseBreakpointStmt(soot.jimple.BreakpointStmt)
 * @throws UnimplementedSwitchException
 *             This exception is always thrown, because the method is not implemented.
 */
@Override
public void caseBreakpointStmt(BreakpointStmt stmt) {
    throw new SwitchException(getMsg("exception.analysis.switch.not_implemented", stmt.toString(),
            getSourceLine(), stmt.getClass().getSimpleName(), this.getClass().getSimpleName()));
}
/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.text;

import android.test.InstrumentationTestCase;
import android.test.MoreAsserts;
import android.test.suitebuilder.annotation.MediumTest;
import android.text.Spannable;

public abstract class SpannableTest extends InstrumentationTestCase {

    protected abstract Spannable newSpannableWithText(String text);

    @MediumTest
    public void testGetSpans() {
        Spannable spannable = newSpannableWithText("abcdef");
        Object emptySpan = new Object();
        spannable.setSpan(emptySpan, 1, 1, 0);
        Object unemptySpan = new Object();
        spannable.setSpan(unemptySpan, 1, 2, 0);

        Object[] spans;

        // Empty spans are included when they merely abut the query region
        // but other spans are not, unless the query region is empty, in
        // which case any abutting spans are returned.
        spans = spannable.getSpans(0, 1, Object.class);
        MoreAsserts.assertEquals(new Object[]{emptySpan}, spans);

        spans = spannable.getSpans(0, 2, Object.class);
        MoreAsserts.assertEquals(new Object[]{emptySpan, unemptySpan}, spans);

        spans = spannable.getSpans(1, 2, Object.class);
        MoreAsserts.assertEquals(new Object[]{emptySpan, unemptySpan}, spans);

        spans = spannable.getSpans(2, 2, Object.class);
        MoreAsserts.assertEquals(new Object[]{unemptySpan}, spans);
    }
}
/**
 * Gets the list of emails to send and tries to send them.
 */
@TransactionTimeout(35000)
@Schedules({ @Schedule(second = "0", minute = "*/10", hour = "*", persistent = false) })
public void sendEmail() {
    try {
        if (!running) {
            running = true;
            appLogger.info("START Execute Elections Mails Sending");
            List<Email> emails = EJBFactory.getInstance().getMailsSendingEJB().getEmailsToSend();
            Properties props = FilesUtils.getEmailProperties();
            appLogger.debug("Email sending properties:");
            appLogger.debug(props.toString());
            String host = EJBFactory.getInstance().getElectionsParametersEJB().getParameter(Constants.EMAIL_HOST);
            String user = EJBFactory.getInstance().getElectionsParametersEJB().getParameter(Constants.EMAIL_USER);
            String password = EJBFactory.getInstance().getElectionsParametersEJB().getParameter(Constants.EMAIL_PASSWORD);
            MailHelper.setSmtpHost(host);
            MailHelper.setUser(user);
            MailHelper.setPass(password);
            Session session = MailHelper.initSession(props);
            for (int i = 0; i < emails.size(); i++) {
                Email email = emails.get(i);
                try {
                    appLogger.info("SENDING EMAIL " + email.getSubject() + " to " + email.getRecipients());
                    if (MailHelper.sendMail(session, email.getSender(), email.getRecipients(), email.getCc(),
                            email.getBcc(), email.getSubject(), email.getBody())) {
                        EJBFactory.getInstance().getMailsSendingEJB().markEmailAsSent(email);
                    } else {
                        appLogger.error("ERROR sending mail to " + email.getRecipients());
                    }
                    if (i % 500 == 0)
                        Thread.sleep(5000);
                } catch (MessagingException e) {
                    appLogger.error("ERROR sending mail to " + email.getRecipients());
                    appLogger.error(e);
                } catch (Exception e) {
                    appLogger.error("ERROR sending mail to " + email.getRecipients());
                    appLogger.error(e);
                }
            }
            appLogger.info("END Execute Elections Mails Sending");
            running = false;
        } else {
            if (attempts < 8) {
                attempts = attempts + 1;
                appLogger.info("WARN Currently running, sending rescheduled for " + attempts + " time.");
            } else {
                appLogger.info("WARN Last time this job is rescheduled, " + attempts + " attempt(s)");
                attempts = 0;
                running = false;
            }
        }
    } catch (Exception e1) {
        appLogger.error(e1);
    }
}
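The method above also shows a simple overlap guard for scheduled jobs: a running flag skips re-entrant runs, an attempts counter tracks how often the job was skipped, and after enough skips the flag is force-reset so a run that died without clearing it cannot block the schedule forever. Below is a minimal, hypothetical sketch of that pattern (OverlapGuardedJob and MAX_ATTEMPTS are invented names, not the project's code); it resets the flag in a finally block, whereas the original clears it only on the success path.

public class OverlapGuardedJob {
    private volatile boolean running = false;
    private int attempts = 0;
    private static final int MAX_ATTEMPTS = 8;

    public void runScheduled(Runnable work) {
        if (!running) {
            running = true;
            try {
                work.run();
            } finally {
                running = false; // reset even on failure, unlike the success-path-only reset above
            }
        } else if (attempts < MAX_ATTEMPTS) {
            attempts++;
            System.out.println("WARN still running, skipped attempt " + attempts);
        } else {
            System.out.println("WARN giving up after " + attempts + " attempts, resetting flag");
            attempts = 0;
            running = false;
        }
    }
}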
/**
 * A specialised version of {@link ScrollPanel} that is intended for use with a
 * {@link TextArea} to provide a scroll-able entry area. This particular class
 * has special requirements for configuring the layout. If you add a
 * {@link TextArea} to an ordinary {@link ScrollPanel} the entire component
 * itself will be scrolled.
 *
 * @author rockfire
 */
public class TextAreaScrollPanel extends ScrollPanel {

    public TextAreaScrollPanel(TextArea textArea) {
        super(textArea.getScreen());
        invalidate();
        setHorizontalScrollBarMode(ScrollBarMode.Never);
        setKeyboardFocusable(true);
        setIgnoreMouseWheel(true);
        setHoverable(true);
        setVerticalScrollBarMode(ScrollBarMode.Auto);
        setScrollContentLayout(new TextScrollLayout());
        getScrollableArea().setUseParentPseudoStyles(true);
        getScrollBounds().setUseParentPseudoStyles(true);
        addScrollableContent(textArea);
        validate();
    }

    @Override
    public ScrollPanel addScrollableContent(BaseElement child, Object constraints) {
        if (getScrollableArea().getElements().size() > 0 || !(child instanceof TextArea))
            throw new IllegalStateException(String.format("A %s may only contain a single %s. There are %d elements.",
                    TextAreaScrollPanel.class, TextArea.class, getScrollableArea().getElements().size()));
        return super.addScrollableContent(child, constraints);
    }

    @Override
    public ScrollPanel insertScrollableContent(BaseElement el, int index) {
        if (getScrollableArea().getElements().size() > 0 || !(el instanceof TextArea))
            throw new IllegalStateException(
                    String.format("A %s may only contain a single %s", TextAreaScrollPanel.class, TextArea.class));
        return super.insertScrollableContent(el, index);
    }

    class TextScrollLayout extends WrappingLayout {

        TextScrollLayout() {
            setOrientation(Orientation.HORIZONTAL);
            setEqualSizeCells(true);
            setWidth(1);
            setFill(false);
        }
    }
}
Dynamin and Activity Regulate Synaptic Vesicle Recycling in Sympathetic Neurons* Neurotransmission in central neuronal synapses is supported by the recycling of synaptic vesicles via endocytosis at different time scales during and after transmitter release. Here, we examine the kinetics and molecular determinants of different modes of synaptic vesicle recycling at a peripheral neuronal synapse formed between superior cervical ganglion neurons in culture, via acute disruption of endocytosis with Dynasore, an inhibitor of dynamin activation, or a dynamin peptide (P4) that perturbs linkage of dynamin to clathrin coats through amphiphysin. When paired action potentials are generated to produce excitatory postsynaptic potential responses, the second response was reduced after application of Dynasore but not P4. In addition, graded reduction in synaptic transmission during a train of action potentials was accelerated by Dynasore but enhanced by P4. After full depletion of releasable vesicles, P4 delayed the recovery of synaptic transmission while Dynasore limited recovery to 10%. In control neurons, synaptic transmission is stable for more than 1 h under low frequency presynaptic stimulation (0.2 Hz), but was reduced gradually by P4 and rapidly but incompletely blocked by Dynasore at a much lower stimulation frequency. These results suggest two essential modes of dynamin-mediated synaptic vesicle recycling, one activity-dependent and the other activity-independent. Our findings extend the current understanding of synaptic vesicle recycling to sympathetic nerve terminals and provide evidence for a physiological and molecular heterogeneity in endocytosis, a key cellular process for efficient replenishment of the vesicle pool, and thus for synaptic plasticity. The cycling of synaptic vesicles (SVs) 3 through repetitive episodes of exocytosis and endocytosis is fundamental to synaptic transmission (1). The classic endocytic cycle consists of vesicle exocytosis from a readily releasable pool (RRP) (2) followed by retrieval via a clathrin-mediated endocytotic pathway (3) that passes through a reserve pool (RP) (4) en route to the RRP (5,6). The classic pathway, well studied at the frog neuromuscular junction (3), also functions in brain central synapses (7,8). In addition, other nontraditional modes of recycling such as "kiss-and-run" (9) and fast recycling, which bypasses the RP (10), have been described (11)(12)(13)(14). Endocytic pathways have also been categorized in terms of their kinetics, as fast or slow (1,15). Together, it seems likely that various forms of SV recycling pathways function under different conditions of synaptic activity and in a cell type-specific manner (15,16). Although the precise mechanisms of synaptic reformation remain a matter of debate (3)(4)(5)(6)(7), the GTPase dynamin has a key role for this process (17)(18)(19). Dynamin oligomerization in endocytic pits mediates neck constriction and scission (20). As a component of the clathrin coat, amphiphysin interacts with dynamin and links clathrin-coats to dynamin (21). Dynamin binds to the Src homology 3 (SH3) domain of amphiphysin I via the PSRPNR sequence in the dynamin polyproline domain near its C terminus (22,23). A myristoylated peptide derived from this sequence called P4 (QVPSR-PNRAP) is able to competitively block dynamin binding to amphiphysin I and II in vitro (24) and inhibits SV endocytosis, thus resulting in the depression of transmitter release (25). 
Dynasore, a specific cell-permeable dynamin inhibitor (26), completely blocks SV endocytosis, suggesting an essential role for dynamin in all forms of compensatory SV endocytosis, including "kiss-and-run" events (27). However, in the calyx of Held, a dynamin-independent endocytosis was detected in the presence of Dynasore (28). The mechanism of SV recycling in sympathetic neurons remains an open question. To investigate this issue, we studied the cholinergic synapse formed between rat SCG neurons in culture (29). In this model system, it is possible to introduce reagents directly into presynaptic terminals by microinjection, and their effect on acetylcholine release evoked by action potentials can be monitored by recording EPSPs from neighboring neurons (29,30). By perturbing dynamin function with either P4 peptide or Dynasore, we examine both activity-dependent and -independent modes of endocytosis, as well as dynamin-dependent and -independent pathways for refilling of the RRP in sympathetic neurons. EXPERIMENTAL PROCEDURES Cultured SCG neurons were prepared as described previously (29,30). For immunocytochemistry, SCG neurons in culture (8 weeks) were fixed and stained as described previously (30) with polyclonal anti-dynamin 1 and anti-amphiphysin II antibodies (Santa Cruz Biotechnology, Santa Cruz, CA) or monoclonal anti-synaptophysin antibody (Sigma-Aldrich) (supplemental Fig. S1). For electrophysiology, SCG neurons 6 -8 weeks in culture were studied. EPSP recording and injection of peptides were performed as described previously (29,30). To measure the replenishment of the RRP with readily releasable SVs, either a paired-pulse protocol or 5-30 Hz stimulation for 2 s was applied. For each neuron pair, three recordings were performed every 2 min for each interval of stimuli, and the EPSP peak amplitudes were averaged to account for variations in transmitter release following repetitive action potentials. For the depletion of synaptic vesicles, action potentials were applied at 5 Hz for 4 min, and replenishment of readily releasable SVs was monitored by tracking the recovery of baseline EPSP via recording every 1 s. The peak amplitudes of EPSP were averaged, and the resultant values were smoothed by an eight-point moving average algorithm. To measure the change in readily releasable SVs during prolonged repetitive activity, EPSPs were recorded at either 0.2 or 0.05 Hz. The peak amplitudes were averaged and plotted against recording time with t ϭ 0 corresponding to the presynaptic injection of P4 (QVPSR-PNRAP), a scrambled control peptide (QPPASNPRVR), or bath application of Dynasore. 1 mM peptide in the injection pipette was applied as this is the concentration producing a maximum reduction of EPSP amplitude, whereas 5 mM peptide showed no further reduction. 4 For Dynasore bath application, 100 l of 1 mM Dynasore dissolved in 5% DMSO was drop-applied to a 1.25-ml bath. A final concentration of 80 M (0.4% DMSO) Dynasore was used to achieve maximum inhibition of endocytosis, because this concentration completely blocked all forms of endocytosis in hippocampal neurons (28). As a control, 5% DMSO was drop-applied, producing a bath concentration of 0.4%. To reach the final concentration it takes a few minutes after bath superfusion was stopped (31). Before recording EPSPs at 20 min after the Dynasore application, cultured SCG neurons showed no spontaneous synaptic activity (32), suggesting that reduction of SVs in the RRP with the treatment is unlikely. 
Error bars shown in the text and figures represent mean Ϯ S.E. A two-tailed Student t test was applied as indicated. Readily Releasable SVs after Evoked Transmitter Release- To examine the readily releasable SVs after single action potential-evoked transmitter release, a paired-pulse protocol was applied under acute disruption of endocytosis by P4 or Dynasore ( Fig. 1). Synaptic responses induced by two consecutive action potentials showed a depression of the second response (paired-pulse depression) with inter-stimulus interval (ISI) of 20 -100 ms. In contrast, at longer ISIs (200 -2000 ms) the amplitudes of the second response were similar to the first (Fig. 1A). Paired-pulse responses were subsequently recorded 20 min after either P4 (Fig. 1A) or Dynasore application (Fig. 1B). The amplitude and the ratio of the EPSPs did not change with P4 (Fig. 1A). In contrast, with Dynasore the amplitude of the second EPSP decreased more than the first EPSP (with ISI ϭ 50 ms: 14.7 Ϯ 0.9 mV 1st and 11.2 Ϯ 0.7 mV 2nd before Dynasore and 8.8 Ϯ 0.5 mV 1st and 5.0 Ϯ 0.7 mV 2nd after Dynasore; with ISI ϭ 120 ms: 13.2 Ϯ 0.9 mV 1st and 14.7 Ϯ 0.9 mV 2nd before Dynasore and 9.3 Ϯ 0.5 mV 1st and 6.6 Ϯ 1.0 mV 2nd after Dynasore, mean Ϯ S.E., n ϭ 5; p Ͻ 0.05, paired Student t test) (Fig. 1B, panel b). Thus the paired-response ratio decreased with Dyna-FIGURE 1. Replenishment of readily releasable SVs after single evoked action potential. EPSPs were elicited by two consecutive action potentials with various inter-stimulus-interval (ISI). A, before and after microinjection of P4 (QVPSRPNRAP). a, EPSPs from one representative experiment recorded before P4 (black), at 35 min post-P4 (red, upper trace) with an ISI of 50 ms, and at 45 min post-P4 (red, lower trace) with an ISI of 120 ms. b, the ratio of the peak amplitude of the second EPSP to the first EPSP is plotted against the ISI. Black line, control EPSPs; red line, 20 -75 min after P4. B, before and after bath application of Dynasore. a, EPSPs from one representative experiment recorded before (black) and 20 min after Dynasore (pink, upper trace) with an ISI of 50 ms and 26 min after Dynasore (pink, lower trace) with an ISI of 120 ms. b, the EPSP amplitude with the ISI of 50 ms and 120 ms before (gray columns) and after Dynasore (pink columns). c, the ratio of the peak amplitude of the second EPSP to the first EPSP. b and c, mean Ϯ S.E. (n ϭ 5-6); *, p Ͻ 0.05; **, p Ͻ 0.01; paired Student t test. sore (with ISI ϭ 50 ms, from 0.62 Ϯ 0.03 to 0.45 Ϯ 0.06; with ISI ϭ 120 ms, from 1.1 Ϯ 0.03 to 0.70 Ϯ 0.08; p Ͻ 0.05, paired t test) (Fig. 1B, panel c). These results suggest that dynamin dysfunction prevented replenishment of readily releasable SVs with an ISI of Ͻ120 ms, although the clathrin-mediated pathway might not function in replenishment of readily releasable SVs with an ISI of Ͻ2 s. The time for the dynamin-mediated replenishment after a single action potential is much shorter than previous reports for fast endocytosis in hippocampal synapses in imaging studies ( ϭ 0.4 -6 s) (15). Dynasore is a specific inhibitor for endocytosis (24,28) and unlikely to affect SV fusion. Although the mechanism of paired-pulse depression in central synapses is thought to be associated with a decrease in release probability (33), SVs are able to fully release from the RRP within tens of milliseconds in synapses of cultured SCG neurons when depression is removed (34). 
The results indicate that rapid replenishment of the RRP via dynamin-mediated SV recycling occurs after an evoked transmitter release. Readily Releasable SVs during and after Repetitive Transmitter Release-To examine readily releasable SVs during and after repetitive action potential firing, 2-s trains of 5-30 Hz action potentials were elicited every 2 min in five synaptic couples. The EPSP amplitude decreased during the train (Fig. 2, A-D, upper graphs), and the rate of the decay was dependent on the frequency of action potential generation (Fig. 2I). The trainevoked decrease in EPSP amplitude returned to the initial value 2 min after cessation of each train. Normalized first EPSP amplitudes recorded at 10, 20, and 30 Hz to those at 5 Hz were around 1 (Fig. 2, A-D and L). At 2 min after a series of recording, P4 or Dynasore was applied. EPSP recordings with trains of action potentials were resumed at 20 min after reagent application. In the presence of P4 or Dynasore, the EPSP amplitude did not return to the initial value during a 22-min cessation of firing. Normalized first EPSP amplitudes recorded at 5 Hz were 0.45 Ϯ 0.06 for P4 and 0.2 Ϯ 0.03 for Dynasore (Fig. 2, A, C, J, and L). Dynasore decreased EPSP amplitude rapidly with each successive train of action potentials, especially Ն10 Hz (Fig. 2, E-H and K). P4 enhanced the EPSP amplitude decrease at 20 and 30 Hz (G-I). As a control, the scrambled P4 peptide and DMSO control showed no change in EPSP amplitude (Fig. 2, B and D). The inhibition of the recovery with Dynasore was stronger than that with P4 (Fig. 2J). These results suggest that activitydependent endocytosis could be activated by a 2-s train of action potentials at Ͼ10 Hz, in addition to dynamin-mediated endocytosis seen after a single action potential (Fig. 1B). It should be noted that, in addition to recycling from the plasma membrane, Dynasore will inhibit vesicle budding from sorting endosomes that supply de novo synaptic vesicles to the RRP (35). Interestingly, the train-evoked decrease in EPSP amplitude in the presence of Dynasore returned to the initial value 2 min after cessation of each train. Normalized first EPSP amplitudes recorded at 10, 20, and 30 Hz were ϳ0.2 (Fig. 2, C, J, and L), suggesting a possible dynamin-independent pathway for replenishment of readily releasable SVs through the transport route from the RP to the RRP during the 2-min cessation of each train. We note that an incomplete effect of Dynasore or an effect of dynamin 3 at this synapse cannot be excluded with these data. However, inhibition of available dynamin 1 and 2 and most if not all components of endocytosis would be consistent with prior work employing similar concentrations of Dynasore (27,28). Replenishment of Readily Releasable SVs after Depletion of Releasable SVs-To test whether both dynamin-dependent and -independent pathways can replenish readily releasable SVs, SVs in the presynaptic terminals were depleted with 4-min trains of 5-Hz action potentials, and the recovery of EPSP amplitude was measured every 1 s. At the end of the train, the EPSP amplitude was within baseline noise levels (Fig. 3, A and B), and subsequently recovered at two distinct rates: fast and slow (see arrows in Fig. 3A, panel a). A control scrambled P4 peptide or DMSO did not show any reduction in the recovery rate (Fig. 3, A (panel b) and B (panel b)). In contrast, both P4 and Dynasore inhibited fast recovery (Fig. 3, A (panel a), B (panel a), and C). 
At 20 s after the train, the EPSP amplitude was significantly smaller than that before reagent applications (Fig. 3C, panel b). These results indicate that releasable SVs are depleted, while SVs in the RP may persist at the end of a 4-min train of action potentials. Furthermore, SVs in the RP may refill the RRP through the dynamin-dependent and clathrin-mediated pathway. In addition, readily releasable SVs may also be replenished through a dynamin-independent pathway in the presence of Dynasore. At 5 min after the train, the EPSP amplitude recovered to 41.4 Ϯ 4.5% with P4, but remained at 12.3 Ϯ 2.1% with Dynasore (Fig. 3C, panel b). The slow recovery could be described by a linear relationship (Fig. 3C, panel a). The slopes before and after P4 injection were 8.1 Ϯ 1.7%/min and 6.7 Ϯ 1.6%/min (p ϭ 0.63, unpaired t test), whereas the slope with Dynasore was 0 Ϯ 0.4%/min (Fig. 3C, panel a). These results demonstrate that the slow recovery rate was not significantly affected by P4 but blocked completely by Dynasore, suggesting the replenishment of readily releasable SVs through dynamindependent recycling (18) or de novo sorting via an endosomal pool (35). Together, the results suggest that the fast replenishment of the readily releasable SVs may involve SV transport from the RP via dynamin-mediated and non-dynamin-mediated pathway, whereas the slow replenishment of the readily releasable SVs may be achieved solely through dynamin-mediated endocytosis. Readily Releasable SVs during Low Frequency Repetitive Transmitter Release-To examine the role of the dynamin-mediated pathway in replenishment of readily releasable SVs during low frequency repetitive transmitter release, changes in the amplitude of EPSPs evoked by presynaptic action potentials at 0.2 or 0.05 Hz were measured (Fig. 4). P4 gradually reduced the EPSP amplitude at 0.2 Hz (Fig. 4A, panels a and c), but not at 0.05 Hz (Fig. 4A, panels b and c). At 40 min after P4 injection, reduction of EPSP amplitude was Ϫ47.9 Ϯ 9.4% at 0.2 Hz (n ϭ 7), and Ϫ11.7 Ϯ 6.4% at 0.05 Hz (n ϭ 4). This value was similar to the control value with the scrambled P4 peptide (Ϫ8.9 Ϯ 4.9% at 0.2 Hz; n ϭ 6) (Fig. 4A, panel d) In contrast, Dynasore reduced the EPSP amplitude at 0.2 and 0.05 Hz (Fig. 4B). The reduction rate was more rapid than that of P4 with 0.2 Hz stimuli (Fig. 4, A (panel a) versus B (panel a)). The decay time constant of the EPSP amplitude in the presence of Dynasore was 4.8 Ϯ 0.12 min at 0.2 Hz, and 13.2 Ϯ 0.17 min at 0.05 Hz (p Ͻ 0.01, unpaired t test) (Fig. 4B, panel d), whereas it was 28 Ϯ 1.0 min at 0.2 Hz in the presence of P4. These results suggest that dynamin also mediates replenishment of readily releasable SVs during low frequency firing. Surprisingly, the EPSP amplitude was very small at 60 min after Dynasore application, but not completely blocked. The amplitudes were 6.8 Ϯ 1.6% (at 0.2 Hz) and 6.8 Ϯ 0.9% (at 0.05 Hz) of the initial value before Dynasore application, suggesting that non-dynamin-mediated processes may function in replenishing readily releasable SVs, although further experiments will be necessary to address this issue in future. DISCUSSION In this study, we demonstrate that sympathetic neurons maintain synaptic transmission via the recycling of SVs through dynamin-mediated pathways during and after action potential activity. In addition, we provide evidence for a non-dynamin- F-H). I, effect of P4. Each EPSP amplitude was normalized to the first EPSP amplitude of each train frequency. 
The line shows a fitted curve with exponential decay. J, comparison of P4 and Dynasore effects. Each EPSP amplitude was normalized to the first EPSP amplitude of each frequency train recorded before reagent application. The line shows a fitted curve with exponential decay. K, decay time constant of EPSP amplitude against the action potentials number calculated with the first order exponential decay curve. L, amplitudes of the first EPSP of each train before and after application of P4, scrambled P4, Dynasore, and DMSO were normalized to the first EPSP amplitude at 5 Hz stimuli. *, p Ͻ 0.05, paired Student t test. mediated endocytic pathway, assuming that P4 and Dynasore blockade of dynamin-mediated recycling is complete. Refilling of the RRP via dynamin-mediated endocytic pathways was dependent on both rate and number of action potential firing (Figs. , in accord with activity-dependent recycling of synaptic vesicles observed at other synapses. In contrast, another mode of the RRP refilling through a dynamin-mediated endocytic pathway was activated independently of action potential firing rate and number (Figs. 1-4), consistent with an activityindependent pathway. The third pathway, not affected by dynamin dysfunction, was also activated at all rates or numbers of firing tested (Figs. 3 and 4) and was thus activity-independent; it is estimated that 10% of SVs in readily releasable SVs were replenished via this pathway to maintain efficient synaptic vesicle recycling with long lasting repetitive firing of the SCG neuron. Compared with neurons in the central nervous system, sympathetic nerve fibers show relatively low firing activity in the 0.5-to 7.5-Hz range in vivo (36). Thus, evidence for activity-dependent refilling of the RRP observed in this study may reflect physiological synaptic transmission in autonomic neurons in vivo. The kinetics of endocytosis is variable at different presynaptic terminals (15,16). In hippocampal neurons, imaging studies have shown a wide range of time constants for endocytosis from a ϭ 0.1-6 s for fast components (13,15) and ϭ 4 -90 s for slow clathrin-mediated endocytosis (15). In Drosophila neuromuscular synapses, two pathways of vesicle recycling (37) and two or three SVs pools, the RRP and the RP (38) or "immedi-ately releasable pool" (39), were documented. During short period or low frequency presynaptic activity, SVs in the RRP, including the immediately releasable pool but not those in the RP, participate in transmitter release, whereas vesicles in the RP are required during intense neuronal activity (Ͼ10 Hz). A dynamin mutant, shibire, exhibits rapid synaptic fatigue within 0.02 s of repetitive stimulation, a phenotype that cannot be explained by vesicle depletion, suggesting that dynamin is required for rapid replenishment of the RRP with synaptic vesicles (40). In the present study, reduction of the second of two consecutive EPSPs with an ISI of 0.05 s by Dynasore (Fig. 1B) suggests that in sympathetic neurons dynamin is also required for rapid replenishment of readily releasable SVs, in addition to its role in slow clathrin-mediated endocytosis, which contributes to SVs recycling via the RP. Our data suggest approximate time constants for endocytosis in cultured sympathetic neurons. The dynamin-mediated pathway is able to mobilize SVs to the RRP in Ͻ0.05 s after an action potential (Fig. 1B), and the dynamin-and clathrin-mediated pathway is able to transport SVs to the RRP in under 20 s (Fig. 
4A), if P4, as proposed, could perturb clathrin-coats formation. Measuring the time constant of readily releasable SV replenishment via the non-dynamin-mediated pathway is technically difficult because of the small synaptic responses after dynamin inhibition. Using primary cortical cultures from dynamin 1 knockout mice, Ferguson et al. (18) recently demonstrated that neuron- application (B, panel a). After 1 min control recording, a 4-min stimulation at 5 Hz was applied to deplete synaptic vesicles. EPSP amplitudes were normalized to the mean EPSP amplitudes before the 4-min train. The values from five to seven experiments were averaged and smoothed with a moving average algorithm. Note that the EPSP amplitude was zero at the end of the train stimuli and showed partial recovery at two different rates after cessation of SV depletion: fast (small arrow) and slow (large arrow). P4 or Dynasore application was performed 10 min after the EPSP recording. As a control, a scrambled P4 peptide (A, panel b) or DMSO was applied (B, panel b). C, comparison of P4 and Dynasore effects shown in A, panel a, and B, panel a. a, straight lines are linear fits of the slow recovery phase. Orange, before P4 or Dynasore; cyan, after P4; violet, after Dynasore. b, EPSP amplitude 20 s (left) and 5 min (right) after the train cessation was normalized to mean EPSP amplitudes before the 4 min train. *, p Ͻ 0.05; **, p Ͻ 0.01; paired Student t test. specific dynamin 1 is required for rapid SV recycling during high frequency (Ͼ10 Hz) stimulation, but not after cessation of the stimulus train. Dynamin 3 appears to share a similar role to dynamin 1. In contrast, ubiquitously expressed Dynamin 2 may play a role in slow activity-independent constitutive replenishment of clathrin-coated vesicles. Here we report that presynaptic terminals of a sympathetic neuron have two dynamin-mediated SV replenishment pathways, which differ in activity dependence. The relationship of these two recycling modes to specific dynamin isoforms will require further investigation. The differential effects of P4 peptide and Dynasore may be accounted for different steps in which they are likely to participate in the endocytic pathway. P4 disrupts the interaction of dynamin with amphiphysin (24), which may interfere with formation of the clathrin-coat (21) or fission complex. On the other hand, Dynasore may act at a downstream step involving dynamin activity and subsequently provide constitutive inactivation of dynamin across the endocytic cycle. It is also possible that, by targeting different parts of the dynamin endocytic complex, P4 peptides and Dynasore may act on different time scales and efficacies and thus account for their differential effects. Clearly the two reagents provide unique insight into distinct roles of dynamin in endocytosis with respect to synaptic vesicle pool recovery during different patterns of action potential stimulation. We should caution that one cannot exclude incomplete inhibition of dynamin function by injected P4 peptides, which may be unable to fully target preassembled dynamin endocytic complexes due to steric hindrance and lower concentrations at nerve terminals. Because it was technically unfeasible to collect enough injected neurons for in vitro biochemical co-immunoprecipitation study, we are unable to provide experimental evidence to verify the assumption that injected P4 peptide could completely block the dynamin-amphiphysin interaction in our SCG neurons. 
Although the amphiphysin I/II knockout mice exhibit some defects in synaptic vesicle recycling (41), its essential role in clathrin-mediated endocytosis remains further investigation. It is possible that P4 might target not only to amphiphysin but also to other SH3 domain-containing proteins (24), thus playing a less specific role in blocking endocytosis than that by Dynasore. However, previous work in the SCG neuron system has indicated that injected peptides at even lower concentrations can disassemble other preformed synaptic protein complexes (42). Furthermore, P4 peptides have the efficacy to block clathrin-dependent endocytosis at intracellular concentrations (25) that are comparable to those used in the current study, although it was injected into different neuronal types and the effect was measured by different readouts. Likewise, one must interpret the Dynasore inhibition data with caution. Dynasore is a fast-acting cell-permeable small molecule that inhibits the GTPase activity of dynamin 1, dynamin 2, and Drp1, the mitochondrial dynamin (43). Thus, it remains possible that in SCG neurons Dynasore may inhibit endocytosis dependent on dynamins 1 and 2 but not dynamin 3. Incomplete inhibition can also not be excluded without an independent measure of endocytosis inhibition. To address these issues, investigation of the role of dynamin isoforms in synaptic vesicle endocytic pathways in SCG neurons is under further study. However, it should be noted that robust inhibition of dynamin 1 and 2 and most if not all kinetic components of endocytosis is consistent with prior work employing similar concentrations of Dynasore (27,28). The results in the present study suggest a dynamin-independent pathway for SV recycling in a sympathetic neuron synapse. Recent studies at other synapses support this conclusion. SV endocytosis at the large presynaptic terminal of the calyx of Held is partially independent of dynamin (29). Endocytosis activated during intense stimulation persists in the calyx synapse dialyzing with Dynasore and PQVPSRPNRAP (called pp11, one amino acid longer than P4) (29). In peripheral dorsal root ganglion neurons, a calcium-and dynamin-independent form of rapid endocytosis, which is controlled by protein kinase A-dependent phosphorylation, has been described (44). It will be of interest to further examine the molecular and physiological basis of vesicle fission via dynamin-independent endocytosis and its relationship to kiss-and-run and other proposed fast, non-classic modes of synaptic vesicle recycling. The synaptic short-term depression observed during repetitive neuronal firing may be attributed to multiple mechanisms, including a decrease in vesicle fusion probability, inactivation of voltage-gated Ca 2ϩ channels (34), or use-dependent inhibition of the vesicle release machinery (33). In hippocampal or neocortical neurons, rapidly recycled SVs in the RRP are capable of rapid reuse (45) and slow the rate of synaptic depression (46). Our study shows that synaptic transmission in cultured SCG neurons also decreases rapidly in response to repetitive action potential firing (Fig. 2). The decrease was strongly accelerated in the presence of Dynasore, suggesting that a rapid reduction in the number of vesicles available for fast release may contribute to synaptic depression. These results may suggest that dynamin-mediated pathways are critical to maintain baseline levels of neurotransmission. 
Together, our results along with other results in the literature indicate that distinct endocytic pathways may be engaged under distinct patterns of synaptic activity history, short-and long-term neuromodulation and cell type. For example, the release probability of presynaptic terminals (11), Ca 2ϩ and protein kinase activity may regulate the relative engagement of endocytotic pathways as a function of firing history and modulation (5,13,16). In addition, dynamic engagement of multiple modes of recycling may be useful in homeostatic maintenance of a working amplitude and gain of synaptic transmission under different stimulation patterns. In summary, the present data characterize the fundamental modes of SV recycling and refilling of the RRP in SCG neurons during and after single or sustained firing of action potentials, supporting the involvement of activity-dependent and -independent pathways with distinct molecular requirements for synaptic vesicle endocytosis.
// BadRequest returns a client error that has a status of 400 (bad request).
//
// The returned error has a PublicStatusCode() method, which indicates that the
// status code is public and can be returned to a client.
func BadRequest(msg ...string) errors.Error {
    return statusError{
        message: makeMessage("bad request", msg),
        status:  http.StatusBadRequest,
    }
}
// ensureColumns applies a projection as necessary to make the output match the
// given list of columns.
func (b *Builder) ensureColumns(
    input execPlan, colList opt.ColList, provided opt.Ordering,
) (execPlan, error) {
    cols, needProj := b.needProjection(input, colList)
    if !needProj {
        return input, nil
    }
    var res execPlan
    for i, col := range colList {
        res.outputCols.Set(int(col), i)
    }
    reqOrdering := exec.OutputOrdering(res.sqlOrdering(provided))
    var err error
    res.root, err = b.factory.ConstructSimpleProject(input.root, cols, reqOrdering)
    return res, err
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Immutable; import com.azure.json.JsonReader; import com.azure.json.JsonSerializable; import com.azure.json.JsonToken; import com.azure.json.JsonWriter; import com.azure.search.documents.models.AnswerResult; import com.azure.search.documents.models.FacetResult; import com.azure.search.documents.models.SemanticPartialResponseReason; import com.azure.search.documents.models.SemanticPartialResponseType; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; /** Response containing search results from an index. */ @Immutable public final class SearchDocumentsResult implements JsonSerializable<SearchDocumentsResult> { /* * The total count of results found by the search operation, or null if the count was not requested. If present, * the count may be greater than the number of results in this response. This can happen if you use the $top or * $skip parameters, or if Azure Cognitive Search can't return all the requested documents in a single Search * response. */ private Long count; /* * A value indicating the percentage of the index that was included in the query, or null if minimumCoverage was * not specified in the request. */ private Double coverage; /* * The facet query results for the search operation, organized as a collection of buckets for each faceted field; * null if the query did not include any facet expressions. */ private Map<String, List<FacetResult>> facets; /* * The answers query results for the search operation; null if the answers query parameter was not specified or set * to 'none'. */ private List<AnswerResult> answers; /* * Continuation JSON payload returned when Azure Cognitive Search can't return all the requested results in a * single Search response. You can use this JSON along with @odata.nextLink to formulate another POST Search * request to get the next part of the search response. */ private SearchRequest nextPageParameters; /* * Reason that a partial response was returned for a semantic search request. */ private SemanticPartialResponseReason semanticPartialResponseReason; /* * Type of partial response that was returned for a semantic search request. */ private SemanticPartialResponseType semanticPartialResponseType; /* * The sequence of results returned by the query. */ private final List<SearchResult> results; /* * Continuation URL returned when Azure Cognitive Search can't return all the requested results in a single Search * response. You can use this URL to formulate another GET or POST Search request to get the next part of the * search response. Make sure to use the same verb (GET or POST) as the request that produced this response. */ private String nextLink; /** * Creates an instance of SearchDocumentsResult class. * * @param results the results value to set. */ public SearchDocumentsResult(List<SearchResult> results) { this.results = results; } /** * Get the count property: The total count of results found by the search operation, or null if the count was not * requested. If present, the count may be greater than the number of results in this response. 
This can happen if * you use the $top or $skip parameters, or if Azure Cognitive Search can't return all the requested documents in a * single Search response. * * @return the count value. */ public Long getCount() { return this.count; } /** * Get the coverage property: A value indicating the percentage of the index that was included in the query, or null * if minimumCoverage was not specified in the request. * * @return the coverage value. */ public Double getCoverage() { return this.coverage; } /** * Get the facets property: The facet query results for the search operation, organized as a collection of buckets * for each faceted field; null if the query did not include any facet expressions. * * @return the facets value. */ public Map<String, List<FacetResult>> getFacets() { return this.facets; } /** * Get the answers property: The answers query results for the search operation; null if the answers query parameter * was not specified or set to 'none'. * * @return the answers value. */ public List<AnswerResult> getAnswers() { return this.answers; } /** * Get the nextPageParameters property: Continuation JSON payload returned when Azure Cognitive Search can't return * all the requested results in a single Search response. You can use this JSON along with @odata.nextLink to * formulate another POST Search request to get the next part of the search response. * * @return the nextPageParameters value. */ public SearchRequest getNextPageParameters() { return this.nextPageParameters; } /** * Get the semanticPartialResponseReason property: Reason that a partial response was returned for a semantic search * request. * * @return the semanticPartialResponseReason value. */ public SemanticPartialResponseReason getSemanticPartialResponseReason() { return this.semanticPartialResponseReason; } /** * Get the semanticPartialResponseType property: Type of partial response that was returned for a semantic search * request. * * @return the semanticPartialResponseType value. */ public SemanticPartialResponseType getSemanticPartialResponseType() { return this.semanticPartialResponseType; } /** * Get the results property: The sequence of results returned by the query. * * @return the results value. */ public List<SearchResult> getResults() { return this.results; } /** * Get the nextLink property: Continuation URL returned when Azure Cognitive Search can't return all the requested * results in a single Search response. You can use this URL to formulate another GET or POST Search request to get * the next part of the search response. Make sure to use the same verb (GET or POST) as the request that produced * this response. * * @return the nextLink value. 
*/ public String getNextLink() { return this.nextLink; } @Override public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { jsonWriter.writeStartObject(); jsonWriter.writeArrayField("value", this.results, (writer, element) -> writer.writeJson(element)); jsonWriter.writeNumberField("@odata.count", this.count); jsonWriter.writeNumberField("@search.coverage", this.coverage); jsonWriter.writeMapField( "@search.facets", this.facets, (writer, element) -> writer.writeArray(element, (writer1, element1) -> writer1.writeJson(element1))); jsonWriter.writeArrayField("@search.answers", this.answers, (writer, element) -> writer.writeJson(element)); jsonWriter.writeJsonField("@search.nextPageParameters", this.nextPageParameters); jsonWriter.writeStringField( "@search.semanticPartialResponseReason", Objects.toString(this.semanticPartialResponseReason, null)); jsonWriter.writeStringField( "@search.semanticPartialResponseType", Objects.toString(this.semanticPartialResponseType, null)); jsonWriter.writeStringField("@odata.nextLink", this.nextLink); return jsonWriter.writeEndObject(); } /** * Reads an instance of SearchDocumentsResult from the JsonReader. * * @param jsonReader The JsonReader being read. * @return An instance of SearchDocumentsResult if the JsonReader was pointing to an instance of it, or null if it * was pointing to JSON null. * @throws IllegalStateException If the deserialized JSON object was missing any required properties. * @throws IOException If an error occurs while reading the SearchDocumentsResult. */ public static SearchDocumentsResult fromJson(JsonReader jsonReader) throws IOException { return jsonReader.readObject( reader -> { boolean resultsFound = false; List<SearchResult> results = null; Long count = null; Double coverage = null; Map<String, List<FacetResult>> facets = null; List<AnswerResult> answers = null; SearchRequest nextPageParameters = null; SemanticPartialResponseReason semanticPartialResponseReason = null; SemanticPartialResponseType semanticPartialResponseType = null; String nextLink = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); reader.nextToken(); if ("value".equals(fieldName)) { results = reader.readArray(reader1 -> SearchResult.fromJson(reader1)); resultsFound = true; } else if ("@odata.count".equals(fieldName)) { count = reader.getNullable(JsonReader::getLong); } else if ("@search.coverage".equals(fieldName)) { coverage = reader.getNullable(JsonReader::getDouble); } else if ("@search.facets".equals(fieldName)) { facets = reader.readMap( reader1 -> reader1.readArray(reader2 -> FacetResult.fromJson(reader2))); } else if ("@search.answers".equals(fieldName)) { answers = reader.readArray(reader1 -> AnswerResult.fromJson(reader1)); } else if ("@search.nextPageParameters".equals(fieldName)) { nextPageParameters = SearchRequest.fromJson(reader); } else if ("@search.semanticPartialResponseReason".equals(fieldName)) { semanticPartialResponseReason = SemanticPartialResponseReason.fromString(reader.getString()); } else if ("@search.semanticPartialResponseType".equals(fieldName)) { semanticPartialResponseType = SemanticPartialResponseType.fromString(reader.getString()); } else if ("@odata.nextLink".equals(fieldName)) { nextLink = reader.getString(); } else { reader.skipChildren(); } } if (resultsFound) { SearchDocumentsResult deserializedSearchDocumentsResult = new SearchDocumentsResult(results); deserializedSearchDocumentsResult.count = count; deserializedSearchDocumentsResult.coverage = coverage; 
deserializedSearchDocumentsResult.facets = facets; deserializedSearchDocumentsResult.answers = answers; deserializedSearchDocumentsResult.nextPageParameters = nextPageParameters; deserializedSearchDocumentsResult.semanticPartialResponseReason = semanticPartialResponseReason; deserializedSearchDocumentsResult.semanticPartialResponseType = semanticPartialResponseType; deserializedSearchDocumentsResult.nextLink = nextLink; return deserializedSearchDocumentsResult; } List<String> missingProperties = new ArrayList<>(); if (!resultsFound) { missingProperties.add("value"); } throw new IllegalStateException( "Missing required property/properties: " + String.join(", ", missingProperties)); }); } }
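// Illustration (not part of the generated class): a minimal sketch of deserializing a raw
// Search response body with this model. JsonProviders comes from com.azure.json; "rawJson"
// is a placeholder String holding the response payload, not a value from this codebase.
try (JsonReader reader = JsonProviders.createReader(rawJson)) {
    SearchDocumentsResult page = SearchDocumentsResult.fromJson(reader);
    System.out.println("results in this page: " + page.getResults().size());
    if (page.getNextLink() != null) {
        System.out.println("more results available at: " + page.getNextLink());
    }
}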
#include<bits/stdc++.h> #define endl "\n" #define pb push_back #define ll long long #define d1(x) cerr << #x << "--> " << x << endl #define d2(x,y) cerr << #x << "--> " << x << " | " << #y << "--> " << y <<endl #define d3(x,y,z) cerr << #x << "--> " << x << " | " << #y << "--> " << y <<" | " << #z << "--> "<< z<< endl #define d4(x,y,z,w) cerr << #x << "--> " << x << " | " << #y << "--> " << y <<" | " << #z << "--> "<< z << " | "<< #w << "--> " << w <<endl #define vpll vector<pair<ll,ll>> #define F first #define S second #define T ll tt; cin>>tt; while(tt--) using namespace std; ll mode=1e9+7; const ll maxn=2*1e5+5; const ll inf=99999999999; ll __lcm(ll a, ll b){ return (a*b)/__gcd(a,b); } mt19937 rng(chrono::steady_clock::now().time_since_epoch().count()); ll getRand(ll l, ll r){ uniform_int_distribution<int> uid(l, r); return uid(rng); } template <class A, class B> ostream& operator << (ostream& out, const pair<A, B> &a) { return out << "(" << a.F << ", " << a.S << ")"; } template <class A> ostream& operator << (ostream& out, const vector<A> &v) { out << "["; for (int i=0;i<v.size();i++) { if(i) out << ", "; out << v[i]; } return out << "]"; } void solve(){ ll total, legs; cin>>total>>legs; for (int i=0;i<=200;i++){ for (int j=0;j<=200;j++){ if ((i+j)==total && (2*i+4*j)==legs){ cout<<"Yes"<<endl; return; } } } cout<<"No"<<endl; return; } int main(){ #ifndef ONLINE_JUDGE // freopen("input.txt", "r", stdin); #endif srand(time(0)); ios::sync_with_stdio(0); cin.tie(0); //T{ solve(); } solve(); return 0; }
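// Note: the brute force solves the linear system i + j = total, 2*i + 4*j = legs.
// Equivalently, j = (legs - 2*total) / 2 and i = total - j, so the answer is "Yes"
// exactly when legs is even and both i and j are non-negative integers (and within
// the 0..200 range the loops cover).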
import {
  AnimationStyle,
  convertToPixels,
  findVisualStyleBySize,
  getComponentSize,
  hasChildContent,
  ItemValue,
  StyleButtonReset,
  StyleMargin,
} from "@apptane/react-ui-core";
import { MediaObject } from "@apptane/react-ui-layout";
import { RadioButtonVisualAppearance, useVisualAppearance } from "@apptane/react-ui-theme";
import { css } from "@emotion/react";
import { useCallback } from "react";
import FieldLabel from "./FieldLabel";
import { RadioButtonProps, RadioButtonPropTypes } from "./RadioButton.types";

const StyleContainer = (width: string, inline?: boolean) => css`
  display: ${inline ? "inline-flex" : "flex"};
  width: ${width};
`;

const StyleTransition = (animation: AnimationStyle) => css`
  transition-property: stroke, fill;
  transition-delay: ${animation.delay}ms;
  transition-duration: ${animation.duration}ms;
  transition-timing-function: ${animation.function};
`;

const StyleBase = (animation: AnimationStyle, size: number, readonly?: boolean) => css`
  ${StyleButtonReset};
  display: block;

  // geometry
  width: ${size}px;
  height: ${size}px;

  // behavior
  cursor: ${readonly ? "inherit" : "pointer"};

  > svg {
    display: block;
    stroke-linecap: round;
    stroke-linejoin: round;
    fill-rule: evenodd;

    > circle {
      ${StyleTransition(animation)};
    }
  }
`;

const StyleUncheckedInteractive = (appearance: RadioButtonVisualAppearance) => css`
  &:hover:not(:focus) {
    > svg > circle:first-of-type {
      fill: ${appearance.unchecked.hover.back};
      stroke: ${appearance.unchecked.hover.border};
      stroke-width: ${appearance.unchecked.hover.borderWidth}px;
    }
  }

  &:focus {
    > svg > circle:first-of-type {
      fill: ${appearance.unchecked.focused.back};
      stroke: ${appearance.unchecked.focused.border};
      stroke-width: ${appearance.unchecked.focused.borderWidth}px;
    }
  }
`;

const StyleUncheckedDefault = (appearance: RadioButtonVisualAppearance, readonly?: boolean) => css`
  > svg > circle:first-of-type {
    fill: ${appearance.unchecked.default.back};
    stroke: ${appearance.unchecked.default.border};
    stroke-width: ${appearance.unchecked.default.borderWidth}px;
  }

  ${!readonly && StyleUncheckedInteractive(appearance)};
`;

const StyleUncheckedDisabled = (appearance: RadioButtonVisualAppearance) => css`
  pointer-events: none;
  > svg > circle:first-of-type {
    fill: ${appearance.unchecked.disabled.back};
    stroke: ${appearance.unchecked.disabled.border};
    stroke-width: ${appearance.unchecked.disabled.borderWidth}px;
  }
`;

const StyleCheckedInteractive = (appearance: RadioButtonVisualAppearance) => css`
  &:hover:not(:focus) {
    > svg > circle:first-of-type {
      fill: ${appearance.checked.hover.back};
      stroke: ${appearance.checked.hover.border};
      stroke-width: ${appearance.checked.hover.borderWidth}px;
    }

    > svg > circle:last-of-type {
      fill: ${appearance.checked.hover.glyph};
    }
  }

  &:focus {
    > svg > circle:first-of-type {
      fill: ${appearance.checked.focused.back};
      stroke: ${appearance.checked.focused.border};
      stroke-width: ${appearance.checked.focused.borderWidth}px;
    }

    > svg > circle:last-of-type {
      fill: ${appearance.checked.focused.glyph};
    }
  }
`;

const StyleCheckedDefault = (appearance: RadioButtonVisualAppearance, readonly?: boolean) => css`
  > svg > circle:first-of-type {
    fill: ${appearance.checked.default.back};
    stroke: ${appearance.checked.default.border};
    stroke-width: ${appearance.checked.default.borderWidth}px;
  }

  > svg > circle:last-of-type {
    fill: ${appearance.checked.default.glyph};
  }

  ${!readonly && StyleCheckedInteractive(appearance)};
`;

const StyleCheckedDisabled = (appearance: RadioButtonVisualAppearance) => css`
  pointer-events: none;
  > svg > circle:first-of-type {
fill: ${appearance.checked.disabled.back}; stroke: ${appearance.checked.disabled.border}; stroke-width: ${appearance.checked.disabled.borderWidth}px; } > svg > circle:last-of-type { fill: ${appearance.checked.disabled.glyph}; } `; /** * `RadioButton` component — used within `ToggleGroup`. */ function RadioButton<T extends ItemValue>({ children, label, disabled, readonly, checked, onClick, value, inline, colorMode, appearance, size = "default", width, labelWidth, accessibilityLabel, accessibilityRole, margin, marginTop, marginRight, marginBottom, marginLeft, m, mt, mr, mb, ml, }: RadioButtonProps<T>) { const [visualAppearance, theme] = useVisualAppearance<RadioButtonVisualAppearance>( "radioButton", colorMode, appearance, "none", typeof size === "number" ? "default" : size ); const actualSize = getComponentSize(theme.components.radioButton.sizes, size); const visualStyle = findVisualStyleBySize(theme.components.radioButton.styles, actualSize); const borderRadius = Math.ceil(actualSize / 2 - visualStyle.padding); const bulletRadius = Math.ceil(borderRadius * visualStyle.glyphSize); const clickHandler = useCallback( (event: React.SyntheticEvent) => { if (typeof onClick === "function") { onClick(event, value); } }, [value, onClick] ); // NOTE: we use <svg> graphics to draw both the border and the bullet // to ensure consistent rendering at different DPIs, mixing HTML borders // and SVG doesn't yield satisfactory results const element = ( <button css={[ StyleBase(theme.components.radioButton.animation, actualSize, readonly), !checked && !disabled && StyleUncheckedDefault(visualAppearance, readonly), !checked && disabled && StyleUncheckedDisabled(visualAppearance), checked && !disabled && StyleCheckedDefault(visualAppearance, readonly), checked && disabled && StyleCheckedDisabled(visualAppearance), ]} onClick={disabled || readonly ? undefined : clickHandler} tabIndex={readonly ? -1 : 0} role={accessibilityRole ?? "radio"} aria-checked={checked ? true : false} aria-label={accessibilityLabel} disabled={disabled || readonly} value={value}> <svg xmlns="http://www.w3.org/2000/svg" viewBox={`0 0 ${actualSize} ${actualSize}`} width={actualSize} height={actualSize}> <circle cx={actualSize / 2} cy={actualSize / 2} r={borderRadius} /> {checked && <circle cx={actualSize / 2} cy={actualSize / 2} r={bulletRadius} />} </svg> </button> ); const marginProps = { margin, marginTop, marginRight, marginBottom, marginLeft, m, mt, mr, mb, ml }; return ( <div css={[StyleContainer(convertToPixels(width) ?? "max-content", inline), StyleMargin(marginProps)]}> {hasChildContent(children) || label ? ( <MediaObject media={element} spacing={visualStyle.spacing} header={ label && ( <FieldLabel block colorMode={colorMode} width={labelWidth} disabled={disabled} readonly={readonly} onClick={disabled || readonly ? undefined : clickHandler}> {label} </FieldLabel> ) }> {children} </MediaObject> ) : ( element )} </div> ); } RadioButton.displayName = "RadioButton"; RadioButton.propTypes = RadioButtonPropTypes; export default RadioButton;
// Loads the image entries for the currently selected theme from themes.txt into an array.
// Each line is expected to look like: themeName,image1,image2,...
public String[] LoadBG() {
    try (BufferedReader themefile = new BufferedReader(new FileReader("themes.txt"))) {
        while ((strLine = themefile.readLine()) != null) {
            strThemeElements = strLine.split(",");
            // The first field is the theme name; keep the row that matches the active theme.
            if (strThemeElements[0].equalsIgnoreCase(strTheme)) {
                strThemeImages = strThemeElements;
            }
        }
    } catch (IOException e) {
        System.out.println("Error loading theme images");
    }
    return strThemeImages;
}
// newRole returns a new Role object. // The Role controls access to the tenant and its related clusters. func newRole(scheme *runtime.Scheme, tenant *synv1alpha1.Tenant) (*rbacv1.Role, error) { role := &rbacv1.Role{ ObjectMeta: metav1.ObjectMeta{ Name: tenant.Name, Namespace: tenant.Namespace, }, } setManagedByLabel(role) if err := controllerutil.SetOwnerReference(tenant, role, scheme); err != nil { return nil, err } roleUtil.EnsureRules(role) return role, nil }
n = int(input())

# Sum of the first n positive integers: 1 + 2 + ... + n
temp = (n * (n + 1)) // 2

# Additional term: sum of i * (n - (i + 1)) for i = 1 .. n-2
ex = 0
if n > 2:
    for i in range(1, n - 1):
        ex += i * (n - (i + 1))

print(temp + ex)
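# Note: the loop has a closed form. Using sum_{i=1..m-1} i*(m-i) = C(m+1, 3) with m = n-1,
# the printed value equals n*(n+1)//2 + C(n, 3) = n*(n+1)//2 + n*(n-1)*(n-2)//6
# (the extra term is zero for n <= 2).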
MOD = 10**9+7
# Precompute factorials modulo MOD (enough for arguments up to 10**5+1)
facts = [1]
for i in range(1, 10**5+2):
    facts.append((i*facts[i-1]) % MOD)

def nCk(n, k):
    if n < k:
        return 0
    return (facts[n]*pow(facts[k], MOD-2, MOD) % MOD * pow(facts[n-k], MOD-2, MOD)) % MOD

n = int(input())
a = list(map(int, input().split()))

# Locate the two positions of the value that appears twice
# (the input is assumed to contain n+1 values in 1..n with exactly one duplicate).
indexes = [[] for _ in range(n+1)]  # index 0 stays unused; values range over 1..n
for i in range(n+1):
    indexes[a[i]].append(i)
for idx in indexes:
    if len(idx) == 2:
        idxs = sorted(idx)

for k in range(1, n+1+1):
    p_all = nCk(n+1, k)
    p_double = nCk(n+1-(idxs[1]-idxs[0]+1), k-1)
    print((p_all-p_double) % MOD)
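# Why the formula works: let i < j be the two positions of the duplicated value.
# A value sequence of length k is produced by two different index choices exactly
# when it uses one of the two copies plus k-1 elements strictly outside [i, j],
# so those C(n+1-(j-i+1), k-1) sequences are counted twice and subtracted once:
#     answer(k) = C(n+1, k) - C(n+1-(j-i+1), k-1)   (mod 10**9+7)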
OZONE MONITORING BY GOME-2 ON THE METOP SATELLITES
The first Global Ozone Monitoring Experiment (GOME) has been operated successfully on board ERS-2 since its launch in April 1995. The objective of measuring the global distribution of ozone and several other trace gases has been achieved. Based on this heritage, the advanced GOME-2 has been selected as payload for the first three METOP satellites to cover the need for operational ozone monitoring over the next 15 years. GOME has shown its capability to monitor total column ozone, ozone profiles, nitrogen dioxide, sulphur dioxide, formaldehyde, and halogen oxides. Cloud parameters such as cloud fraction within the field of view, cloud-top height, and cloud optical thickness are measured as well. Aerosol properties such as aerosol optical thickness have been retrieved successfully. Near-real-time data are provided by KNMI, IFE Bremen and DLR for total column ozone, and by KNMI for ozone profiles. GOME was limited by the restricted resources (data rate, mass, power) of the ERS-2 satellite. GOME-2 uses the larger satellite resources of METOP:
• to increase the spatial resolution to 40 km * 40 km for the total column products,
• to provide measurements of both the s- and p-polarised components of the incoming radiation with high spatial sampling (40 * 5 km), and
• to enhance its on-board calibration and characterisation capabilities by adding a white light source.
On-ground calibration and characterisation will be improved by allowing more calibration measurements under operational conditions. The ground processing of Earth radiance and solar irradiance will build on the knowledge gained from more than 4 years of operational processing.
/** * Convert the {@link RetryPolicy} into a gRPC service config for the {@code serviceName}. The * resulting map can be passed to {@link ManagedChannelBuilder#defaultServiceConfig(Map)}. */ public static Map<String, ?> toServiceConfig(String serviceName, RetryPolicy retryPolicy) { List<Double> retryableStatusCodes = RetryUtil.retryableGrpcStatusCodes().stream().map(Double::parseDouble).collect(toList()); Map<String, Object> retryConfig = new HashMap<>(); retryConfig.put("retryableStatusCodes", retryableStatusCodes); retryConfig.put("maxAttempts", (double) retryPolicy.getMaxAttempts()); retryConfig.put("initialBackoff", retryPolicy.getInitialBackoff().toMillis() / 1000.0 + "s"); retryConfig.put("maxBackoff", retryPolicy.getMaxBackoff().toMillis() / 1000.0 + "s"); retryConfig.put("backoffMultiplier", retryPolicy.getBackoffMultiplier()); Map<String, Object> methodConfig = new HashMap<>(); methodConfig.put( "name", Collections.singletonList(Collections.singletonMap("service", serviceName))); methodConfig.put("retryPolicy", retryConfig); return Collections.singletonMap("methodConfig", Collections.singletonList(methodConfig)); }
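// Illustrative usage sketch: the service name and target below are placeholders, and the
// RetryPolicy instance is assumed to come from the caller's configuration. Requires
// io.grpc.ManagedChannel and io.grpc.ManagedChannelBuilder on the classpath.
Map<String, ?> serviceConfig = toServiceConfig("my.package.MyService", retryPolicy);
ManagedChannel channel =
    ManagedChannelBuilder.forTarget("example.com:4317")
        .defaultServiceConfig(serviceConfig) // hand the generated retry config to the channel
        .enableRetry()                       // ensure retries are not disabled for this channel
        .build();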
import PrototypeMixins from "../common/decorators/PrototypeMixins";
import ByteArrayStream from "./ByteArrayStream";
import IWriteableByteArrayStream from "./IWriteableByteArrayStream";
import {IWriteableByteArrayStreamEx, WriteableByteArrayStreamExImpl} from "./IWriteableByteArrayStreamEx";
import {stream$ensureCapacity, stream$setLength, stream$write} from "./private/RWHelper";

const DEFAULT_ARRAY_LENGTH = 32;

@PrototypeMixins(WriteableByteArrayStreamExImpl)
class WriteOnlyByteArrayStream extends ByteArrayStream implements IWriteableByteArrayStream {

    private constructor(array: Uint8Array, length: number, extensible: boolean) {
        super(array);
        this._length = length;
        this._extensible = extensible;
    }

    static new(): IWriteableByteArrayStreamEx {
        return new WriteOnlyByteArrayStream(new Uint8Array(DEFAULT_ARRAY_LENGTH), 0, true) as unknown as IWriteableByteArrayStreamEx;
    }

    static fromArray(array: Uint8Array, extensible: boolean = true): IWriteableByteArrayStreamEx {
        return new WriteOnlyByteArrayStream(array, array.length, extensible) as unknown as IWriteableByteArrayStreamEx;
    }

    write(buffer: Uint8Array): void;
    write(buffer: Uint8Array, offset: number, count: number): void;
    write(buffer: Uint8Array, offset: number = 0, count: number = buffer.length): void {
        stream$write.call(this, buffer, offset, count);
    }

    get capacity(): number {
        return this.array.length;
    }

    get length(): number {
        return this._length;
    }

    set length(v: number) {
        stream$setLength.call(this, v);
    }

    // Must be 'protected' (not private) to be called from extension 'this' function.
    protected ensureCapacity(value: number): void {
        stream$ensureCapacity.call(this, value);
    }

    // Again, must not be private.
    protected _length: number;
    protected _extensible: boolean;

}

interface WriteOnlyByteArrayStream extends IWriteableByteArrayStreamEx {
}

export default WriteOnlyByteArrayStream;
The Australian government has authorized dingo “baiting programs” which allow farmers, not just wildlife officials, to lay poisoned bait to kill them. Why? Because they cannot think of any other way to placate the angry ranchers who complain about their sheep getting killed. Killing one species to save another doesn’t make sense, and with all of the technology and progressive thinking available today, this is the best the government can come up with.

This poor approach is not only nonsensical, but it is also creating a huge problem for the balance of the ecosystem. Removing large carnivores can result in simultaneous population outbreaks of herbivores and smaller predators. And these population outbreaks, in turn, can have deadly effects on smaller mammals.

Here’s how that works: The absence of dingoes in forests allows big grass-eaters like kangaroos and wallabies to flourish, as well as red foxes. While the swelling ranks of foxes feast on small ground-dwelling mammals, decimating their numbers, the growing population of herbivores clears away ground cover, making it even harder for those animals to evade foxes. Which means more dead small mammals and an ecosystem too devastated to recover.

Poisoning of dingoes is counter-productive for biodiversity conservation, and we need to take a stand to bring this silly approach to a stop. Ranchers and government need to be engaged and a more proactive solution brought forward in which both farmers and their livestock can live in harmony with the indigenous wildlife; a solution that does not destroy an entire ecosystem.

Let’s send a clear message to the Australian government. We will not stand by while you use outdated and cruel methods to protect livestock. Make a change now before it is too late.
Before Twitter announced plans to kill off Vine, the app's biggest stars reportedly banded together and pitched a plan they thought could save it.

Mic reports that 18 of the app's top creators had a meeting with Vine's Creative Development Lead Karyn Spencer last fall, during which they demanded $1.2 million each to keep pumping out content for the platform. The meeting was organized by Marcus Johns and Piques, who, along with other Vine stars, "had noticed a sharp dropoff in engagement on the app," Mic reports. In exchange for the huge payouts, they each promised to create 12 pieces of original content a month, or three vines each week.

"If Vine agreed, they could theoretically generate billions of views and boost engagement on a starving app," the report notes. "If they said no, all the top stars on the platform would walk."

The group — which reportedly included Jerry Purpdrank, Christian Delgrosso, Curtis Lepore, Alx James, and DeStorm Power — also urged Vine to make several product changes and open "a more direct line of communication" with creators. They wanted Vine to do a better job of tackling harassment on the platform, the ability to add links to Vine captions, a better recommendation page, and more editing tools, and they wanted these things right away.

"They never made changes, or when they did it was too late," Piques told Mic. Vine rejected the proposal, and the stars stopped posting original content. And the rest, they say, is history.

Meanwhile, word has it that two of Vine's cofounders — Rus Yusupov and Colin Kroll — are already on to their next project: a Periscope-like app called Hype. According to Fortune, Hype is a "live video broadcasting app that lets users add music and animations to their videos." The app is currently in beta.

This article originally appeared on PCMag.com.
/**
 * Recover a file from a remote location and copy it into localDir
 * @param pRemoteFile file to recover (from a webdav repository)
 * @param pLocalDir directory to copy file to
 * @return File for the newly copied file, null if not copied
 */
public static File recoverFile(String pRemoteFile, String pLocalDir) {
    if (!pLocalDir.endsWith("/")) {
        pLocalDir += "/";
    }
    String localFile = pLocalDir + new File(pRemoteFile).getName();
    String remoteURI = getHTTPFileURI(gWebdavSettingsGet, pRemoteFile);

    HttpsURLConnection conn = null;
    try {
        conn = setupConnection(gWebdavSettingsGet, remoteURI, "GET");
        conn.connect();
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    if (conn == null) {
        // Connection could not be established; honour the "null if not copied" contract.
        return null;
    }

    long startTime = System.currentTimeMillis();
    System.out.println("Copying: " + conn.getURL() + " -> " + localFile);

    long size = Long.parseLong(conn.getHeaderField("Content-Length"));
    String lastModified = conn.getHeaderField("Last-Modified");
    SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz");
    Date date = null;
    if (lastModified != null) {
        try {
            date = sdf.parse(lastModified);
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }

    long count = 0;
    try (BufferedInputStream fis = new BufferedInputStream(conn.getInputStream());
         FileOutputStream fos = new FileOutputStream(localFile)) {
        byte[] buffer = new byte[32768];
        while (count < size) {
            int bytesRead = fis.read(buffer);
            if (bytesRead < 0) {
                // Stream ended before Content-Length bytes arrived.
                break;
            }
            fos.write(buffer, 0, bytesRead);
            count += bytesRead;
        }
    } catch (IOException e) {
        e.printStackTrace();
    }

    File outputFile = new File(localFile);
    if (date != null) {
        outputFile.setLastModified(date.getTime());
    }
    System.out.println("Copied [" + count + "] bytes in [" + (System.currentTimeMillis() - startTime) + "] ms");
    return outputFile;
}
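// Illustrative call only; the remote path and local directory are made-up values and assume
// the surrounding class's WebDAV settings (gWebdavSettingsGet) have already been initialised.
File restored = recoverFile("/backups/report.pdf", "/tmp/restore");
if (restored != null) {
    System.out.println("Recovered " + restored.length() + " bytes to " + restored.getAbsolutePath());
}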
Giving birth to live young is one of the traits we use to distinguish mammals from other animals. But certain kinds of lizards, snakes and amphibians, both living and extinct, also reproduce without laying eggs. In fact, live birth (or viviparity) has evolved more than 100 separate times in non-mammal species throughout history. It seems to have been a common reproductive strategy in particular for extinct aquatic reptiles, such as the fish-like ichthyosaurs, plesiosaurs and mosasaurs that lived at the same time as the dinosaurs.

But one group of animals known as Archosauromorpha, which includes crocodiles, birds and their ancestors the dinosaurs, has never been known to give birth – until now. A recently unearthed fossil, described in a new study by a team of scientists from China, the US, UK and Australia, shows that an ancient species of archosauromorph was giving birth about 245 million years ago.

The newly described specimen belongs to the species Dinocephalosaurus orientalis and was found in sediments from the early Triassic period in Yunnan Province, China. Dinocephalosaurus had a body length of around a metre but with an elongated neck of nearly twice that length. Its skull was relatively small and equipped with needle-like teeth adapted to catch fish and squids. The shape of its skeleton suggests that it lived in water, but scientists originally thought it might have ventured onto land to lay eggs, as all known archosauromorphs do. The new fossil find now proves this theory to be wrong, because it was found with an embryo preserved in its abdominal region. This provides compelling evidence that this species of archosauromorph gave birth to live young.

Compelling evidence

It is often hard to reconstruct a picture of an extinct creature and how it lived from a fossil, but a number of features strongly suggest that the new specimen does indeed represent a pregnant Dinocephalosaurus. The embryo’s skeleton has a very similar shape to the adult’s, showing that both belonged to the same species and that the embryo was in a very mature stage. The embryo is fully enclosed by the bones of the adult specimen and located in the pelvic region. Remains of a partially digested fish were also found further up and between the ribs of the adult. This not only helps us identify the mother’s last meal, but also rules out the possibility that the embryo might have been devoured prey.
The embryo was also found in a curled-up position with the neck pointed towards the chest and forelimbs, which is a typical pose for vertebrate embryos. The withering of soft tissues such as muscles after an adult vertebrate dies means they are often fossilised in a typical death pose with the neck and tail arched back. In this case, the mother animal was preserved in exactly this position but not the embryo. This makes it unlikely that both animals died at different times and came to lie on top of each other by coincidence. Together, this provides good evidence that the new fossil is indeed that of a pregnant Dinocephalosaurus and its embryo.

Exceptionally preserved fossils, such as this one, allow a unique glimpse into the life of organisms over 245 million years ago, but they also have more far-reaching implications. Until now, it was assumed that all Archosauromorpha laid eggs. Modern members, represented by birds and crocodiles, are without exception oviparous (egg-laying), and fossilised eggs of dinosaurs and pterosaurs further supported this assumption. Finding a pregnant Dinocephalosaurus demonstrates that at least some extinct archosauromorphs were giving birth to living young. It also means our knowledge of how archosauromorphs reproduced goes back 50 million years further than was previously possible. Until now, our oldest relevant fossils of this group were dinosaur eggs from the early Jurassic period (about 190 million years ago).

The results of this study also raise several questions. Viviparity has evolved independently and numerous times in all major types of vertebrate, with mammals probably the most prominent and successful example. Although giving birth is physically and energetically taxing for the parent, it has clear advantages for the offspring, which receives extra nutrients and protection, and develops without being affected by environmental conditions. Yet archosauromorphs evolved away from this reproductive strategy to become the egg-laying dinosaurs, and eventually the crocodiles and birds that we know. Why was this? We will now have to hope that future fossil finds might reveal another piece to the evolutionary puzzle.

Stephan Lautenschlager, lecturer in palaeobiology, University of Birmingham. This article first appeared on The Conversation (theconversation.com)
Mission accomplished: a triumphant Ukip has achieved its goals and may as well pack up shop. The hard-right populist party is – in the words of its former leadership frontrunner Steven Woolfe – “in a death spiral”. Fresh from being literally hospitalised by the party’s internal divisions, Woolfe has marched out of this increasingly disorderly rabble. The party has been deprived of its toxic but effective figurehead, Nigel Farage, whose new pet project is spinning for Donald Trump; and with no clear unifying mission or leader, the party’s internal factions are abandoning any semblance of discipline. The party always represented a mishmash: some are rightwing libertarians, others anti-immigration national conservatives, others Tories-in-exile, others interested in forging a working-class rightwing populism. They are squabbling housemates wondering if they really belong under the same roof. But hey, a flailing Ukip shouldn’t mourn. It helped engineer the EU referendum in concert with Tory backbenchers, and now its founding purpose has been achieved. But more importantly, the Conservative party has been Ukip-ised. Farage praised Theresa May’s “remarkable speech”, crowing that his party had “changed the centre of gravity of British politics. Virtually everything she said in that speech are things that I’ve said to the Ukip conference over the course of the last five or six years”. He is right to brag. A party that won only one parliamentary seat last year has effectively captured the commanding heights of the Tory party. We now have a Conservative government pursuing a harsh and chaotic Brexit; demonising its critics as unpatriotic internal enemies; pursuing inflammatory anti-immigration ruses, such as floating the idea of drawing up lists of foreign workers; and pledging to expand selection in schools. If Ukip is dead, long live Ukip. Ukip could still live on in some form, of course. It could be the option on the ballot paper for those who want to hold May’s feet to the fire: that’s why she’s been so keen to stress that Brexit means Brexit. Under the previous leadership, Labour made a miscalculation in believing that Ukip’s rise represented a fracturing of the right that would benefit them. In the end, it was Labour’s traditional coalition that fractured most lethally: in the direction of the SNP, Greens and Ukip. Ukip’s turmoil certainly does underline the necessity for Labour to present a clear, coherent, inspiring vision. May’s alarmingly high poll rating is only partly explained by Labour’s decline: erstwhile Ukip voters flocking to the Tories is another reason. There are working-class Ukip voters who tell you they’d never vote Tory; May’s liberal use of “working class” is an attempt to address that. A significant chunk of working-class Britain feels alienated from Labour. That has to be addressed with a clear vision and a message communicated in language that resonates with people outside the political bubble. These are febrile times and Labour has an opportunity. If the party blows it, it won’t only be the Tories who are Ukip-ised – it will be a fate that befalls the whole country.
use std::io::BufRead; const DEBUG: bool = false; fn main() { let mut f = read_field(); let mut col = 0; while col < f.w as usize { while col < f.w as usize && f.is_column_empty(col) { f.remove_column(col); } col += 1; } let mut row = 0; while row < f.h as usize { while row < f.h as usize && f.is_row_empty(row) { f.remove_row(row); } row += 1; } f.show(); } fn read_field() -> Field { let stdin = std::io::stdin(); let mut input = stdin.lock(); let mut l = String::new(); let _ = input.read_line(&mut l); let hw: Vec<u32> = l .split_whitespace() .map(|n| n.parse().expect("readnumerror!")) .collect(); let h = hw[0]; let w = hw[1]; let mut f: Vec<Cell> = Vec::with_capacity((h * w) as usize); for line in input.lines() { for c in line.expect("read line error").trim().chars() { match c { '.' => f.push(false), '#' => f.push(true), _ => panic!(format!("unexpected char {}", c)), } } } Field { h: h, w: w, field: f, } } struct Field { h: u32, w: u32, field: Vec<Cell>, } impl Field { fn is_column_empty(&self, n: usize) -> bool { let mut i = n; while i < self.field.len() { if self.field[i] { return false; } i += self.w as usize; } true // !self // .field // .iter() // .skip(n) // .step_by(self.w as usize) // .any(|&b| b) } fn is_row_empty(&self, n: usize) -> bool { if DEBUG { println!( "DEBUG: n {} f w{:?} h{:?}, f{:?}", n, self.w, self.h, self.field ); } !self .field .iter() .skip(n * self.w as usize) .take(self.w as usize) .any(|&b| b) } fn remove_column(&mut self, n: usize) { if DEBUG { self.show(); println!("|||||||||||@@@@"); } let mut l = n; for _ in 0..self.h { self.field.remove(l); l += self.w as usize - 1; } self.w -= 1; } fn remove_row(&mut self, n: usize) { if DEBUG { self.show(); println!("@@@@@@@@@@@@@@@"); } for _ in 0..self.w { self.field.remove(n * self.w as usize); } self.h -= 1; } fn show(&self) { for y in 0..self.h as usize { let line: String = self.field[y * self.w as usize..(y + 1) * self.w as usize] .iter() .map(|&c| if c { '#' } else { '.' }) .collect(); println!("{}", line); } } } type Cell = bool;
/** * Creates elemental and semantic vectors for each concept, and elemental vectors for predicates. * * @throws IOException */ private void initialize() throws IOException { if (this.luceneUtils == null) { this.luceneUtils = new LuceneUtils(flagConfig); } elementalItemVectors = new ElementalVectorStore(flagConfig); semanticItemVectors = new VectorStoreRAM(flagConfig); elementalPredicateVectors = new ElementalVectorStore(flagConfig); semanticPredicateVectors = new VectorStoreRAM(flagConfig); flagConfig.setContentsfields(itemFields); HashSet<String> addedConcepts = new HashSet<String>(); int tc = 0; for (String fieldName : itemFields) { Terms terms = luceneUtils.getTermsForField(fieldName); if (terms == null) { throw new NullPointerException(String.format( "No terms for field '%s'. Please check that index at '%s' was built correctly for use with PSI.", fieldName, flagConfig.luceneindexpath())); } TermsEnum termsEnum = terms.iterator(); BytesRef bytes; while((bytes = termsEnum.next()) != null) { Term term = new Term(fieldName, bytes); if (!luceneUtils.termFilter(term)) { VerbatimLogger.fine("Filtering out term: " + term + "\n"); continue; } if (!addedConcepts.contains(term.text())) { addedConcepts.add(term.text()); elementalItemVectors.getVector(term.text()); semanticItemVectors.putVector(term.text(), VectorFactory.createZeroVector( flagConfig.vectortype(), flagConfig.dimension())); tc++; if ((tc > 0) && ((tc % 10000 == 0) || ( tc < 10000 && tc % 1000 == 0 ))) { VerbatimLogger.info("Initialized " + tc + " term vectors ... "); } } } } Terms predicateTerms = luceneUtils.getTermsForField(PREDICATE_FIELD); String[] dummyArray = new String[] { PREDICATE_FIELD }; TermsEnum termsEnum = predicateTerms.iterator(); BytesRef bytes; while((bytes = termsEnum.next()) != null) { Term term = new Term(PREDICATE_FIELD, bytes); if (!luceneUtils.termFilter(term, dummyArray, 0, Integer.MAX_VALUE, Integer.MAX_VALUE, 1)) { continue; } elementalPredicateVectors.getVector(term.text().trim()); if (flagConfig.trainingcycles() > 0) semanticPredicateVectors.putVector(term.text().trim(), VectorFactory.createZeroVector( flagConfig.vectortype(), flagConfig.dimension())); elementalPredicateVectors.getVector(term.text().trim() + "-INV"); if (flagConfig.trainingcycles() > 0) semanticPredicateVectors.putVector(term.text().trim() + "-INV", VectorFactory.createZeroVector( flagConfig.vectortype(), flagConfig.dimension())); } }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! Serde code to convert from protocol buffers to Rust data structures. use std::convert::{TryFrom, TryInto}; use std::sync::Arc; use crate::error::BallistaError; use crate::serde::{from_proto_binary_op, proto_error, protobuf}; use crate::{convert_box_required, convert_required}; use chrono::{TimeZone, Utc}; use datafusion::catalog::catalog::{CatalogList, MemoryCatalogList}; use datafusion::datasource::object_store::local::LocalFileSystem; use datafusion::datasource::object_store::{FileMeta, ObjectStoreRegistry, SizedFile}; use datafusion::datasource::PartitionedFile; use datafusion::execution::context::{ ExecutionConfig, ExecutionContextState, ExecutionProps, }; use datafusion::execution::runtime_env::RuntimeEnv; use datafusion::physical_plan::file_format::FileScanConfig; use datafusion::physical_plan::window_functions::WindowFunction; use datafusion::physical_plan::{ expressions::{ BinaryExpr, CaseExpr, CastExpr, Column, InListExpr, IsNotNullExpr, IsNullExpr, Literal, NegativeExpr, NotExpr, TryCastExpr, DEFAULT_DATAFUSION_CAST_OPTIONS, }, functions::{self, ScalarFunctionExpr}, Partitioning, }; use datafusion::physical_plan::{ColumnStatistics, PhysicalExpr, Statistics}; use protobuf::physical_expr_node::ExprType; impl From<&protobuf::PhysicalColumn> for Column { fn from(c: &protobuf::PhysicalColumn) -> Column { Column::new(&c.name, c.index as usize) } } impl TryFrom<&protobuf::PhysicalExprNode> for Arc<dyn PhysicalExpr> { type Error = BallistaError; fn try_from(expr: &protobuf::PhysicalExprNode) -> Result<Self, Self::Error> { let expr_type = expr .expr_type .as_ref() .ok_or_else(|| proto_error("Unexpected empty physical expression"))?; let pexpr: Arc<dyn PhysicalExpr> = match expr_type { ExprType::Column(c) => { let pcol: Column = c.into(); Arc::new(pcol) } ExprType::Literal(scalar) => { Arc::new(Literal::new(convert_required!(scalar.value)?)) } ExprType::BinaryExpr(binary_expr) => Arc::new(BinaryExpr::new( convert_box_required!(&binary_expr.l)?, from_proto_binary_op(&binary_expr.op)?, convert_box_required!(&binary_expr.r)?, )), ExprType::AggregateExpr(_) => { return Err(BallistaError::General( "Cannot convert aggregate expr node to physical expression" .to_owned(), )); } ExprType::WindowExpr(_) => { return Err(BallistaError::General( "Cannot convert window expr node to physical expression".to_owned(), )); } ExprType::Sort(_) => { return Err(BallistaError::General( "Cannot convert sort expr node to physical expression".to_owned(), )); } ExprType::IsNullExpr(e) => { Arc::new(IsNullExpr::new(convert_box_required!(e.expr)?)) } ExprType::IsNotNullExpr(e) => { Arc::new(IsNotNullExpr::new(convert_box_required!(e.expr)?)) } ExprType::NotExpr(e) => { Arc::new(NotExpr::new(convert_box_required!(e.expr)?)) } 
ExprType::Negative(e) => { Arc::new(NegativeExpr::new(convert_box_required!(e.expr)?)) } ExprType::InList(e) => Arc::new(InListExpr::new( convert_box_required!(e.expr)?, e.list .iter() .map(|x| x.try_into()) .collect::<Result<Vec<_>, _>>()?, e.negated, )), ExprType::Case(e) => Arc::new(CaseExpr::try_new( e.expr.as_ref().map(|e| e.as_ref().try_into()).transpose()?, e.when_then_expr .iter() .map(|e| { Ok(( convert_required!(e.when_expr)?, convert_required!(e.then_expr)?, )) }) .collect::<Result<Vec<_>, BallistaError>>()? .as_slice(), e.else_expr .as_ref() .map(|e| e.as_ref().try_into()) .transpose()?, )?), ExprType::Cast(e) => Arc::new(CastExpr::new( convert_box_required!(e.expr)?, convert_required!(e.arrow_type)?, DEFAULT_DATAFUSION_CAST_OPTIONS, )), ExprType::TryCast(e) => Arc::new(TryCastExpr::new( convert_box_required!(e.expr)?, convert_required!(e.arrow_type)?, )), ExprType::ScalarFunction(e) => { let scalar_function = datafusion_proto::protobuf::ScalarFunction::from_i32(e.fun) .ok_or_else(|| { proto_error(format!( "Received an unknown scalar function: {}", e.fun, )) })?; let args = e .args .iter() .map(|x| x.try_into()) .collect::<Result<Vec<_>, _>>()?; let catalog_list = Arc::new(MemoryCatalogList::new()) as Arc<dyn CatalogList>; let ctx_state = ExecutionContextState { catalog_list, scalar_functions: Default::default(), aggregate_functions: Default::default(), config: ExecutionConfig::new(), execution_props: ExecutionProps::new(), object_store_registry: Arc::new(ObjectStoreRegistry::new()), runtime_env: Arc::new(RuntimeEnv::default()), }; let fun_expr = functions::create_physical_fun( &(&scalar_function).into(), &ctx_state.execution_props, )?; Arc::new(ScalarFunctionExpr::new( &e.name, fun_expr, args, &convert_required!(e.return_type)?, )) } }; Ok(pexpr) } } impl TryFrom<&protobuf::physical_window_expr_node::WindowFunction> for WindowFunction { type Error = BallistaError; fn try_from( expr: &protobuf::physical_window_expr_node::WindowFunction, ) -> Result<Self, Self::Error> { match expr { protobuf::physical_window_expr_node::WindowFunction::AggrFunction(n) => { let f = datafusion_proto::protobuf::AggregateFunction::from_i32(*n) .ok_or_else(|| { proto_error(format!( "Received an unknown window aggregate function: {}", n )) })?; Ok(WindowFunction::AggregateFunction(f.into())) } protobuf::physical_window_expr_node::WindowFunction::BuiltInFunction(n) => { let f = datafusion_proto::protobuf::BuiltInWindowFunction::from_i32(*n) .ok_or_else(|| { proto_error(format!( "Received an unknown window builtin function: {}", n )) })?; Ok(WindowFunction::BuiltInWindowFunction(f.into())) } } } } pub fn parse_protobuf_hash_partitioning( partitioning: Option<&protobuf::PhysicalHashRepartition>, ) -> Result<Option<Partitioning>, BallistaError> { match partitioning { Some(hash_part) => { let expr = hash_part .hash_expr .iter() .map(|e| e.try_into()) .collect::<Result<Vec<Arc<dyn PhysicalExpr>>, _>>()?; Ok(Some(Partitioning::Hash( expr, hash_part.partition_count.try_into().unwrap(), ))) } None => Ok(None), } } impl TryFrom<&protobuf::PartitionedFile> for PartitionedFile { type Error = BallistaError; fn try_from(val: &protobuf::PartitionedFile) -> Result<Self, Self::Error> { Ok(PartitionedFile { file_meta: FileMeta { sized_file: SizedFile { path: val.path.clone(), size: val.size, }, last_modified: if val.last_modified_ns == 0 { None } else { Some(Utc.timestamp_nanos(val.last_modified_ns as i64)) }, }, partition_values: val .partition_values .iter() .map(|v| v.try_into()) .collect::<Result<Vec<_>, _>>()?, }) 
} } impl TryFrom<&protobuf::FileGroup> for Vec<PartitionedFile> { type Error = BallistaError; fn try_from(val: &protobuf::FileGroup) -> Result<Self, Self::Error> { val.files .iter() .map(|f| f.try_into()) .collect::<Result<Vec<_>, _>>() } } impl From<&protobuf::ColumnStats> for ColumnStatistics { fn from(cs: &protobuf::ColumnStats) -> ColumnStatistics { ColumnStatistics { null_count: Some(cs.null_count as usize), max_value: cs.max_value.as_ref().map(|m| m.try_into().unwrap()), min_value: cs.min_value.as_ref().map(|m| m.try_into().unwrap()), distinct_count: Some(cs.distinct_count as usize), } } } impl TryInto<Statistics> for &protobuf::Statistics { type Error = BallistaError; fn try_into(self) -> Result<Statistics, Self::Error> { let column_statistics = self .column_stats .iter() .map(|s| s.into()) .collect::<Vec<_>>(); Ok(Statistics { num_rows: Some(self.num_rows as usize), total_byte_size: Some(self.total_byte_size as usize), // No column statistic (None) is encoded with empty array column_statistics: if column_statistics.is_empty() { None } else { Some(column_statistics) }, is_exact: self.is_exact, }) } } impl TryInto<FileScanConfig> for &protobuf::FileScanExecConf { type Error = BallistaError; fn try_into(self) -> Result<FileScanConfig, Self::Error> { let schema = Arc::new(convert_required!(self.schema)?); let projection = self .projection .iter() .map(|i| *i as usize) .collect::<Vec<_>>(); let projection = if projection.is_empty() { None } else { Some(projection) }; let statistics = convert_required!(self.statistics)?; Ok(FileScanConfig { object_store: Arc::new(LocalFileSystem {}), file_schema: schema, file_groups: self .file_groups .iter() .map(|f| f.try_into()) .collect::<Result<Vec<_>, _>>()?, statistics, projection, limit: self.limit.as_ref().map(|sl| sl.limit as usize), table_partition_cols: vec![], }) } }
def DSP_info(): sr = ctypes.c_int() fmt = ctypes.c_int() out_ch = ctypes.c_int() in_ch = ctypes.c_int() resampler = ctypes.c_int() bits = ctypes.c_int() call_fmod("FMOD_System_GetSoftwareFormat", ctypes.byref(sr), ctypes.byref(fmt), ctypes.byref(out_ch), ctypes.byref(in_ch), ctypes.byref(resampler), ctypes.byref(bits)) result = {'sample_rate':sr.value, "format":fmt.value, "out_channels":out_ch.value, "in_channels":in_ch.value, "resampler":resampler.value, "bits":bits.value} return result
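A possible usage sketch for DSP_info() above; it assumes the FMOD system behind the call_fmod wrapper has already been initialised elsewhere in this module (both the wrapper and that setup are assumptions, not shown in this snippet).

# Hypothetical usage; requires the FMOD system behind call_fmod to be initialised first.
info = DSP_info()
print("Mixer: {sample_rate} Hz, {out_channels} output channel(s), "
      "{bits}-bit, resampler={resampler}".format(**info))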
import { doShowAlert, doHideAlert } from 'app/ui/actions'; import { UIActionTypes } from 'app/ui/types'; import reducer from '.'; describe('alert reducer', () => { it('should return the initial state', () => { const expectedState = { message: '', isVisible: false, }; expect(reducer(undefined, {} as UIActionTypes)).toEqual(expectedState); }); it('should handle ALERT_SHOW', () => { // Arrange const message = 'Invalid credentials'; const expectedState = { message, isVisible: true, }; // Act const action = doShowAlert(message); // Assert expect(reducer(undefined, action)).toEqual(expectedState); }); it('should handle ALERT_HIDE', () => { // Arrange const expectedState = { message: '', isVisible: false, }; // Act const action = doHideAlert(); // Assert expect(reducer(undefined, action)).toEqual(expectedState); }); });
Effects of Diet and Exercise on Peripheral Vascular Disease. In brief A 46-year-old man presented with symptoms of peripheral vascular disease in 1966. In 1976 arteriography revealed 100% occlusion of both femoral arteries at midthigh and some reconstitution of flow via collaterals into the popliteal region. His cholesterol level was initially 407 mg/100 ml, and his walking tolerance was 100 yards. After a 26-day stay at the Pritikin Longevity Center, his cholesterol dropped from 230 mg/100 ml to 130 mg/100 ml, and his walking tolerance increased to 3 miles in one hour with little leg pain. He has run more than 20 road races and completed a marathon. A recent exercise Doppler exam and a second arteriogram indicated a significant increase in blood flow due to dilation of deep femoral arteries and existing collateral vessels.
/** * Created by root on 5/3/17. */ public class MyVariant implements Serializable { private String bases; //private String genotype; private String alleles; private String reference; public String getBases() { return bases; } public void setBases(String bases) { this.bases = bases; } public String getAlleles() { return alleles; } public void setAlleles(String alleles) { this.alleles = alleles; } public String getReference() { return reference; } public void setReference(String reference) { this.reference = reference; } /*public void setGenotype(String genotype) { this.genotype = genotype; } public String getGenotype() { return genotype; }*/ }
use std::io::{self, Read, Write}; use termion::{clear, cursor}; use termion::color as termcol; use termion::event::Key; use termion::input::TermRead; use board::{self, Board, Coord, Move, Tile}; use error; const BOARD_WIDTH: u16 = 32; const BOARD_HEIGHT: u16 = 12; const SLEEP_DURATION: u64 = 500; enum GameResult { RedWin, RedLoss, Draw, Ongoing } struct Game<R, W: Write> { board: Board, cursor: Coord, sel: Option<Coord>, highlighted: Vec<Coord>, size: (u16, u16), stdin: R, stdout: W } impl<R, W: Write> Drop for Game<R, W> { fn drop(&mut self) { write!( self.stdout, "{}{}{}", clear::All, cursor::Show, cursor::Goto(1, 1) ).unwrap() } } pub fn init<R: Read, W: Write>(stdin: R, mut stdout: W, size: (u16, u16)) -> error::Result<()> { write!(stdout, "{}", clear::All)?; let mut game = Game { board: Board::new(), cursor: Coord { x: 0, y: 9 }, sel: None, highlighted: vec![], size: size, stdin: stdin.keys(), stdout: stdout, }; game.setup(board::Colour::Red)?; game.board.randomise(board::Colour::Blue); // game.board.randomise(board::Colour::Red); game.refresh(board::Colour::Red)?; game.run() } impl<R: Iterator<Item = Result<Key, io::Error>>, W: Write> Game<R, W> { /// The main game loop. pub fn run(&mut self) -> error::Result<()> { let mut player = board::Colour::Red; macro_rules! mv { ($x:expr, $y:expr) => (match self.cursor.offset($x, $y) { Some(c) => c, None => self.cursor }); } while let Ok(k) = self.stdin.next().unwrap() { use termion::event::Key::*; match k { Char('w') | Up => self.cursor = mv!(0, -1), Char('a') | Left => self.cursor = mv!(-1, 0), Char('s') | Down => self.cursor = mv!(0, 1), Char('d') | Right => self.cursor = mv!(1, 0), Char('q') => break, Char(' ') | Char('\n') => { match self.sel { Some(selected) => { if self.highlighted.contains(&self.cursor) { // Conduct the move. match self.board.tile_at(self.cursor) { // Show the piece attempting to be taken, // then conduct the results. Tile::Piece(p_enemy, _) => { use board::BattleResult::*; if let Tile::Piece(p_owned, _) = self.board.tile_at(selected) { match p_owned.attack(p_enemy) { // FIXME Probably some way // of doing this without // reallocation, even if // it's just making `reveal` // take a mutable. Victory => { let cur = self.cursor; self.reveal(cur, player)?; self.board.apply_move(Move::new(selected, self.cursor)); }, Loss => { let cur = self.cursor; self.reveal(cur, player)?; self.board.set_tile(selected, Tile::Empty); } Draw => { let cur = self.cursor; self.reveal(cur, player)?; self.board.set_tile(selected, Tile::Empty); self.board.set_tile(self.cursor, Tile::Empty); } } } } // Else just move. _ => self.board .apply_move(Move { from: selected, to: self.cursor }), } player = player.other(); } self.sel = None; self.highlighted.clear(); } None => { if let Tile::Piece(_, col) = self.board.tile_at(self.cursor) { if player != col { continue } } // Highlight valid spaces let moves = self.board.find_moves(self.cursor); let coords = moves.iter().map(|m| m.to).collect::<Vec<_>>(); if !coords.is_empty() { self.highlighted = coords; self.sel = Some(self.cursor); } } } } _ => () } self.refresh(player)?; } Ok(()) } /// Prompts the user to set up their side of the board. /// /// By default, places pieces in order valued highest to lowest, with /// stationary pieces first (i.e., flag, bombs, marshall, general, ...). 
fn setup(&mut self, player: board::Colour) -> error::Result<()> { use board::Piece::*; let mut to_place = vec![ Flag, Bomb, Bomb, Bomb, Bomb, Bomb, Bomb, Marshall, General, Colonel, Colonel, Major, Major, Major, Captain, Captain, Captain, Captain, Lieutenant, Lieutenant, Lieutenant, Lieutenant, Sergeant, Sergeant, Sergeant, Sergeant, Miner, Miner, Miner, Miner, Miner, Scout, Scout, Scout, Scout, Scout, Scout, Scout, Scout, Spy ]; macro_rules! mv { ($x:expr, $y:expr) => (match self.cursor.offset($x, $y) { Some(c) => c, None => self.cursor }); } let offset = match player { board::Colour::Red => 6, board::Colour::Blue => 0, }; for x in 0 .. 10 { for y in 0 .. 4 { let coord = Coord {x: x, y: y + offset}; self.highlighted.push(coord); } } self.draw_status( format!("Next to place: {}", to_place[0]) )?; self.refresh(player)?; while let Ok(k) = self.stdin.next().unwrap() { use termion::event::Key::*; match k { Char('w') | Up => self.cursor = mv!(0, -1), Char('a') | Left => self.cursor = mv!(-1, 0), Char('s') | Down => self.cursor = mv!(0, 1), Char('d') | Right => self.cursor = mv!(1, 0), Char('q') => return Err(error::Error::EarlyExit), Char('e') => { let last = to_place[0]; to_place.push(last); to_place.remove(0); } Char(' ') | Char('\n') => { if self.highlighted.contains(&self.cursor) { let piece = to_place[0]; let tile = Tile::Piece(piece, player); self.board.set_tile(self.cursor, tile); self.highlighted.remove_item(&self.cursor); to_place.remove(0); } } _ => {} } self.refresh(player)?; if to_place.is_empty() { break } else { self.draw_status( format!("Next to place: {}", to_place[0]) )?; self.stdout.flush()?; } } Ok(()) } fn refresh(&mut self, player: board::Colour) -> error::Result<()> { self.draw_board(player)?; self.highlight()?; self.draw_cursor(player)?; self.stdout.flush()?; Ok(()) } fn term_coords(&self, c: Coord) -> (u16, u16) { let tl = self.top_left(); ((c.x + 1) * 3 + tl.0, c.y + 2 + tl.1) } fn draw_status<D>(&mut self, status: D) -> error::Result<()> where D: ::std::fmt::Display { let tl = self.top_left(); write!(self.stdout, "{}{}", cursor::Goto(tl.0 + 1, tl.1 + 1 + BOARD_HEIGHT), status )?; Ok(()) } fn top_left(&self) -> (u16, u16) { ((self.size.0 - BOARD_WIDTH) / 2, (self.size.1 - BOARD_HEIGHT) / 2) } fn draw_board(&mut self, player: board::Colour) -> error::Result<()> { let tl = self.top_left(); for (n, line) in self.board .display_to(player) .unwrap() .split('\n') .enumerate() { write!(self.stdout, "{}{}{}", cursor::Goto(1 + tl.0, 1 + tl.1 + n as u16), cursor::Hide, // self.size, line )? 
} Ok(()) } fn draw_cursor(&mut self, player: board::Colour) -> error::Result<()> { let (x, y) = self.term_coords(self.cursor); let cursor = format!("[{}]", self.board.tile_at(self.cursor).show(player)); write!(self.stdout, "{}", cursor::Goto(x - 1, y))?; if self.highlighted.contains(&self.cursor) { write!(self.stdout, "{}{}{}", termcol::Bg(termcol::Red), cursor, termcol::Bg(termcol::Reset) )?; } else { write!(self.stdout, "{}", cursor)?; }; Ok(()) } fn highlight(&mut self) -> error::Result<()> { for t in &self.highlighted { let (x, y) = self.term_coords(t.clone()); write!(self.stdout, "{}{} {}", cursor::Goto(x - 1, y), termcol::Bg(termcol::Red), termcol::Bg(termcol::Reset) )?; } Ok(()) } pub fn reveal(&mut self, c: Coord, player: board::Colour) -> error::Result<()> { if let Tile::Piece(p, col) = self.board.tile_at(c) { self.board.set_tile(c, Tile::Piece(p, col.other())); self.refresh(player)?; ::std::thread::sleep(::std::time::Duration::from_millis(SLEEP_DURATION)); self.board.set_tile(c, Tile::Piece(p, col)); self.refresh(player)?; } Ok(()) } fn popup(&mut self, text: &str) -> error::Result<()> { Ok(()) } fn check_game_end(&mut self) -> GameResult { let mut red = vec![]; let mut blue = vec![]; for line in &self.board { for tile in line { if let &Tile::Piece(piece, col) = tile { match col { board::Colour::Red => red.push(piece), board::Colour::Blue => blue.push(piece), } } } } // Flags if !red.contains(&board::Piece::Flag) { return GameResult::RedLoss } else if !blue.contains(&board::Piece::Flag) { return GameResult::RedWin } // Only immobile units if red.iter().fold(true, |acc, &p| acc && (p == board::Piece::Flag || p == board::Piece::Bomb)) { return GameResult::RedLoss } else if blue.iter().fold(true, |acc, &p| acc && (p == board::Piece::Flag || p == board::Piece::Bomb)) { return GameResult::RedWin } GameResult::Ongoing } }
# Ethernet Cable Status Example.
#
# This example prints the cable connection status.

import network, time

lan = network.LAN()

# Make sure Eth is not in low-power mode.
lan.config(low_power=False)

# Delay for auto negotiation
time.sleep(3.0)

while (True):
    print("Cable is", "connected." if lan.status() else "disconnected.")
    time.sleep(1.0)

# Put Eth back in low-power mode if needed.
#lan.config(low_power=True)
/* ------------------------------------------------------------
     HEXNIBBLEOUT

     Outputs the lower 4 bits of a char as a hex digit.
     There is NO check that the upper four bits are cleared!
   ------------------------------------------------------------- */
void hexnibbleout(uint8_t b)
{
  if (b< 10) b+= '0'; else b+= 55;
  my_putchar(b);
}
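A small Python illustration of the same nibble-to-ASCII mapping, making the magic constant 55 explicit (55 == ord('A') - 10); this is only a sketch of the logic, not part of the firmware above.

def hex_nibble(b: int) -> str:
    """Hex digit for the lower 4 bits of b; caller must ensure 0 <= b <= 15."""
    return chr(b + ord('0')) if b < 10 else chr(b + 55)  # 55 == ord('A') - 10

print("".join(hex_nibble(n) for n in range(16)))  # -> 0123456789ABCDEF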
<reponame>healthpackdev/website<gh_stars>10-100 import type { MDXRemoteSerializeResult } from 'next-mdx-remote'; import { serialize } from 'next-mdx-remote/serialize'; import fs from 'fs'; import path from 'path'; import matter from 'gray-matter'; import readingTime from 'reading-time'; import fg from 'fast-glob'; import dayjs from 'dayjs'; import { rehypeSyntaxHighlight } from './mdx-plugins'; const remarkPlugins = [ require('remark-slug'), [require('remark-autolink-headings'), { linkProperties: { className: ['anchor'] } }], ]; const rehypePlugins = [rehypeSyntaxHighlight]; const processRoot = process.cwd(); const contentDir = path.join(processRoot, 'content'); export interface BlogPost { data: { title: string; image: string; description: string; publishedAt: number; minRead: string; }; mdxSource: MDXRemoteSerializeResult; slug: string; } export type BlogPostMatter = Omit<BlogPost, 'mdxSource'>; const map = fg.sync('**.mdx', { cwd: contentDir }).map((f) => f.replace(path.extname(f), '')); const matters = { blog(source: string) { let { data: frontmatter, content } = matter(source); const minReadDuration = dayjs.duration(readingTime(content).time); const seconds = minReadDuration.asSeconds(); const isMinute = seconds > 60; const data = { ...frontmatter, minRead: isMinute ? `${minReadDuration.asMinutes().toFixed()} dakika` : `${seconds.toFixed()} saniye`, publishedAt: frontmatter.publishedAt || null, }; return { data, content, }; }, default(source: string) { const rest = matter(source); return rest; }, }; const generators = { async blog(source: string, slug: string) { const { data, content } = matters.blog(source); const mdxSource = await serialize(content, { mdxOptions: { remarkPlugins, rehypePlugins } }); return { data, mdxSource, slug, }; }, async default(source: string, slug: string) { const { data, content } = matters.default(source); const mdxSource = await serialize(content, { mdxOptions: { remarkPlugins, rehypePlugins } }); return { data, mdxSource, slug }; }, }; export const getParams = (type: string) => { const slugs = map .filter((fileName) => { return fileName.split('/')[0] === type; }) .map((fileName) => fileName.replace(`${type}/`, '')); return slugs; }; export const getByPath = async (type: string, ...pathname: string[]) => { const full = path.join(type, ...pathname); const slug = map.find((f) => f === full); const fileSource = fs.readFileSync(path.join(contentDir, `${slug}.mdx`), 'utf8'); const fn = generators[type] ? generators[type] : generators.default; return await fn(fileSource, slug); }; export const getMatters = (type: string) => { const slugs = getParams(type); return slugs.map((slug) => { const source = fs.readFileSync(path.join(contentDir, type, `${slug}.mdx`), 'utf-8'); const data = matters[type](source, slug); return { ...data, slug }; }, []); };
/**
 * Fills the matrix with values entered from the keyboard
 * @param int[][] Matrix to fill
 */
void rellenar_matriz(int matriz[FIL][COL]) {
    for(int i = 0; i < FIL; i++) {
        for(int j = 0; j < COL; j++) {
            cout << "matriz[" << i << "][" << j << "] = ";
            cin >> matriz[i][j];
        }
        cout << endl;
    }
}
def addPasswords(self): get = Addpassdialog() get.setWindowTitle('Add Account') get.setStyleSheet("background-color: rgba(84, 84, 84, 0.5);") state = get.exec_() login = get.linelogin.text() password = get.linepassword.text() description = get.linedescription.text() userid = get.lineid.text() if userid == '' or userid in self.data.keys(): Pyssview.msgBox(self, msg='User ID already exists or incorrect.') else: if state != 0: self.data[userid] = [login, password, description] Pyssview.__displayPassword(self, login, password, description, objName=userid, userid=userid)
Declining prices of vegetables and other food articles pulled wholesale inflation down sharply to 3.74% in August, a nearly five-year low. Inflation measured on the Wholesale Price Index (WPI) was 5.19% in July and 6.99% in August 2013. Inflation in the food segment declined significantly to 5.15% in August from 8.43% in the previous month, according to official data released here today. The August WPI inflation is the lowest since October 2009, when it stood at 1.8%. Vegetable prices contracted 4.88%, the third consecutive month of decline. Maintaining a downward trend, onion prices contracted by 44.7% during the month under review. However, potato prices were on the rise, with inflation in the kitchen essential jumping to 61.61% from 46.41% in July. Inflation in the fruit basket eased to 20.31% in August. While prices of protein-rich items like eggs, meat and fish contracted during the month, inflation in milk and pulses inched up to 12.18% and 7.81%, respectively, compared with July. August retail inflation too eased to 7.8%, compared with 7.96% in July. The WPI data further revealed that price rise in manufactured goods, such as sugar and edible oils, also eased, to 3.45% in August from 3.67% in July. Inflation in the fuel and power segment, which includes LPG, petrol and diesel, declined to 4.54% compared with the 7.40% rise seen in July. Meanwhile, wholesale inflation based on the final index for June has been revised upwards to 5.66% from the provisional estimate of 5.43%. The August WPI data is also provisional, the government said. It also said the build-up inflation rate in the financial year till August is 3%, compared with a build-up rate of 5.23% in the same period of 2013-14.
import {isEmpty} from 'lodash';

import {AppState} from '../AppState';
import {adjustAvailables} from './helpers/adjustAvailables';
import {adjustOptionalDistanceAndHook} from './helpers/adjustOptionalDistanceAndHook';
import {assignTipToCorrectArray} from './helpers/assignTipToCorrectArray';
import {hasCompletedTip} from './helpers/hasCompletedTip';
import {sortLocations} from './helpers/sortLocations';

export function handleSubmitTip(state: AppState, tip: Obj<any>): AppState {
  if (isEmpty(tip) || hasCompletedTip(state.completeLocations, tip)) {
    return state;
  }

  let out = assignTipToCorrectArray(state, tip);

  if (out !== state) {
    out = adjustOptionalDistanceAndHook(out, tip);
    out = adjustAvailables(out);

    // sort
    for (const k of ['completeLocations', 'incompleteLocations'] as ['completeLocations', 'incompleteLocations']) {
      if (out[k] !== state[k]) {
        out[k] = sortLocations<any>(out[k]);
      }
    }
  }

  return out;
}
// A basic test fixture for User Data downgrade tests that writes a test file // and a "Last Version" file into User Data in the pre-relaunch case. The former // is used to validate move-and-delete processing on downgrade, while the latter // is to simulate a previous run of a higher version of the browser. The fixture // is expected to be used in a PRE_ and a regular test, with IsPreTest used to // distinguish these cases at runtime. class UserDataDowngradeBrowserTestBase : public InProcessBrowserTest { protected: static bool IsPreTest() { const base::StringPiece test_name( ::testing::UnitTest::GetInstance()->current_test_info()->name()); return test_name.find("PRE_") != base::StringPiece::npos; } static std::string GetNextChromeVersion() { return base::NumberToString(version_info::GetVersion().components()[0] + 1); } UserDataDowngradeBrowserTestBase() : root_key_(install_static::IsSystemInstall() ? HKEY_LOCAL_MACHINE : HKEY_CURRENT_USER) {} HKEY root_key() const { return root_key_; } const base::FilePath& user_data_dir() const { return user_data_dir_; } const base::FilePath& moved_user_data_dir() const { return moved_user_data_dir_; } const base::FilePath& other_file() const { return other_file_; } void SetUp() override { ASSERT_NO_FATAL_FAILURE( registry_override_manager_.OverrideRegistry(root_key_)); ASSERT_TRUE(base::win::RegKey( root_key_, install_static::GetClientStateKeyPath().c_str(), KEY_SET_VALUE | KEY_WOW64_32KEY) .Valid()); InProcessBrowserTest::SetUp(); } bool SetUpUserDataDirectory() override { if (!base::PathService::Get(chrome::DIR_USER_DATA, &user_data_dir_)) return false; other_file_ = user_data_dir_.Append(FILE_PATH_LITERAL("Other File")); moved_user_data_dir_ = user_data_dir_.Append(user_data_dir_.BaseName()) .AddExtension(kDowngradeDeleteSuffix); if (IsPreTest()) { if (!base::WriteFile(other_file_, "data")) return false; const std::string last_version = GetNextChromeVersion(); base::WriteFile(user_data_dir_.Append(kDowngradeLastVersionFile), last_version); } return true; } private: const HKEY root_key_; base::FilePath user_data_dir_; base::FilePath moved_user_data_dir_; base::FilePath other_file_; registry_util::RegistryOverrideManager registry_override_manager_; DISALLOW_COPY_AND_ASSIGN(UserDataDowngradeBrowserTestBase); }
"Local types for evars"
import enum
import typing

# Environment map
T_EMAP = typing.Mapping[str, str]

T_FILE = typing.Union[typing.TextIO, str]
T_FILE_ENCODING = typing.Optional[str]


class RetainThe(enum.Enum):  # pylint: disable=invalid-name
    "Return values for the overwrite callback"
    CURRENT_VALUE = 1
    FUTURE_VALUE = 2


T_OVERWRITE_CALLBACK_KEY = str
T_OVERWRITE_CALLBACK_CURRENT_VALUE = str
T_OVERWRITE_CALLBACK_FUTURE_VALUE = str

# A callback which receives the arguments:
#   key: str
#   current_value: str
#   future_value: str
# and returns an enumerated value, indicating that the current_value should be
# discarded and the future_value retained (RetainThe.FUTURE_VALUE) or that the
# future_value should be ignored and the current_value retained
# (RetainThe.CURRENT_VALUE).
T_OVERWRITE_CALLBACK = typing.Optional[
    typing.Callable[
        [
            T_OVERWRITE_CALLBACK_KEY,
            T_OVERWRITE_CALLBACK_CURRENT_VALUE,
            T_OVERWRITE_CALLBACK_FUTURE_VALUE,
        ],
        RetainThe,
    ]
]
<gh_stars>10-100 /* * Copyright 2014 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ module Shumway { import Option = Shumway.Options.Option; import OptionSet = Shumway.Options.OptionSet; import shumwayOptions = Shumway.Settings.shumwayOptions; export let playerOptions = shumwayOptions.register(new OptionSet("Player Options")); export let frameEnabledOption = playerOptions.register( new Shumway.Options.Option("enableFrames", "Enable Frame Execution", "boolean", true, "Enable frame execution.") ); export let timerEnabledOption = playerOptions.register( new Shumway.Options.Option("enableTimers", "Enable Timers", "boolean", true, "Enable timer events.") ); export let pumpEnabledOption = playerOptions.register( new Shumway.Options.Option("enablePump", "Enable Pump", "boolean", true, "Enable display tree serialization.") ); export let pumpRateOption = playerOptions.register( new Shumway.Options.Option("pumpRate", "Pump Rate", "number", 60, "Number of times / second that the display list is synchronized.", { range: { min: 1, max: 120, step: 1 } }) ); export let frameRateOption = playerOptions.register( new Shumway.Options.Option("frameRate", "Frame Rate", "number", -1, "Override a movie's frame rate, set to -1 to use the movies default frame rate.", { range: { min: -1, max: 120, step: 1 } }) ); export let tracePlayerOption = playerOptions.register( new Shumway.Options.Option("tp", "Trace Player", "number", 0, "Trace player every n frames.", { range: { min: 0, max: 512, step: 1 } }) ); export let traceMouseEventOption = playerOptions.register( new Shumway.Options.Option("tme", "Trace Mouse Events", "boolean", false, "Trace mouse events.") ); export let frameRateMultiplierOption = playerOptions.register( new Shumway.Options.Option("", "Frame Rate Multiplier", "number", 1, "Play frames at a faster rate.", { range: { min: 1, max: 16, step: 1 } }) ); export let dontSkipFramesOption = playerOptions.register( new Shumway.Options.Option("", "Disables Frame Skipping", "boolean", false, "Play all frames, e.g. no skipping frame during throttle.") ); export let playAllSymbolsOption = playerOptions.register( new Shumway.Options.Option("", "Play Symbols", "boolean", false, "Plays all SWF symbols automatically.") ); export let playSymbolOption = playerOptions.register( new Shumway.Options.Option("", "Play Symbol Number", "number", 0, "Select symbol by Id.", { range: { min: 0, max: 20000, step: 1 } }) ); export let playSymbolFrameDurationOption = playerOptions.register( new Shumway.Options.Option("", "Play Symbol Duration", "number", 0, "How many frames to play, 0 for all frames of the movie clip.", { range: { min: 0, max: 128, step: 1 } }) ); export let playSymbolCountOption = playerOptions.register( new Shumway.Options.Option("", "Play Symbol Count", "number", -1, "Select symbol count.", { range: { min: -1, max: 20000, step: 1 } }) ); }
/** * Registers this Scenario with a Schema, creating a calculated member * [Scenario].[{id}] for each cube that has writeback enabled. (Currently * a cube has writeback enabled iff it has a dimension called "Scenario".) * * @param schema Schema */ void register(RolapSchema schema) { for (RolapCube cube : schema.getCubeList()) { if (cube.scenarioHierarchy != null) { member = cube.createCalculatedMember( cube.scenarioHierarchy, getId() + "", new ScenarioCalc(this)); assert member != null; } } }
#include <iostream> #include <iomanip> #include <cassert> #include <cmath> #include <cstdio> #include <cstring> #include <cstdlib> #include <map> #include <set> #include <queue> #include <stack> #include <vector> #include <algorithm> #include <numeric> using namespace std; #define eb emplace_back #define pb push_back #define MP make_pair #define fast_cin() ios_base::sync_with_stdio(false); cin.tie(NULL) #define rep(i,j,n) for (int i = j; i < n; ++i) #define repn(i,j,n) for (int i = j; i <= n; ++i) #define revn(i,j,n) for (int i = j; i >= n; --i) #define sz(i) i.size() #define mem(i,v) memset(i,v,sizeof(i)) #define all(v) v.begin(), v.end() #define endl '\n' #define int long long #define INF 1e18 #define ss second #define ff first typedef long long ll; typedef long double ld; typedef pair <int, int> pii; typedef pair <ll, ll> pll; //const int mod=1e9+7; const int mod=998244353; const int up=1e5+10; const int MAXN=1e6+10; // int n,m,x,y,res=0,sz[2*up],par[2*up],ans[2*up]; // int get(int x) // { // return x*(x-1)/2; // } // int find(int x) // { // if(par[x]==x) // return x; // else // return par[x]=find(par[x]); // } // void merge(int u,int v) // { // x=find(u); // y=find(v); // if(sz[x]<sz[y]) // swap(x,y); // res-=get(sz[x]); // res-=get(sz[y]); // sz[x]+=sz[y]; // res+=get(sz[x]); // par[v]=u; // } // void solve() // { // cin>>n>>m; // vector<pii>q(m+1); // vector<pair<int,pii>>e(n); // repn(i,1,n) // { // sz[i]=1; // par[i]=i; // } // repn(i,1,n-1) // { // cin>>e[i].ss.ff>>e[i].ss.ss>>e[i].ff; // } // repn(i,1,m) // { // cin>>q[i].ff; // q[i].ss=i; // } // sort(all(e)); // sort(all(q)); // int pos=1; // repn(i,1,m) // { // while(pos<n && e[pos].ff<=q[i].ff) // { // x=e[pos].ss.ff; // y=e[pos].ss.ss; // merge(x,y); // ++pos; // } // ans[q[i].ss]=res; // } // repn(i,1,m) // { // cout<<ans[i]<<" "; // } // cout<<endl; // } int n; int basis[63],shift[63]; void insert(int mask) { revn(i,59,0) { if((mask & (shift[i]))==0) continue; if(!basis[i]) { basis[i]=mask; return; } mask^=basis[i]; } } void solve() { cin>>n; int a[n+10]; int tot=0; repn(i,1,n) { cin>>a[i]; tot^=a[i]; } shift[0]=1; repn(i,1,59) { shift[i]=shift[i-1]*2; } repn(i,0,59) { if(tot & (shift[i])) { repn(j,1,n) { if(a[j] & (shift[i])) { a[j]-=(shift[i]); } } } } repn(i,1,n) { insert(a[i]); } int ans=0; revn(i,59,0) { if(!basis[i]) continue; if(ans & (shift[i])) continue; ans^=basis[i]; } cout<<(ans+(tot^ans))<<endl; } int32_t main() { int t; // memset(prime, true, sizeof(prime)); // sieve(); // cin>>t; t=1; while(t--) { solve(); } }
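The solve() above clears the bits fixed by the total XOR, inserts the remaining values into a GF(2) linear basis, and greedily maximises one share; a compact Python sketch of the same computation follows (illustrative only, reading from a list instead of stdin).

def max_split(values):
    # tot is the XOR of all values; bits set in tot cannot be shared, so clear them.
    tot = 0
    for v in values:
        tot ^= v
    reduced = [v & ~tot for v in values]

    # Insert each reduced value into a 60-bit linear basis over GF(2).
    basis = [0] * 60
    for v in reduced:
        for bit in range(59, -1, -1):
            if not (v >> bit) & 1:
                continue
            if basis[bit] == 0:
                basis[bit] = v
                break
            v ^= basis[bit]

    # Greedily build the largest XOR reachable from the basis.
    ans = 0
    for bit in range(59, -1, -1):
        if basis[bit] and not (ans >> bit) & 1:
            ans ^= basis[bit]
    return ans + (tot ^ ans)

print(max_split([2, 3, 5]))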
/** * Internal statistic calculator * */ class StatisticCalculator { private final long timestamp; private ConcurrentLinkedQueue<Double> storage = new ConcurrentLinkedQueue<>(); private volatile CalculatedStatistics statistics = new CalculatedStatistics(); private final AtomicBoolean frozen = new AtomicBoolean(false); private final ReentrantLock calcLock = new ReentrantLock(); public StatisticCalculator(long timestamp) { this.timestamp = timestamp; } public void freeze(){ calcLock.lock(); try { if(!frozen.get()) { calculate(); frozen.set(true); storage.clear(); } } finally { calcLock.unlock(); } } public boolean addData(double item) { if(frozen.get()){ return false; } storage.add(item); return true; } public boolean calculate(){ if (frozen.get()) { return false; } calcLock.lock(); try { CalculatedStatistics intervalStatistics = new CalculatedStatistics(); if (!storage.isEmpty()) { DescriptiveStatistics data = new DescriptiveStatistics(); storage.forEach(item -> data.addValue(item)); intervalStatistics.count = data.getN(); intervalStatistics.max = data.getMax(); intervalStatistics.mean = data.getMean(); intervalStatistics.min = data.getMin(); intervalStatistics.stddev = data.getStandardDeviation(); intervalStatistics.p50 = data.getPercentile(50); intervalStatistics.p95 = data.getPercentile(95); intervalStatistics.p99 = data.getPercentile(99); intervalStatistics.p999 = data.getPercentile(99.9); intervalStatistics.sum = data.getSum(); } statistics = intervalStatistics; return true; } finally { calcLock.unlock(); } } public CalculatedStatistics getStatistics(){ return statistics; } public long getTimestamp() { return timestamp; } }
// Write a double as a string, with proper formatting for infinity and NaN std::string Serializer::ToString(double v) const { if (std::isnan(v)) { return "Nan"; } if (std::isinf(v)) { return (v < 0 ? "-Inf" : "+Inf"); } return std::to_string(v); }
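For reference, the same special-casing expressed as a standalone Python sketch; only the Inf/NaN handling mirrors the method above, and the numeric branch simply uses repr().

import math

def double_to_string(v: float) -> str:
    if math.isnan(v):
        return "Nan"
    if math.isinf(v):
        return "-Inf" if v < 0 else "+Inf"
    return repr(v)

for x in (1.5, float("nan"), float("inf"), float("-inf")):
    print(double_to_string(x))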
import os import sys import sideeye DIRNAME = os.path.dirname(os.path.realpath(__file__)) REGION_FILE = sys.argv[1] DA1 = sys.argv[2] ASC = sys.argv[3] CONFIG = sys.argv[4] ITEMS = sideeye.parser.region.file(REGION_FILE, sideeye.config.Configuration(CONFIG)) DA1_EXPERIMENT = sideeye.parser.experiment.parse( DA1, REGION_FILE, sideeye.config.Configuration(CONFIG) ) ASC_EXPERIMENT = sideeye.parser.asc.parse( ASC, ITEMS, sideeye.config.Configuration(CONFIG).asc_parsing ) if DA1_EXPERIMENT != ASC_EXPERIMENT: for key, da1_trial in DA1_EXPERIMENT.trials.items(): asc_trial = ASC_EXPERIMENT.trials[key] if asc_trial != da1_trial: print("\nASC TRIAL\n-----------------------------------------") print(asc_trial) print("\nDA1 TRIAL\n-----------------------------------------") print(da1_trial) print("\n")
Ten-Week-Old Girl With Lethargy, Weakness, and Poor Feeding A 10-week-old previously healthy female infant presented to a community hospital after a brief episode of choking and “turning blue” while feeding. Parents reported a 4-day history of poor feeding, weakness, weak cry, and lethargy. The baby was primarily formula fed with some breast milk and taking about 4 ounces every 4 hours, until 4 days prior to admission when, because of her poor suck, parents began spoon-feeding her 1 to 2 ounces of formula every 4 hours. Review of systems was negative for fever, cough, and diarrhea but notable for 4 days without a bowel movement. Pregnancy was uncomplicated, and prenatal labs, including group B strep, were all negative and/or within normal limits, resulting in the birth of a 3.4 kg infant at 39 weeks’ gestation. Except for a recent diagnosis of oral candidiasis, the baby had no significant past medical history, exposures, sick contacts, or travel. She was being treated with oral nystatin for the candidiasis and had received her 2-month vaccinations. Vital signs on admission were normal for age; weight was 5.2 kg. On initial neurological exam, the patient showed global hypotonia and physiological deep tendon reflexes with hoarse cry and weak suck, but no signs of respiratory distress. Complete metabolic panel and creatinine kinase were within normal limits. C-reactive protein was slightly elevated at 8.95 mg/L (normal <5 mg/L). Urinalysis was unremarkable. White blood cell count was slightly elevated at 16.2 × 10/μL with 49% neutrophils and 42% lymphocytes. No lumbar puncture was performed; head ultrasound was normal. Blood and urine cultures were negative. She was admitted for dehydration and presumed clinical sepsis, treated with 7 days of ceftriaxone and discharged home with an adaptive nipple. However, because of persistent weakness and poor feeding, the primary care pediatrician admitted the patient to a different community teaching hospital 14 days after her symptoms began. Further questioning revealed that the constipation and weakening suck were initially accompanied by general lethargy and marked head lag, which progressed to include first upperand then lower-extremity weakness. Detailed exam included the presence of normoreactive pupils, facial diplegia, diminished/absent gag, muted cry, decreased/absent deep tendon reflexes, and intact anal wink with no tongue fasciculations or ptosis. This combination of progressive peripheral descending hypotonia, facial diplegia, bulbar involvement, and decreased deep tendon reflexes strongly suggested infant botulism. After consultation with the New York City Department of Health and Mental Hygiene (NYC DOHMH), stool samples were submitted to test for botulinum toxin by mouse bioassay and for Clostridium botulinum organism by polymerase chain reaction (PCR). Serum lactate, ammonia, serum carnitine, plasma quantitative amino acids, and urine organic acids were within normal limits. Noncontrast MRI of the brain and spine were normal. The infant was then transferred to our institution for a more extensive neurological evaluation on day 16 of illness.
package com.mclean.dao;

import com.mclean.bean.UserBean;

import java.util.Map;

/**
 * @Title:
 * @author: IMUKL
 * @Description:
 * @date: 2019/3/4 9:49
 */
public interface IUserMapper {
    // Multi-parameter query; the parameters can ultimately be wrapped into a single map
    UserBean selectUserBy(Map<String,Object> map);
    UserBean selectUser(Integer id);
    Boolean insertUser(UserBean user);
    // The return value is the number of rows affected by the SQL statement (rows affected > 0 ? true : false)
    Boolean updateUser(UserBean user);
    Boolean deleteUser(Integer id);
    UserBean selectUserAndDept(Integer id);
    UserBean selectUserStep(Integer id);
    UserBean selectUserFullInfoBySqlStep(Integer id);
}
#include "Wire.h" #include "Adafruit_NeoPixel.h" #define DS3231_I2C_ADDRESS 0x68 #define PIXEL_PIN 14 Adafruit_NeoPixel pixels = Adafruit_NeoPixel(60, PIXEL_PIN, NEO_GRB + NEO_KHZ800); // Convert normal decimal numbers to binary coded decimal byte decToBcd(byte val) { return( (val / 10 * 16) + (val % 10) ); } // Convert binary coded decimal to normal decimal numbers byte bcdToDec(byte val) { return( (val / 16 * 10) + (val % 16) ); } void setDS3231time(byte second, byte minute, byte hour, byte dayOfWeek, byte dayOfMonth, byte month, byte year) { // sets time and date data to DS3231 Wire.beginTransmission(DS3231_I2C_ADDRESS); Wire.write(0); // set next input to start at the seconds register Wire.write(decToBcd(second)); // set seconds Wire.write(decToBcd(minute)); // set minutes Wire.write(decToBcd(hour)); // set hours Wire.write(decToBcd(dayOfWeek)); // set day of week (1=Sunday, 7=Saturday) Wire.write(decToBcd(dayOfMonth)); // set date (1 to 31) Wire.write(decToBcd(month)); // set month Wire.write(decToBcd(year)); // set year (0 to 99) Wire.endTransmission(); } int readDS3231time(byte *second, byte *minute, byte *hour, byte *dayOfWeek, byte *dayOfMonth, byte *month, byte *year) { Wire.beginTransmission(DS3231_I2C_ADDRESS); Wire.write(0); // set DS3231 register pointer to 00h Wire.endTransmission(); int count = Wire.requestFrom(DS3231_I2C_ADDRESS, 7); if(count != 7) return 0; // request seven bytes of data from DS3231 starting from register 00h *second = bcdToDec(Wire.read() & 0x7f); *minute = bcdToDec(Wire.read()); *hour = bcdToDec(Wire.read() & 0x3f); *dayOfWeek = bcdToDec(Wire.read()); *dayOfMonth = bcdToDec(Wire.read()); *month = bcdToDec(Wire.read()); *year = bcdToDec(Wire.read()); return 1; } void displayTime() { byte second, minute, hour, dayOfWeek, dayOfMonth, month, year; // retrieve data from DS3231 int valid = readDS3231time(&second, &minute, &hour, &dayOfWeek, &dayOfMonth, &month, &year); if(! valid) { Serial.print("Invalid\n\r"); return; } // send it to the serial monitor Serial.print(hour, DEC); // convert the byte variable to a decimal number when displayed Serial.print(":"); if (minute < 10) { Serial.print("0"); } Serial.print(minute, DEC); Serial.print(":"); if (second < 10) { Serial.print("0"); } Serial.print(second, DEC); Serial.print(" "); Serial.print(dayOfMonth, DEC); Serial.print("/"); Serial.print(month, DEC); Serial.print("/"); Serial.print(year, DEC); Serial.print(" Day of week: "); switch(dayOfWeek) { case 1: Serial.println("Sunday"); break; case 2: Serial.println("Monday"); break; case 3: Serial.println("Tuesday"); break; case 4: Serial.println("Wednesday"); break; case 5: Serial.println("Thursday"); break; case 6: Serial.println("Friday"); break; case 7: Serial.println("Saturday"); break; } } uint32_t Wheel(byte WheelPos) { WheelPos = 255 - WheelPos; if(WheelPos < 85) { return pixels.Color(255 - WheelPos * 3, 0, WheelPos * 3); } else if(WheelPos < 170) { WheelPos -= 85; return pixels.Color(0, WheelPos * 3, 255 - WheelPos * 3); } else { WheelPos -= 170; return pixels.Color(WheelPos * 3, 255 - WheelPos * 3, 0); } } #define HOUR_COLOR 0xFF0000 // red #define HOUR_COLOR1 0x440000 // red #define HOUR_COLOR2 0x110000 // red int ledno(int ledno) { if(ledno < 0) { ledno = 60 - ledno; } else if(ledno > 60) { ledno -= 60; } return ledno; } /** * Hours are HOUR_COLOR, spread 12 hour over 60 pixels, 5 pixels per hour with the "big" thing in the center. 
*/ void setHour(int hour) { hour %= 12; int firstled = hour * 5 - 2; if(firstled < 0) firstled = 60 - firstled; pixels.setPixelColor(ledno(firstled++), HOUR_COLOR2); pixels.setPixelColor(ledno(firstled++), HOUR_COLOR1); pixels.setPixelColor(ledno(firstled++), HOUR_COLOR); pixels.setPixelColor(ledno(firstled++), HOUR_COLOR1); pixels.setPixelColor(ledno(firstled++), HOUR_COLOR2); } #define MINUTE_COLOR 0x00ff00 #define MINUTE_COLOR1 0x002200 #define SECOND_COLOR 0x0000ff /* * */ void setMinute(byte minute) { int led = minute % 60 - 1; pixels.setPixelColor(ledno(led++), MINUTE_COLOR1); pixels.setPixelColor(ledno(led++), MINUTE_COLOR); pixels.setPixelColor(ledno(led++), MINUTE_COLOR1); } void setSecond(byte second) { int led = second % 60; pixels.setPixelColor(led, SECOND_COLOR); } byte lastSecond = 255; void setLedTime() { byte second, minute, hour, dayOfWeek, dayOfMonth, month, year; // retrieve data from DS3231 int valid = readDS3231time(&second, &minute, &hour, &dayOfWeek, &dayOfMonth, &month, &year); if(! valid) return; if(second == lastSecond) return; lastSecond = second; pixels.clear(); setHour(hour); setMinute(minute); setSecond(second); } void setup() { Wire.begin(); Serial.begin(9600); // pixels.setBrightness(50); pixels.begin(); pixels.show(); // Turn whole clock off // set the initial time here: // DS3231 seconds, minutes, hours, day, date, month, year setDS3231time((byte) 30,(byte)21,(byte)19,(byte)2,(byte)5,(byte)9,(byte)16); } void loop() { setLedTime(); pixels.show(); delay(100); // displayTime(); // display the real-time clock data on the Serial Monitor, // delay(1000); // every second // for(int j = 0; j < 60; j++) { // uint32_t col = Wheel((count + j) & 0xff); // pixels.setPixelColor(j, col); // } }
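The DS3231 registers store time in BCD, which is why the sketch converts on every read and write; a short Python illustration of the encoding and a round-trip check (explanatory only, not Arduino code).

def dec_to_bcd(val: int) -> int:
    # High nibble holds the tens digit, low nibble the ones digit.
    return (val // 10) * 16 + (val % 10)

def bcd_to_dec(val: int) -> int:
    return (val // 16) * 10 + (val % 16)

assert dec_to_bcd(59) == 0x59
assert bcd_to_dec(0x59) == 59
assert all(bcd_to_dec(dec_to_bcd(n)) == n for n in range(100))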
// ReadFileWithReader and stream on a channel func ReadFileWithReader(r io.Reader) (chan string, error) { out := make(chan string) go func() { defer close(out) scanner := bufio.NewScanner(r) for scanner.Scan() { out <- scanner.Text() } }() return out, nil }
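The Go helper streams lines over a channel so callers never hold the whole input in memory; the analogous Python pattern is a generator over any file-like reader (a sketch, unrelated to the Go package above).

import io
from typing import IO, Iterator

def read_lines(reader: IO[str]) -> Iterator[str]:
    # Yield one line at a time instead of reading the whole stream up front.
    for line in reader:
        yield line.rstrip("\n")

for line in read_lines(io.StringIO("a\nb\nc\n")):
    print(line)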
<filename>data_models/parameter_configuration.py class ParameterConfiguration: def __init__(self, input_window_size: int = None, input_data_source=None, optimization_algo=None, criterion=None, scheduler_partial=None, hidden_act_func=None, num_epochs=None, num_model_initializations=None, output_size=None, scaler=None, model=None, num_hidden_units=None, batch_size=None, learning_rate=None, input_size=None, weight_decay=None, dropout=None, is_classification=None, mat_path=None): self.input_window_size = input_window_size self.input_data_source = input_data_source self.model = model self.hidden_act_func = hidden_act_func self.optimization_algo = optimization_algo self.criterion = criterion self.scheduler_partial = scheduler_partial self.num_hidden_units = num_hidden_units self.batch_size = batch_size self.learning_rate = learning_rate self.num_epochs = num_epochs self.num_model_initializations = num_model_initializations self.output_size = output_size self.scaler = scaler self.input_size = input_size self.weight_decay = weight_decay self.dropout = dropout self.is_classification = is_classification self.mat_path = mat_path def __repr__(self): r = '# Parameter Configuration #\n\n' r += 'Mat-Folder path: {}\n'.format(self.mat_path) r += 'Classification: {}\n'.format(self.is_classification) r += 'Input Window Size: {}\n'.format(self.input_window_size) r += 'Input Data Source: {}\n'.format(self.input_data_source.__name__) if self.scaler: try: r += 'Scaler: {}\n'.format(self.scaler.__name__) except AttributeError: r += 'Scaler: {}\n'.format(self.scaler) else: r += 'Scaler: None\n' try: r += 'Activation func: {}\n'.format(self.hidden_act_func.__name__) except AttributeError: r += 'Activation func: {}\n'.format(self.hidden_act_func) r += 'Hidden units: {}\n'.format(self.num_hidden_units) r += 'Dropout probabilities: {}\n'.format(self.dropout) r += 'Batch size: {}\n'.format(self.batch_size) r += 'Learning rate: {}\n'.format(self.learning_rate) r += 'Weight decay: {}\n'.format(self.weight_decay) r += 'Criterion: {}\n'.format(self.criterion) try: r += 'Optimizer: {}\n'.format(self.optimization_algo.__name__) except AttributeError: r += 'Optimizer: {}\n'.format(self.optimization_algo) try: r += 'Scheduler: {}\n'.format(self.scheduler_partial.func.__name__) except AttributeError: r += 'Scheduler: {}\n'.format(self.scheduler_partial) r += 'Num model parameters: {}\n'.format(count_parameters(self.model)) r += 'Model: {}\n'.format(self.model) return r def count_parameters(model): return sum(p.numel() for p in model.parameters() if p.requires_grad)
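count_parameters() follows the usual PyTorch idiom of summing numel() over trainable parameters; a minimal sketch of what it reports, assuming torch is installed (the Linear layer is just an example, not part of this project).

import torch.nn as nn

def count_parameters(model):
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

# A 10 -> 2 linear layer has 10 * 2 weights plus 2 biases = 22 trainable parameters.
print(count_parameters(nn.Linear(10, 2)))  # 22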
import React, { useState, useEffect } from 'react'; import { useHistory } from 'react-router-dom'; import Pagination from '@material-ui/lab/Pagination'; import Switch from 'react-switch'; import parse from 'html-react-parser'; import api from '../../services/api'; import { useQuestion } from '../../hooks/question'; import Header from '../../components/Header'; import Button from '../../components/Button'; import { Container, ContentQuestion } from './styles'; interface Alternativas { letra: string; correta: boolean; texto: string; } interface Question { id: string; materia: string; vestibular: string; resolucao: string; enunciado: string; numeroQuestao: number; ano: number; alternativas: Alternativas[]; disponivel: boolean; } const DashboardQuestions = () => { const history = useHistory(); const { selectQuestion } = useQuestion(); const [questions, setQuestions] = useState<Question[]>([]); const [page, setPage] = useState(1); useEffect(() => { api .get('/questions', { params: { page, }, }) .then(response => { setQuestions(response.data); }); }, [page]); const handleAvailability = async (id: String, availability: boolean) => { await api.patch(`/questions/${id}/availability`, { disponivel: availability, }); await api .get('/questions', { params: { page, }, }) .then(response => { setQuestions(response.data); }); }; const handleChangePage = ( event: React.ChangeEvent<unknown>, value: number, ) => { setPage(value); window.scrollTo(0, 0); }; const handleEditQuestion = (question: Question) => { selectQuestion(question); history.push('/editquestion'); }; return ( <Container> <Header /> <ContentQuestion> {questions.map(question => ( <li key={question.id}> <div> <strong>Matéria:</strong> <p>{question.materia}</p> </div> <div> <strong>Vestibular:</strong> <p>{question.vestibular}</p> </div> <div> <strong>Ano:</strong> <p>{question.ano}</p> </div> <div> <strong>Disponível:</strong> <Switch onChange={() => handleAvailability(question.id, !question.disponivel) } checked={question.disponivel} onColor="#04d361" uncheckedIcon={false} checkedIcon={false} /> </div> <div> <strong>Enunciado:</strong> </div> <span>{parse(question.enunciado)}</span> <Button onClick={() => handleEditQuestion(question)}> EDITAR QUESTÃO </Button> </li> ))} <Pagination count={10} page={page} onChange={handleChangePage} size={'small'} /> </ContentQuestion> </Container> ); }; export default DashboardQuestions;
/** * @author wangxing * @create 2020/4/2 */ @Data @NoArgsConstructor @AllArgsConstructor public class TestMessage { private String eventId; private String name; }
ASSOCIATION OF MATERNAL CHARACTERISTICS WITH COMPLICATIONS OF PREGNANCY: A CROSS-SECTIONAL STUDY AMONG MIDDLE SOCIOECONOMIC PREGNANT WOMEN
Objective: The objective of the study was to determine the association of maternal characteristics with complications of pregnancy among middle socioeconomic women. Methods: The enrolled subjects were divided into two groups, complicated and uncomplicated, based on the occurrence of complications in the current pregnancy, and their sociodemographic details along with present and past medical and medication history were collected. Results: The mean age of the study subjects was 25.33±4.22 years. Maternal characteristics such as age, parity, body mass index, maternal education, and employment status did not have a statistically significant association with the complications of pregnancy at p<0.05. However, a first antenatal visit at a gestational age of <8 weeks had a statistically significant association with the complications of pregnancy at p=0.02. Conclusion: Early initiation of antenatal care along with adequate antenatal visits may reduce the risk of complications of pregnancy.
package org.firstinspires.ftc.robotcontroller.internal; import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.Disabled; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.util.Range; import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; /** * Created by Alex on 11/8/2017. * Autonomous Objectives: * --Knock Off Jewel for 30 Points * --Score 1 Glyph (15 points) in Correct Column CryptoBox for (30 points) * --Park in Safe Zone (10 points) * Pseudocode: * 0. Start on balancing stone, jewel mechanism faces the jewel * 1. Knock off jewel (and read Pictograph, and lift Glyph 2 inches up) * 2. Drive forward to drive off balancing stone * 3. Drive backward to allign with balancing stone * 4. Drive forward slowly to position needed to score in correct column * 5. Rotate to -75 degrees to have glyph face CryptoBox * 6. Drive forward toward CryptoBox until glyph is scored * 7. Release the glyph * 8. Rotate to -105 degrees to push glyph * 9. Rotate to -45 degrees to push glyph * 10. Rotate to -90 degrees to be perpendicular with wall * 11. Drive backward a little bit to park * End. Robot ends up aligned to score glyph in specific column of CryptoBox */ @Autonomous(name = "MecanumTestAuto", group = "default") @Disabled public class MecanumTestAuto extends GeorgeOp { //Declare and Initialize any variables needed for this specific autonomous program public MecanumTestAuto() {} @Override public void loop() { //Display Data to be displayed throughout entire Autonomous telemetry.addData(stateName, state); telemetry.addData("current time", String.format("%.1f", this.time)); telemetry.addData("state time", String.format("%.1f", this.time - setTime)); telemetry.addData("FR Pwr", String.format("%.2f",driveFR.getPower())); telemetry.addData("FR Encoder", driveFR.getCurrentPosition()); telemetry.addData("FL Pwr", String.format("%.2f",driveFL.getPower())); telemetry.addData("FL Encoder", driveFL.getCurrentPosition()); telemetry.addData("BR Pwr", String.format("%.2f",driveBR.getPower())); telemetry.addData("BR Encoder", driveBR.getCurrentPosition()); telemetry.addData("BL Pwr", String.format("%.2f",driveBL.getPower())); telemetry.addData("BL Encoder", driveBL.getCurrentPosition()); telemetry.addData("EncoderTargetReached", encoderTargetReached); switch (state) { case 0: stateName = "Initial Calibration"; calibrateVariables(); resetEncoders(); state++; break; case 2: stateName = "Drive FR-Wheel fwd 2 revolution"; driveFR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFR.setPower(0.20); if (1120 * 2 - driveFR.getCurrentPosition() <= 10) state++; break; case 4: stateName = "Drive FL-Wheel fwd 2 revolution"; driveFL.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFL.setPower(0.20); if (1120 * 2 - driveFL.getCurrentPosition() <= 10) state++; break; case 6: stateName = "Drive BR-Wheel fwd 2 revolution"; driveBR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveBR.setPower(0.20); if (1120 * 2 - driveBR.getCurrentPosition() <= 10) state++; break; case 8: stateName = "Drive BL-Wheel fwd 2 revolution"; driveBL.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveBL.setPower(0.20); if (1120 * 2 - driveBL.getCurrentPosition() <= 10) state++; break; case 10: stateName = "Drive FR & FL fwd 2 revolutions"; driveFR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFL.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFR.setPower(0.20); driveFL.setPower(0.20); if (1120 * 2 - driveFL.getCurrentPosition() <= 10) state++; break; case 12: 
stateName = "Drive FR & BR fwd 2 revolutions"; driveFR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveBR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFR.setPower(0.20); driveBR.setPower(0.20); if (1120 * 2 - driveBR.getCurrentPosition() <= 10) state++; break; case 14: stateName = "Drive FR & BL fwd 2 revolutions"; driveFR.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveBL.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveFR.setPower(0.20); driveBL.setPower(0.20); if (1120 * 2 - driveBL.getCurrentPosition() <= 10) state++; break; case 1000: //Run When Autonomous is Complete stateName = "Autonomous Complete"; //Set all motors to zero and servos to initial positions calibrateVariables(); resetEncoders(); break; default://Default state used to reset all hardware devices to ensure no errors stateName = "Calibrating"; calibrateVariables(); resetEncoders(); if (waitSec(2)) { state++; setTime = this.time; } break; } } }
// Creates an "error" style pop-up window func alert(caption string) int { format := 0x10 user32 := syscall.NewLazyDLL("user32.dll") captionPtr, _ := syscall.UTF16PtrFromString(caption) titlePtr, _ := syscall.UTF16PtrFromString("winpath") ret, _, _ := user32.NewProc("MessageBoxW").Call( uintptr(0), uintptr(unsafe.Pointer(captionPtr)), uintptr(unsafe.Pointer(titlePtr)), uintptr(format)) return int(ret) }
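The same MB_ICONERROR (0x10) dialog can be raised from Python via ctypes; a sketch assuming a Windows host, mirroring the Go call above (the title string "winpath" is simply carried over from that snippet).

import ctypes

def alert(caption: str) -> int:
    MB_ICONERROR = 0x10
    # MessageBoxW(hwnd, text, title, type) returns the id of the button pressed.
    return ctypes.windll.user32.MessageBoxW(None, caption, "winpath", MB_ICONERROR)

# alert("Something went wrong")  # only works on Windows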
/** * Generate the HTML source for the specified project. * @param project the project top explore. * @return an html representation of the project. */ public String generate(final Project project) { final StringBuilder sb = new StringBuilder(); sb.append(indent()).append("<div class=\"blockWithHighlightedTitle\" align='center'>\n"); sb.append(incIndent()).append("<table><tr><td align='left'>\n"); sb.append(indent()).append(String.format("<h2><img src='images/icons/monitoring.png' class='titleWithIcon'/>%s</h2>", project.getName())).append('\n'); sb.append(incIndent()).append("<table cellpadding='3px' cellspacing='0'>\n"); sb.append(incIndent()).append("<tr>\n"); final String style1a = "border: 1px solid #6D78B6; border-right: 0px"; final String style1b = "border: 1px solid #6D78B6;"; final String style2a = "border: 1px solid #6D78B6; border-top: 0px; border-right: 0px;"; final String style2b = "border: 1px solid #6D78B6; border-top: 0px;"; sb.append(incIndent()).append("<th align='center' valign='top' style='" + style1a + "'>Build #</th>\n"); sb.append(indent()).append("<th align='center' valign='top' style='" + style1a + "'>Start</th>\n"); sb.append(indent()).append("<th align='center' valign='top' style='" + style1a + "'>Duration</th>\n"); sb.append(indent()).append("<th align='center' style='" + style1b + "'>Tests</th>\n"); sb.append(decIndent()).append("</tr>\n"); for (final Build build: project.getBuilds()) { final TestResults res = build.getTestResults(); sb.append(indent()).append("<tr>\n"); final String icon = "https://www.jppf.org/images/icons/" + ("SUCCESS".equals(build.getResult()) ? "default.png" : "bug1.png"); sb.append(incIndent()).append("<td align='left' valign='bottom' style='" + style2a + "'>").append("<img width='16' height='16' src='" + icon + "'/> ").append(build.getNumber()).append("</td>\n"); sb.append(indent()).append("<td align='right' valign='bottom' style='" + style2a + "'>").append(SDF.format(new Date(build.getStartTime()))).append("</td>\n"); sb.append(indent()).append("<td align='right' valign='bottom' style='" + style2a + "'>").append(StringUtils.toStringDuration(build.getDuration())).append("</td>\n"); sb.append(indent()).append("<td align='right' valign='bottom' style='" + style2b + "'>"); if (res == null) sb.append("N/A"); else sb.append(String.format("%,4d / %,4d / %,4d", res.getTotalCount(), res.getFailures(), res.getSkipped())); sb.append("</td>\n"); sb.append(decIndent()).append("</tr>\n"); } sb.append(decIndent()).append("</table>\n"); sb.append(decIndent()).append("</td></tr></table>"); sb.append(decIndent()).append("<br></div><br>\n"); return sb.toString(); }
/** * Test standard values of the error function. * * <p>The expected values are the probabilities that a Gaussian distribution with mean 0 * and standard deviation 1 contains a value Y in the range [-x, x]. * This is equivalent to erf(x / root(2)). * * @param x the value * @param expected the expected value of erf(x / root(2)) */ @ParameterizedTest @CsvSource({ "0, 0", "1.960, 0.95", "2.576, 0.99", "2.807, 0.995", "3.291, 0.999", }) void testErf(double x, double expected) { x /= Math.sqrt(2); double actual = Erf.value(x); Assertions.assertEquals(expected, actual, 1e-5); Assertions.assertEquals(1 - expected, Erfc.value(x), 1e-5); actual = Erf.value(-x); expected = -expected; Assertions.assertEquals(expected, actual, 1e-5); Assertions.assertEquals(1 - expected, Erfc.value(-x), 1e-5); }
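The expected column in the table is just the central probability of a standard normal; a quick Python check of the erf(x / sqrt(2)) relationship exercised by the test (independent of the Java harness).

import math

# P(|Y| <= x) for Y ~ N(0, 1) equals erf(x / sqrt(2)).
for x, expected in [(1.960, 0.95), (2.576, 0.99), (2.807, 0.995), (3.291, 0.999)]:
    actual = math.erf(x / math.sqrt(2))
    assert abs(actual - expected) < 1e-4, (x, actual)
    print(f"erf({x} / sqrt(2)) = {actual:.5f}")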
package hyperdrive func (suite *HyperdriveTestSuite) TestTagName() { suite.Equal("param", tagName, "expects tagName to be correct") } func (suite *HyperdriveTestSuite) TestIsAllowedTrue() { suite.Equal(true, suite.TestParsedParam.IsAllowed("GET"), "expects it to return true") } func (suite *HyperdriveTestSuite) TestIsAllowedFalse() { suite.Equal(false, suite.TestParsedParam.IsAllowed("POST"), "expects it to return false") } func (suite *HyperdriveTestSuite) TestIsRequiredTrue() { suite.Equal(true, suite.TestParsedParam.IsRequired("PUT"), "expects it to return true") } func (suite *HyperdriveTestSuite) TestIsRequiredFalse() { suite.Equal(false, suite.TestParsedParam.IsRequired("POST"), "expects it to return false") } func (suite *HyperdriveTestSuite) TestAllowedList() { suite.Equal("GET,PUT", suite.TestParsedParamMap["test_param"].AllowedList(), "expects it to return the correct methods") } func (suite *HyperdriveTestSuite) TestRequiredList() { suite.Equal("PUT", suite.TestParsedParamMap["test_param"].RequiredList(), "expects it to return the correct methods") } func (suite *HyperdriveTestSuite) TestContainsTrue() { suite.Equal(true, contains([]string{"GET"}, "GET"), "expects it to return true") } func (suite *HyperdriveTestSuite) TestContainsFalse() { suite.Equal(false, contains([]string{"GET"}, "POST"), "expects it to return false") } func (suite *HyperdriveTestSuite) TestParse() { suite.IsType(parsedParams{}, parseEndpoint(suite.TestTaggedStruct), "expects it to return a map of parsedParams") } func (suite *HyperdriveTestSuite) TestParsedParamsAllowed() { suite.IsType([]string{}, suite.TestParsedParamMap.Allowed("GET"), "expects it to return the correct slice of strings") } func (suite *HyperdriveTestSuite) TestParsedParamsNotAllowed() { suite.IsType([]string{}, suite.TestParsedParamMap.Allowed("POST"), "expects it to return the correct slice of strings") } func (suite *HyperdriveTestSuite) TestParsedParamsRequired() { suite.IsType([]string{}, suite.TestParsedParamMap.Required("PUT"), "expects it to return the correct slice of strings") } func (suite *HyperdriveTestSuite) TestParsedParamsNotRequired() { suite.IsType([]string{}, suite.TestParsedParamMap.Required("GET"), "expects it to return the correct slice of strings") } func (suite *HyperdriveTestSuite) TestParsedParamsRequiredEmpty() { suite.IsType([]string{}, suite.TestParsedParamMap.Required("POST"), "expects it to return a map of parsedParams") } func (suite *HyperdriveTestSuite) TestParseTestParam() { suite.Equal(suite.TestParsedParamMap["test_param"], parseEndpoint(suite.TestTaggedStruct)["test_param"], "expects it to return the correct parsedParam") } func (suite *HyperdriveTestSuite) TestParseTestParamDefault() { suite.Equal(suite.TestParsedParamMap["test_param_default"], parseEndpoint(suite.TestTaggedStruct)["test_param_default"], "expects it to return the correct parsedParam") } func (suite *HyperdriveTestSuite) TestParseTestParamEmpty() { suite.Equal(suite.TestParsedParamMap["TestParamEmpty"], parseEndpoint(suite.TestTaggedStruct)["TestParamEmpty"], "expects it to return the correct parsedParam") } func (suite *HyperdriveTestSuite) TestParseTestParamRequired() { suite.Equal(suite.TestParsedParamMap["test_param_required"], parseEndpoint(suite.TestTaggedStruct)["test_param_required"], "expects it to return the correct parsedParam") } func (suite *HyperdriveTestSuite) TestParameterName() { suite.Equal("ID", parseEndpoint(suite.TestCustomEndpoint)["id"].Name, "expects it to return the correct Name") } func (suite 
*HyperdriveTestSuite) TestParameterDesc() { suite.Equal("The unique identifer for this resource.", parseEndpoint(suite.TestCustomEndpoint)["id"].Desc, "expects it to return the correct Desc") } func (suite *HyperdriveTestSuite) TestParseTestParamCustom() { suite.Equal(suite.TestParsedParamCustom, parseEndpoint(suite.TestCustomEndpoint)["id"], "expects it to return the correct parsedParam") }
/** * Adds a set to the given propertyupdate and returns an editor on its * prop. * * @return an editor on a new prop */ public Prop addSet() { Element set = appendChild(root, "set"); Element prop = appendChild(set, "prop"); try { return new Prop(prop); } catch (MalformedElementException e) { Assert.isTrue(false, Policy.bind("assert.internalError")); return null; } }
def mkroot(target='/tmp', name='root'): passwd = getpass('Enter password for root\'s private key: ') install_dir = os.getcwd() builder(target, name, 'root') with fileinput.input('openssl.cnf', inplace=True) as f: for line in f: print(line.replace('{{name}}', name).replace('{{path}}', '{0}/{1}'.format(target, name)), end='') subprocess.call(['openssl', 'genrsa', '-aes256', '-out', 'private/{0}.key.pem'.format(name), '-passout', 'pass:{0}'.format(passwd), '4096'], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) os.chmod('private/{0}.key.pem'.format(name), 0o400) subprocess.call(['openssl', 'req', '-batch', '-config', 'openssl.cnf', '-key', 'private/{0}.key.pem'.format(name), '-passin', 'pass:' + passwd, '-new', '-x509', '-days', '7300', '-sha256', '-extensions', 'v3_ca', '-out', 'certs/{0}.cert.pem'.format(name)], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) os.chmod('certs/{0}.cert.pem'.format(name), 0o444) os.chdir(install_dir) print(''' Root certificate created. Run the command below to verify it: $ openssl x509 -noout -text -in {0}/{1}/certs/{1}.cert.pem '''.format(target, name))
import java.io.FileInputStream; import java.util.Arrays; import java.util.Comparator; import java.util.Scanner; import static java.lang.Integer.min; public class TaskD { int n; int[][] ps; public void solve() throws Exception{ //System.setIn(new FileInputStream("inputs/d.in")); Scanner sc = new Scanner(System.in); n = sc.nextInt(); ps = new int[n][2]; for(int i=0; i<n; i++){ int a, b; a = sc.nextInt(); b = sc.nextInt(); ps[i][0] = a; ps[i][1] = b; } Arrays.sort(ps, new Comparator<int[]>() { @Override public int compare(int[] o1, int[] o2) { return (o1[1] - o1[0]) - (o2[1] - o2[0]); } }); long res = 0; for(int i=0; i<n; i++){ long a = ps[i][0], b = ps[i][1]; res += a * i + b * (n - i - 1); } System.out.println(res); } public static void main(String[] args) throws Exception{ TaskD s = new TaskD(); s.solve(); } }
#!/usr/bin/env python3 import sys, math, itertools, collections, bisect input = lambda: sys.stdin.buffer.readline().rstrip().decode('utf-8') inf = float('inf') ;mod = 10**9+7 mans = inf ;ans = 0 ;count = 0 ;pro = 1 sys.setrecursionlimit(10**7) def dfs(node,prenode,black): if black and B[node]!= -1: return B[node] elif not black and W[node]!= -1: return W[node] res = 1 for vi in G[node]: if vi == prenode: continue tmp = 0 tmp += dfs(vi,node,False) if not black: tmp += dfs(vi,node,True) res = res * tmp % mod if black: B[node] = res else: W[node] = res return res n = int(input()) B = [-1]*n W = [-1]*n G=[[] for i in range(n)] for i in range(n-1): x,y = map(int,input().split()) x-=1;y-=1 G[x].append(y) G[y].append(x) dfs(0,-1,True) dfs(0,-1,False) print((B[0]+W[0])%mod) # print(B) # print(W)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client = ZwaveClient(entry.data[CONF_URL], async_get_clientsession(hass)) dev_reg = await device_registry.async_get_registry(hass) @callback def async_on_node_ready(node: ZwaveNode) -> None: LOGGER.debug("Processing node %s", node) register_node_in_dev_reg(hass, entry, dev_reg, client, node) for disc_info in async_discover_values(node): LOGGER.debug("Discovered entity: %s", disc_info) async_dispatcher_send( hass, f"{DOMAIN}_{entry.entry_id}_add_{disc_info.platform}", disc_info ) node.on( "value notification", lambda event: async_on_value_notification(event["value_notification"]), ) node.on( "notification", lambda event: async_on_notification(event["notification"]) ) @callback def async_on_node_added(node: ZwaveNode) -> None: if node.ready: async_on_node_ready(node) return LOGGER.debug("Node added: %s - waiting for it to become ready.", node.node_id) node.once( "ready", lambda event: async_on_node_ready(event["node"]), ) register_node_in_dev_reg(hass, entry, dev_reg, client, node) @callback def async_on_node_removed(node: ZwaveNode) -> None: dev_id = get_device_id(client, node) device = dev_reg.async_get_device({dev_id}) dev_reg.async_remove_device(device.id) @callback def async_on_value_notification(notification: ValueNotification) -> None: device = dev_reg.async_get_device({get_device_id(client, notification.node)}) value = notification.value if notification.metadata.states: value = notification.metadata.states.get(str(value), value) hass.bus.async_fire( ZWAVE_JS_EVENT, { ATTR_TYPE: "value_notification", ATTR_DOMAIN: DOMAIN, ATTR_NODE_ID: notification.node.node_id, ATTR_HOME_ID: client.driver.controller.home_id, ATTR_ENDPOINT: notification.endpoint, ATTR_DEVICE_ID: device.id, ATTR_COMMAND_CLASS: notification.command_class, ATTR_COMMAND_CLASS_NAME: notification.command_class_name, ATTR_LABEL: notification.metadata.label, ATTR_PROPERTY_NAME: notification.property_name, ATTR_PROPERTY_KEY_NAME: notification.property_key_name, ATTR_VALUE: value, }, ) @callback def async_on_notification(notification: Notification) -> None: device = dev_reg.async_get_device({get_device_id(client, notification.node)}) hass.bus.async_fire( ZWAVE_JS_EVENT, { ATTR_TYPE: "notification", ATTR_DOMAIN: DOMAIN, ATTR_NODE_ID: notification.node.node_id, ATTR_HOME_ID: client.driver.controller.home_id, ATTR_DEVICE_ID: device.id, ATTR_LABEL: notification.notification_label, ATTR_PARAMETERS: notification.parameters, }, ) try: async with timeout(CONNECT_TIMEOUT): await client.connect() except (asyncio.TimeoutError, BaseZwaveJSServerError) as err: LOGGER.error("Failed to connect: %s", err) raise ConfigEntryNotReady from err else: LOGGER.info("Connected to Zwave JS Server") unsubscribe_callbacks: List[Callable] = [] hass.data[DOMAIN][entry.entry_id] = { DATA_CLIENT: client, DATA_UNSUBSCRIBE: unsubscribe_callbacks, } services = ZWaveServices(hass) services.async_register() async_register_api(hass) async def start_platforms() -> None: await asyncio.gather( *[ hass.config_entries.async_forward_entry_setup(entry, component) for component in PLATFORMS ] ) driver_ready = asyncio.Event() async def handle_ha_shutdown(event: Event) -> None: await disconnect_client(hass, entry, client, listen_task, platform_task) listen_task = asyncio.create_task( client_listen(hass, entry, client, driver_ready) ) hass.data[DOMAIN][entry.entry_id][DATA_CLIENT_LISTEN_TASK] = listen_task unsubscribe_callbacks.append( hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, 
handle_ha_shutdown) ) try: await driver_ready.wait() except asyncio.CancelledError: LOGGER.debug("Cancelling start platforms") return LOGGER.info("Connection to Zwave JS Server initialized") stored_devices = device_registry.async_entries_for_config_entry( dev_reg, entry.entry_id ) known_devices = [ dev_reg.async_get_device({get_device_id(client, node)}) for node in client.driver.controller.nodes.values() ] for device in stored_devices: if device not in known_devices: dev_reg.async_remove_device(device.id) for node in client.driver.controller.nodes.values(): async_on_node_added(node) client.driver.controller.on( "node added", lambda event: async_on_node_added(event["node"]) ) client.driver.controller.on( "node removed", lambda event: async_on_node_removed(event["node"]) ) platform_task = hass.async_create_task(start_platforms()) hass.data[DOMAIN][entry.entry_id][DATA_START_PLATFORM_TASK] = platform_task return True
def build_ppo_moa_trainer(moa_config): tf.keras.backend.set_floatx("float32") trainer_name = "MOAPPOTrainer" moa_ppo_policy = build_tf_policy( name="MOAPPOTFPolicy", get_default_config=lambda: moa_config, loss_fn=loss_with_moa, make_model=build_model, stats_fn=extra_moa_stats, extra_action_fetches_fn=extra_moa_fetches, postprocess_fn=postprocess_ppo_moa, gradients_fn=clip_gradients, before_init=setup_config, before_loss_init=setup_ppo_moa_mixins, mixins=[LearningRateSchedule, EntropyCoeffSchedule, KLCoeffMixin, ValueNetworkMixin] + get_moa_mixins(), ) moa_ppo_trainer = build_trainer( name=trainer_name, default_policy=moa_ppo_policy, make_policy_optimizer=choose_policy_optimizer, default_config=moa_config, validate_config=validate_ppo_moa_config, after_optimizer_step=update_kl, after_train_result=warn_about_bad_reward_scales, mixins=[MOAResetConfigMixin], ) return moa_ppo_trainer
# -*- coding: utf8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals class PyleroLibException(Exception): pass
// Validate will accept a string that is the manifest file content and validate it func Validate(manifestContent string) (interfaces.Manifest, error) { var err error var manifest interfaces.Manifest var schema gojsonschema.JSONLoader var document gojsonschema.JSONLoader apiVersion, err := getVersion(manifestContent) if err != nil { return nil, err } switch apiVersion { case v1beta1.APIVersion: manifest = &v1beta1.Manifest{} schema = gojsonschema.NewStringLoader(v1beta1.Schema) default: return nil, fmt.Errorf("the manifest apiVersion is not supported: %s", apiVersion) } if err = manifest.Load(manifestContent); err != nil { return nil, fmt.Errorf("could not parse the manifest as %s yaml: %s", v1beta1.APIVersion, err) } document = gojsonschema.NewGoLoader(manifest) result, err := gojsonschema.Validate(schema, document) if err != nil { return nil, fmt.Errorf("error running manifest schema validation: %s", err) } if !result.Valid() { errorList := "manifest validation errors:" for i, validationErr := range result.Errors() { errorList = fmt.Sprintf("%s (%d) %v", errorList, (i + 1), validationErr) } return nil, errors.New(errorList) } return manifest, nil }
package commands import ( "context" "io" "github.com/christianalexander/kvdb" "github.com/christianalexander/kvdb/transactors" "github.com/sirupsen/logrus" ) // begin is a command that begins a transaction. type begin struct { writer io.Writer transactor transactors.Transactor setTxID func(int64) } // Execute satisfies the command interface. func (q begin) Execute(ctx context.Context) error { txID, err := q.transactor.Begin(ctx) if err == nil { q.writer.Write([]byte("OK\r\n")) logrus.Printf("Begin setting txid to %d", txID) q.setTxID(txID) } return err } func (q begin) Undo(ctx context.Context) error { return nil } func (q begin) ShouldAutoTransact() bool { return false } // NewBegin creates a new begin command. func NewBegin(writer io.Writer, transactor transactors.Transactor, setTxID func(int64)) kvdb.Command { return begin{writer, transactor, setTxID} }
/// Create the ground plane geometry. fn create_ground_plane_geometry(_context: &Game, shader: GLuint) -> (GLuint, GLuint) { let mesh = include_code!("ground_plane.obj.in"); let mut gp_vp_vbo = 0; unsafe { gl::GenBuffers(1, &mut gp_vp_vbo); gl::BindBuffer(gl::ARRAY_BUFFER, gp_vp_vbo); gl::BufferData( gl::ARRAY_BUFFER, (3 * mem::size_of::<GLfloat>() * mesh.len()) as GLsizeiptr, mesh.points.as_ptr() as *const GLvoid, gl::STATIC_DRAW ); } assert!(gp_vp_vbo > 0); let gp_vp_loc = unsafe { gl::GetAttribLocation(shader, glh::gl_str("vp").as_ptr()) }; assert!(gp_vp_loc > -1); let gp_vp_loc = gp_vp_loc as u32; let mut gp_vp_vao = 0; unsafe { gl::GenVertexArrays(1, &mut gp_vp_vao); gl::BindVertexArray(gp_vp_vao); gl::BindBuffer(gl::ARRAY_BUFFER, gp_vp_vbo); gl::VertexAttribPointer(gp_vp_loc, 3, gl::FLOAT, gl::FALSE, 0, ptr::null()); gl::EnableVertexAttribArray(gp_vp_loc); } assert!(gp_vp_vao > 0); (gp_vp_vbo, gp_vp_vao) }
import { TextDocumentContentProvider, WebviewPanel, EventEmitter, Uri, window, ViewColumn } from 'vscode'; import { join } from 'path'; class TaskWebview implements TextDocumentContentProvider { static type = 'view'; static currentView?: WebviewPanel; static currentInstance?: TaskWebview; onDidChangeEmitter = new EventEmitter<Uri>(); onDidChange = this.onDidChangeEmitter.event; webview?: WebviewPanel; extensionPath: string; provideTextDocumentContent(uri: Uri): string { return TaskWebview.type; } update(content: string, title: string = TaskWebview.type) { const webview = this.webview; if (!webview) return; webview.title = title; webview.webview.html = content; } static createOrShow(extensionPath: string): TaskWebview | undefined { const webview = TaskWebview.currentView; const column = webview ? webview.viewColumn : ViewColumn.One; if (webview) { webview.reveal(column); return; } const newWebview = window.createWebviewPanel(TaskWebview.type, '', column || ViewColumn.One, { enableScripts: true, localResourceRoots: [Uri.file(join(extensionPath, 'out'))], }); return new TaskWebview(newWebview, extensionPath); } dispose() { TaskWebview.currentView?.dispose(); TaskWebview.currentView = void 0; TaskWebview.currentInstance = void 0; } constructor(webview: WebviewPanel, extension: string) { this.webview = webview; this.extensionPath = extension; TaskWebview.currentInstance = this; TaskWebview.currentView = webview; webview.onDidDispose(this.dispose.bind(this), this, []); // tab switch webview.onDidChangeViewState((...arg) => {}); // webview postMessage webview.webview.onDidReceiveMessage(message => { switch (message.command) { case 'alert': window.showErrorMessage(message.text); return; } }); } } export default TaskWebview;
c_1j = [_ for _ in input()] c_2j = [_ for _ in input()] c_3j = [_ for _ in input()] print(c_1j[0] + c_2j[1] + c_3j[2])
/** * Loads the given URL into {@link #webView}. * * @param url The URL to load. */ protected void loadUrl(@NonNull String url) { if (client != null) { client.setLoadingInitialUrl(true); } WebViewUtil.loadUrlBasedOnOsVersion(this, webView, url, this, errorNotification, okHttpClientProvider); }
/*****************************************************************************/ /* */ /* (C) Copyright 1996 <NAME> */ /* Portions (C) Copyright 1999 <NAME> */ /* */ /* A L L R I G H T S R E S E R V E D */ /* */ /*****************************************************************************/ /* */ /* This source code is NOT in the public domain and it CANNOT be used or */ /* distributed without written permission from the author. */ /* */ /*****************************************************************************/ /* */ /* How to contact the author: <NAME> of 2:332/504@fidonet */ /* <NAME> */ /* 41100 Modena */ /* Italy */ /* */ /*****************************************************************************/ // DirStack.Cpp #include "apgenlib.hpp" #include <string.h> #include <unistd.h> struct _DirStackEl { char Dir[PATH_MAX+1]; _DirStackEl *lower; }; DirStack::DirStack () { Top = NULL; } DirStack::~DirStack () { _DirStackEl *el = Top; while (el) { _DirStackEl *lower = el->lower; delete el; el = lower; } } int DirStack::Push (const char *newdir) { char startpath[PATH_MAX+1]; if (!getcwd (startpath, sizeof (startpath))) // save current path return 1; if (cdd (newdir)) return 1; // change successful: record it _DirStackEl *lower = Top; Top = new _DirStackEl; strcpy (Top->Dir, startpath); Top->lower = lower; return 0; } int DirStack::Pop () { if (!Top) return 1; if (cdd (Top->Dir)) return 1; // popdir successfull: release current block and move pointer _DirStackEl *lower = Top->lower; delete Top; Top = lower; return 0; }
def LBFGS_descent_direction(self, grad) : def scal( s, y ) : return self.scals_L2(s, y) q = grad if len(self.iHessian) > 0 : self.iHessian[-1][1] += q self.iHessian[-1][2] = scal( self.iHessian[-1][0], self.iHessian[-1][1]).inverse() print(self.iHessian[-1][2]) prevs = self.iHessian a = [] for (si, yi, ri) in reversed(prevs) : a.append( ri * scal( si, q ) ) q -= a[-1] * yi if len(prevs) > 0 : Hk0 = scal( prevs[-1][0], prevs[-1][1] ) / scal( prevs[-1][1], prevs[-1][1] ) z = Hk0 * q else : z = q for ((si, yi, ri), ai) in zip( prevs, reversed(a) ) : b = ri * scal( yi, z ) z += (ai - b) * si z = -z return z
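The method above is an L-BFGS-style two-loop recursion expressed through the class's own scal/iHessian bookkeeping. For reference, here is a minimal, self-contained sketch of the textbook two-loop recursion over plain double arrays; the helper names and array-based history below are illustrative and not part of the code above.

import java.util.List;

/** Textbook L-BFGS two-loop recursion: returns the descent direction -H * grad. */
final class TwoLoopRecursion {

    static double dot(double[] a, double[] b) {
        double s = 0.0;
        for (int i = 0; i < a.length; i++) s += a[i] * b[i];
        return s;
    }

    /**
     * @param grad current gradient
     * @param s    history of position differences s_i = x_{i+1} - x_i (oldest first)
     * @param y    history of gradient differences y_i = g_{i+1} - g_i (oldest first)
     */
    static double[] direction(double[] grad, List<double[]> s, List<double[]> y) {
        int m = s.size();
        double[] q = grad.clone();
        double[] alpha = new double[m];
        double[] rho = new double[m];
        // First loop: newest pair to oldest.
        for (int i = m - 1; i >= 0; i--) {
            rho[i] = 1.0 / dot(y.get(i), s.get(i));
            alpha[i] = rho[i] * dot(s.get(i), q);
            for (int k = 0; k < q.length; k++) q[k] -= alpha[i] * y.get(i)[k];
        }
        // Initial Hessian scaling H_0 = gamma * I with gamma = (s.y) / (y.y) of the newest pair.
        double gamma = m > 0
                ? dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1))
                : 1.0;
        double[] z = new double[q.length];
        for (int k = 0; k < q.length; k++) z[k] = gamma * q[k];
        // Second loop: oldest pair to newest.
        for (int i = 0; i < m; i++) {
            double beta = rho[i] * dot(y.get(i), z);
            for (int k = 0; k < z.length; k++) z[k] += (alpha[i] - beta) * s.get(i)[k];
        }
        // Negate to obtain a descent direction, matching the final z = -z step above.
        for (int k = 0; k < z.length; k++) z[k] = -z[k];
        return z;
    }
}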
When Dr. Jemma Green was completing her PhD she found that apartments in Australia didn't have easy access to renewable energy. She decided to try and solve this problem by opening up the energy market, so people in apartment buildings could trade energy between themselves, without the need for a third-party energy company. To do this, Green developed a battery system which was the first of its kind in Australia. It would help people trade peer-to-peer, but she still needed software that would be able to sustain the process. Her answer was blockchain technology and it was with this idea that Power Ledger was born. Power Ledger is a peer-to-peer, blockchain-based energy trading platform, where renewable energy can be sold between buyers and sellers without a middleman. So, let's take a step back and work through what blockchain technology actually is and how it allows Power Ledger to work towards making the energy market more accessible. Getty Images/iStockphoto How does blockchain have the power make the energy market more democratic? What Is Blockchain, Anyway? Blockchain is an emerging technology that changes the way we trade and transact with each other. One major change that comes with blockchain is the way that it digitises transactions of both currencies and commodities. Another important factor that defines blockchain is the way it establishes peer-to-peer trading. This means that digital currencies or commodities, like energy, can be transferred from one person to another without relying on a middleman like a bank or power company to process the transaction for you. Blockchain is in its early stages of development and will soon become something we use without really knowing. In terms of development, it's like we are back in 1994, when the internet or email was developing -- we don't really understand the full capabilities of blockchain yet, but eventually we will use it as easily as we use the internet now. Sometimes it can be confusing to separate Bitcoin and blockchain but, it's important to remember that Bitcoin is simply the most mature example of a digital currency that uses blockchain technology. Bitcoin and blockchain are not the same thing -- Bitcoin just uses blockchain to move from person to person. The blockchain is able to manage and organise a lot more than just money and the work Power Ledger is doing is a perfect example of how blockchain can be used to make transactions with commodities more transparent. What Is Power Ledger And What Do They Do? So, back to Power Ledger. Power Ledger uses blockchain technology to allow buyers and sellers to trade solar energy between each other without needing to rely on a third-party company. Dr Jemma Green, Co-Founder and Chair of Power Ledger told HuffPost Australia that Power Ledger formed with two key concepts in mind -- peer-to-peer trading across energy networks and peer-to-peer trading of energy within buildings. The focus of these two key areas aims to start solving structural problems in the energy market where people are moving off the grid. "We set up the company in May of last year after seeing some problems in the market. So Dave [Co-Founder Dave Martin] who has been working in energy networks for two decades ... saw that the energy networks business has a structural problem and that because of distributed renewables the network is being used less," Green said. "So people left using it are paying more and [he] saw that the blockchain could encourage more trading across the network." 
So how are they approaching this widespread problem? "We are moving from a centralised system to a distributed energy system," Green explained. "So, I think that in terms of the peer-to-peer trading product it can be about maintaining the utilisation of the grid and making the idea of going off the grid less attractive because the grid is a very valuable asset. "Not everyone can afford to buy solar panels and they might not have the roof space for it or they are renting and I think our platform gives renters access to renewables." Green also said that Power Ledger's distributed platform means that landlords would have an incentive to invest in renewable energy for their tenants. "Landlords are like 'why would I put solar panels on the roof for the tenant if they get the benefit?', where as, if the landlord gets the income in the form of an electricity bill that would justify them investing in that in the first place." How Is Energy Digitised To Sit On The Blockchain? Power Ledger uses a software solution that measures the amount of electricity consumed or generated. The way the energy is moved from one person to another is via a unit called 'Sparkz' which is a tokenised unit of electricity or a digital representation of the energy that is being moved from the buyer to the seller. "Basically, Power Ledger has a software solution and it connects to smart metres. It reads the outputs of the metres in terms of electricity that is being consumed or generated and it records that on the blockchain," Green told HuffPost Australia. "As somebody purchases or consumes electricity that is also recorded on the blockchain and those two pieces of information from the transaction for which Sparkz are transferred -- so it happens synchronously. The actual recording of the electricity is the movement of Sparkz from the buyer's wallet to the seller's wallet." This means that the buyer is receiving the payment for the electricity at almost the same time the seller receives the payment for the energy. This happens in the form of Sparkz being transferred between the two parties via the blockchain. Once the seller receives the Sparkz into their digital wallet, they can convert those to Australian dollars. "So normally what happens is, you do a transaction and the payment happens afterwards and in electricity markets ... every 60 days you might get a bill and then in the wholesale market it could be 60-80 days, so this means that you get paid for the value that you provide instantly." How Can We Get Involved And Why Should We Care? Power Ledger has had massive success in both Australian and international markets. Even Elon Musk has approached Power Ledger for information about what they are doing in the energy space. Their success is fast proving that digital energy and the vast applications of blockchain technology is something we should be interested in. Currently people can buy POWR Tokens in their sale, which closes Friday October 6 at 6pm AEDT. In their presale, over $AUD17 million was raised and so far in their current sale they have raised $USD10.9 million. This sale is Australia's first ever Initial Coin Offering -- or ICO -- and Power Ledger is calling it the Token Generation Event. An ICO is a sale where a blockchain company like Power Ledger sells parts of their ledger to people and this gives them access to the function of the blockchain. This could be gaining an income stream, owning a portion of an asset or gaining a licence to some kind of data. 
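The Sparkz mechanism described above — a meter reading recorded on the ledger while a matching number of tokenised units moves from the buyer's wallet to the seller's wallet in the same step — can be pictured with a toy, in-memory ledger. The sketch below is purely an illustration of that idea; every class, account name and price in it is hypothetical and has no relation to Power Ledger's actual platform or APIs.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Toy in-memory ledger (illustration only): a trade debits the buyer's token
 * wallet, credits the seller's and appends the record in a single step,
 * mirroring the "payment moves with the meter reading" idea described above.
 */
final class ToyEnergyLedger {

    private final Map<String, Long> wallets = new HashMap<>(); // token balances per account
    private final List<String> chain = new ArrayList<>();      // append-only trade log

    void credit(String account, long tokens) {
        wallets.merge(account, tokens, Long::sum);
    }

    /** Settles a trade: debit buyer, credit seller and record the trade atomically. */
    synchronized void settle(String seller, String buyer, double kwh, long tokensPerKwh) {
        long cost = Math.round(kwh * tokensPerKwh);
        long buyerBalance = wallets.getOrDefault(buyer, 0L);
        if (buyerBalance < cost) {
            throw new IllegalStateException("insufficient tokens");
        }
        wallets.put(buyer, buyerBalance - cost);
        wallets.merge(seller, cost, Long::sum);
        chain.add(kwh + " kWh: " + buyer + " -> " + seller + " for " + cost + " tokens");
    }

    public static void main(String[] args) {
        ToyEnergyLedger ledger = new ToyEnergyLedger();
        ledger.credit("buyer-unit-4", 1000);
        ledger.settle("seller-unit-7", "buyer-unit-4", 2.5, 100); // 2.5 kWh at 100 tokens/kWh
        // Buyer ends with 750 tokens, seller with 250.
        System.out.println(ledger.wallets);
        System.out.println(ledger.chain);
    }
}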
To participate in the ICO you need to go the Power Ledger website. To purchase a POWR token you need to use a cryptocurrency -- either Bitcoin, Ether or Litecoin. Green says that digital energy and a peer-to-peer network is worth investing in, because it promotes a green and more accessible energy solution. "I would say that Power Ledger's mission is around democratising power and we think ... that is going to become even more pronounced and this is about people getting a return for the contribution that they make to the energy system," Green said. "And they get a proper return for those contributions and I think that the potential with the Power Ledger platform is to create a really fair and transparent energy system, that is low cost and low carbon."
package ax25irc; import ax25irc.aprs.parser.APRSPacket; import ax25irc.aprs.parser.APRSTypes; import ax25irc.aprs.parser.Digipeater; import ax25irc.aprs.parser.MessagePacket; import ax25irc.aprs.parser.ObjectPacket; import ax25irc.aprs.parser.Parser; import ax25irc.aprs.parser.PositionPacket; import ax25irc.aprs.parser.Position; import ax25irc.aprs.parser.SymbolDescription; import ax25irc.ircd.server.Channel; import ax25irc.ircd.server.Client; import ax25irc.ircd.server.IRCServer; import ax25irc.ircd.server.MessageListener; import ax25irc.ircd.server.ServMessage; import java.text.DecimalFormat; import java.text.NumberFormat; import java.util.Arrays; import java.util.List; public class AprsMessageProcessor implements MessageListener { IRCServer server; PacketModem modem; String targetCallsign = "APZ017"; Digipeater groupDigis[] = new Digipeater[]{new Digipeater("WIDE1-1")}; Digipeater directDigis[] = new Digipeater[]{new Digipeater("WIDE1-1")}; double baseLatitude = 45.52; double baseLongitude = -122.681944; String baseLocationName = "Portland, OR"; NumberFormat positionFormatter = new DecimalFormat("##0.00000"); public double getBaseLatitude() { return baseLatitude; } public double getBaseLongitude() { return baseLongitude; } public String getBaseLocationName() { return baseLocationName; } public AprsMessageProcessor(IRCServer server, PacketModem modem) { this.server = server; this.modem = modem; } public Digipeater[] getGroupDigis() { return groupDigis; } public Digipeater[] getDirectDigis() { return directDigis; } public void setGroupDigis(String digis) { groupDigis = Digipeater.parseList(digis, true).toArray(new Digipeater[0]); } public void setDirectDigis(String digis) { directDigis = Digipeater.parseList(digis, true).toArray(new Digipeater[0]); } @Override public void onMessage(ServMessage message) { if (message.getConnection() != null && !(message.getConnection() instanceof VirtualConnection)) { List<String> params = message.getParameters(); if (message.getCommand().equals("PRIVMSG")) { String destination = params.get(0); String messagebody = message.getText(); if (messagebody.startsWith("\001DCC SEND")) { if (server.getHost().getHostName().equalsIgnoreCase(destination)) { destination = "A"; } DCCFileTransfer transfer = new DCCFileTransfer(modem, message.getConnection().getNick(), destination, message.getConnection().getIpAddr(), messagebody.substring(1, messagebody.length() - 1)); transfer.start(); } else if ("#APRS".equalsIgnoreCase(destination)) { Position p = new Position(baseLatitude, baseLongitude); PositionPacket pp = new PositionPacket(p, messagebody); p.setSymbolTable('/'); p.setSymbolCode('.'); APRSPacket packet = new APRSPacket(message.getConnection().getNick(), targetCallsign, destination.length() == 1 ? Arrays.asList(groupDigis) : Arrays.asList(directDigis), pp); byte[] frame = packet.toAX25Frame(); try { APRSPacket testPacket = Parser.parseAX25(frame); System.out.println(testPacket); } catch (Exception ex) { ex.printStackTrace(); } modem.sendPacket(frame); } if ("#APRS-CHAT".equalsIgnoreCase(destination) || !destination.startsWith("#")) { if (destination.startsWith("#")) { destination = "A"; } MessagePacket mp = new MessagePacket(destination, messagebody, ""); APRSPacket packet = new APRSPacket(message.getConnection().getNick(), targetCallsign, destination.length() == 1 ?
Arrays.asList(groupDigis) : Arrays.asList(directDigis), mp); byte[] frame = packet.toAX25Frame(); modem.sendPacket(frame); } } } } public void process(AX25APRSPacket packet) { Channel aprsChannel = server.getChannel("#APRS"); Channel aprsRawChannel = server.getChannel("#APRS-RAW"); Channel aprsChatChannel = server.getChannel("#APRS-CHAT"); APRSPacket ap = packet.getPacket(); Client client = server.getClient(ap.getSourceCall()); if (client == null) { client = new Client(new VirtualConnection(ap.getSourceCall(), "APRS")); client.getConnection().getUser().setRealName(ap.getAprsInformation().getComment()); client.getConnection().setParentClient(client); server.addClient(client); aprsRawChannel.clientJoin(client); if (ap.getType() == APRSTypes.T_MESSAGE) { aprsChatChannel.clientJoin(client); } else { aprsChannel.clientJoin(client); } } else { //client.getConnection().getUser().setRealName(ap.getAprsInformation().getComment()); } if (ap.getType() == APRSTypes.T_MESSAGE) { MessagePacket msg = (MessagePacket) ap.getAprsInformation(); if (msg.getTargetCallsign().length() > 1) { Client tgtClient = server.getClient(msg.getTargetCallsign()); if (tgtClient != null) { tgtClient.sendMsgAndFlush(new ServMessage(ap.getSourceCall(), "PRIVMSG", msg.getTargetCallsign(), msg.getMessageBody())); if (msg.getMessageNumber().length() > 0) { System.out.println("Message requires ACK. Sending..."); MessagePacket ackMessage = new MessagePacket(ap.getSourceCall(), "ack", msg.getMessageNumber()); APRSPacket ackPacket = new APRSPacket(msg.getTargetCallsign(), targetCallsign, Arrays.asList(directDigis), ackMessage); modem.sendPacket(ackPacket.toAX25Frame()); } } } else { aprsChatChannel.sendMsg(new ServMessage(ap.getSourceCall(), "PRIVMSG", "#APRS-CHAT", msg.getMessageBody())); } } else { aprsChannel.sendMsg(new ServMessage(ap.getSourceCall(), "PRIVMSG", "#APRS", packetToString(ap))); } client.updateLastActive(); aprsChannel.sendMsg(new ServMessage(packet.getAx25Frame().source, "PRIVMSG", "#APRS-RAW", ap.toString())); } public String packetToString(APRSPacket p) { StringBuilder message = new StringBuilder(); switch (p.getType()) { case T_UNSPECIFIED: message.append("Unspecified format "); message.append(p.toString()); break; case T_POSITION: { PositionPacket pp = (PositionPacket) p.getAprsInformation(); message.append("position "); Position pos = pp.getPosition(); if (pos.getAltitude() > 0) { message.append("altitude: " + pos.getAltitude() + " "); } message.append("lat: " + positionFormatter.format(pos.getLatitude()) + ", "); message.append("lon: " + positionFormatter.format(pos.getLongitude()) + ", "); message.append("sym: " + SymbolDescription.decode(pos.getSymbolTable(), pos.getSymbolCode()) + ", "); Position basePos = new Position(getBaseLatitude(), getBaseLongitude()); message.append("distance from " + getBaseLocationName() + ": " + Math.round(basePos.distance(pos)) + "Mi, bearing: " + basePos.bearing(pos) + ", "); message.append("comment: " + pp.getComment()); } break; case T_WX: message.append("weather ").append(p.toString()); break; case T_THIRDPARTY: message.append("3rd party "); String ps = p.toString(); try { APRSPacket p3 = Parser.parse(ps.substring(ps.indexOf('}') + 1)); message.append(packetToString(p3)); } catch (Exception ex) { ex.printStackTrace(); } // BALDPK>APRS:}N7TNG-1>APMI06,TCPIP,BALDPK*:@060639z4538.30N/12240.53W-WX3in1Plus2.0 U=12.5V,T=7.8C/46.0F // BALDPK>APRS:}WB7QAZ-10>APMI06,TCPIP,BALDPK*:@060637z4516.50N/12237.80W_271/000g000t046r000p015P015h97b10192Canby WX break; case T_QUERY: 
message.append("query ").append(p.toString()); break; case T_OBJECT: { ObjectPacket obj = (ObjectPacket) p.getAprsInformation(); message.append("object "); message.append(obj.getObjectName() + " "); Position pos = obj.getPosition(); if (pos.getAltitude() > 0) { message.append("altitude: " + pos.getAltitude() + " "); } message.append("lat: " + positionFormatter.format(pos.getLatitude()) + " "); message.append("lon: " + positionFormatter.format(pos.getLongitude()) + " "); message.append("sym: " + SymbolDescription.decode(pos.getSymbolTable(), pos.getSymbolCode()) + ", "); Position basePos = new Position(getBaseLatitude(), getBaseLongitude()); message.append("distance from " + getBaseLocationName() + ": " + Math.round(basePos.distance(pos)) + "Mi, bearing: " + basePos.bearing(pos) + ", "); message.append("comment: " + obj.getComment()); } break; case T_ITEM: message.append("item ").append(p.toString()); break; case T_NORMAL: message.append("normal ").append(p.toString()); break; case T_KILL: message.append("kill ").append(p.toString()); break; case T_STATUS: message.append("status ").append(p.toString()); break; case T_STATCAPA: message.append("statcapa ").append(p.toString()); break; case T_TELEMETRY: message.append("telemetry ").append(p.toString()); break; case T_USERDEF: message.append("userdef ").append(p.toString()); break; case T_MESSAGE: MessagePacket msg = (MessagePacket) p.getAprsInformation(); message.append("message "); message.append("to: " + msg.getTargetCallsign() + ", "); message.append("num: " + msg.getMessageNumber() + ", "); message.append("body: " + msg.getMessageBody()); break; case T_NWS: message.append("nws ").append(p.toString()); break; } return message.toString(); } }
Amazon's first e-reader, the Kindle, was so popular that it sold out in a matter of hours (Jon 'ShakataGaNai' Davis / Wikimedia Commons). The legacy of color e-paper may be muted and dim, but its past, at least, is black-and-white: monochrome E Ink set the tone for a decade of reflective, low-power displays. Naturally, it didn't take long for consumers to want more -- sure, a sunlight-readable display that lasted for days on a single charge was great, but what about color? This, too, was in the works for a few years, but progress was slow. Early prototypes from Fujitsu did a decent job of mirroring their monochrome cousins' modest power consumption, but images often appeared washed out and faded, like a newspaper left in the sun too long. The technology failed to beat the next Kindle to market, but improved as the years went on. In the meantime, Barnes & Noble added a splash of color to the e-reader market by attaching a secondary, peripheral LCD display to its Nook e-reader -- providing a vibrant and active navigation hub under its reading surface. The race to create a consumer-ready color e-paper display heated up as Barnes & Noble, Sony and Amazon fought over market share -- if electronic reading devices were to be the next big thing, then surely color would be the category's killer feature. Companies like Samsung, Bridgestone, E Ink (then known as PVI), Fujitsu, Qualcomm, Philips and Plastic Logic spent the better parts of 2009 and 2010 teasing us with brighter screens, faster refresh rates and flexible-display technology. The original nook had a 6-inch e-paper display, with a color touchscreen at the bottom (Andrew Magill (Amagill) / Wikimedia Commons). Despite the excitement surrounding color e-paper, however, few firms were actually ready to put their cards on the table. In early 2010, Amazon CEO Jeff Bezos dispelled rumors of an incoming Kindle Color, saying that current prototypes were simply "not ready for prime-time production," based on what he'd seen in the company's labs. Sony also dodged the subject, committing itself to its existing line of monochrome e-readers until higher-quality panels were available. Even the companies behind the technology openly admitted that it wasn't ready -- PVI and Qualcomm both delayed their respective E Ink Triton and Mirasol color displays, independently describing them as unsatisfactory. Worse still was the high cost of color e-paper development, which drove Samsung to back out of the industry in 2010. Unfortunately, the delays didn't stave off consumer demand for a color reading device, and it came to market through the path of least resistance: the LCD. This veteran technology may not have been able to compete with color e-paper in the arenas of power consumption or visibility in direct sunlight, but it made up for these faults with bright, accurate color reproduction and the ability to play back video content. More importantly, the technology was available, and the growing tablet market soon offered a ready alternative to the developing color e-paper technology. Companies betting on color e-paper were soon forced to re-evaluate their strategies, Qualcomm told Engadget back at SID 2011, citing Apple's inaugural tablet as the catalyst for its partners' reconsideration. The Koobe Jin Yong e-reader with Qualcomm's Mirasol display technology (Qualcomm). 
The original iPad didn't kill the color e-reader independently, of course -- the device was simply too large and too expensive to scratch the itch for every digital-reading enthusiast with an eye for color. Barnes & Noble's first full-color e-reader didn't have these problems. Launched in late 2010 for $250, the 7-inch Nook Color was the right device at the right time, introducing an affordable color reading device while simultaneously giving the bookseller an edge in the growing e-reader market. It didn't take long for Amazon to react to the positive consumer response, launching its own LCD color e-reader, the Kindle Fire, for a scant $200 the following year. The color e-paper offerings of the same era just couldn't compete -- Kyobo's $310 Mirasol eReader was panned for having poor battery life and unstable software, and an E Ink Triton device by Hanvon priced itself out of the market with a staggering $530 sticker. What's worse, consumers didn't even seem to know these products existed. The damage was done; the category's biggest brands knew they could create a successful color e-reader without next-generation e-paper. By the time ASUS and Google trumped the Kindle Fire with the Nexus 7, the technology was all but forgotten. The damage was done; the category's biggest brands knew they could create a successful color e-reader without next-generation e-paper. Color e-paper may have faded from the public consciousness after media tablets usurped its role in the consumer electronics space, but the technology itself lives on, albeit dimly. PVI, a company so dedicated to reflective-display technology that it changed its name to E Ink Holdings Incorporated, refocused its efforts on new markets, creating programmable supermarket price tags and digital billboards for European firms. It's even limping along in the color e-reader space, although we wouldn't call it a major player -- the most recent device to sport the company's Triton color E Ink display, the Jetbook, sells for an astounding $500. Hardly priced to sell, but the company tells us that it has seen some success in European classrooms. Despite these efforts, the company isn't exactly shining: in its Q2 2013 financial report, E Ink posted a $33.6 million loss -- its biggest in four years. Citing numbers from IHS, the report optimistically looked to Western European purchasing trends to cushion the blow, but more telling are the losses suffered in North America, which is now exhibiting a 15 percent loss in worldwide e-reader shipments when compared to 2011. The devices just aren't selling as fast as they used to. Other companies are sending mixed messages. Qualcomm's Mirasol technology shipped in precious few devices before the company put a lid on production last summer, yet it continues to demonstrate new and intriguing prototypes. At SID 2013, for instance, the company trotted out a smartphone with a reflective 5.1-inch, 2,560 x 1,440 display and a 1.5-inch smartwatch, teasing a future of color e-paper-equipped hardware. The company was quick to point out that the devices were mere mock-ups, but a similar watch surfaced at the company's Uplinq developer conference earlier this week, taking the name of Toq. The smartphone display is still missing in action, however, and Qualcomm says it'll need a few more years in R&D before it's ready for market. When we asked the company if it was still developing screens for color digital readers, Qualcomm representatives could only tell us that they had nothing new to announce. 
Clearly the company's Mirasol technology is still moving forward, but the firm seems focused on smaller devices. Amazon's recent Liquavista acquisition raises even more questions: if the iPad, Kindle Fire and Nook Color sealed the fate of color e-paper years ago, why did one of the industry's biggest e-reader manufacturers purchase a company known for low-power color displays? Reaching out directly for an answer proved futile for Engadget -- the company won't budge on the future of color e-paper or Amazon's intentions for the next-generation Kindle device. Ectaco's pricey jetBook Color 2 with a Triton 2 color e ink screen (Ectaco). Hushed acquisitions and quiet color-display advancements aren't enough to save color e-paper, however. More daunting than the display category's technological hurdles are the commercial roadblocks in its path: consumers are simply losing interest in the e-reader category as a whole. That certainly isn't to say that it's a dead or dying market, but it's slowly trending toward the niche. According to an IDC forecast released in March, e-reader shipments fell by a staggering 31 percent in a single year -- peaking at 26.4 million in 2011 and dropping to 18.2 million in 2012. At the same time, tablet sales have increased by about 11 percent, with about half of all devices sold falling into an e-reader-competitive form factor, measuring eight inches or smaller. Worse still, these numbers are for traditional monochrome e-readers, not the hopeful color models that failed to take flight. The Kindle brand and its sunlight-readable e-paper display probably aren't going anywhere, but the category is edging away from the mainstream. Users demand more out of their devices these days, and slow-refreshing E Ink just can't cut it for a media tablet. If our predictions for the future need to be grounded in reality, then maybe it's time we finally put our color e-reader dreams to bed. The technology may eventually find a home somewhere, but at this rate, it likely won't be on our nightstand.
/* Copyright (c) 2017 TOSHIBA Digital Solutions Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.toshiba.mwcloud.gs; import java.io.Closeable; import java.util.Collections; import java.util.Properties; import com.toshiba.mwcloud.gs.common.GridStoreFactoryProvider; import com.toshiba.mwcloud.gs.common.LoggingUtils; import com.toshiba.mwcloud.gs.common.LoggingUtils.BaseGridStoreLogger; /** * <div lang="ja"> * {@link GridStore}インスタンスを管理します。 * * <p>{@link GridStore}インスタンス共通のクライアント設定や * 使用済みのコネクションを管理します。</p> * * <p>GridDBにアクセスするためには、このファクトリを介して * {@link GridStore}インスタンスを取得する必要があります。</p> * * <p>このクラスの公開メソッドは、すべてスレッド安全です。</p> * * <p>また、クライアントロギングとクライアント設定ファイルの機能を使用できます。</p> * * <p><b>クライアントロギング</b></p> * <p>クラスパスにロギングライブラリを含めることで、ログ出力を有効にした場合にログを出力できます。</p> * <p>ロギングフレームワークはSLF4Jを使用します。 * ロガーの名称は「com.toshiba.mwcloud.gs.GridStoreLogger」で始まります。 * また、SLF4Jのバージョンは1.6.0以上を推奨しています。</p> * * <p><b>クライアント設定ファイル</b></p> * <p>設定ファイル「gs_client.properties」を含むディレクトリと、設定ライブラリ「gridstore-conf.jar」をクラスパスに * 共に含めることで、{@link GridStoreFactory}に設定ファイルの内容を適用することができます。 * 設定ファイルを用いることで、アプリケーションコードを修正せずに、接続設定を変更できます。 * 設定ファイルには以下のPropertiesオブジェクトと同じプロパティ項目を指定できます。 * 設定ファイルの内容はPropertiesオブジェクトの設定より優先的に適用されます。</p> * <dl><dt>factory カテゴリプロパティ</dt> * <dd>有効なプロパティの項目は{@link GridStoreFactory#setProperties(Properties)}の仕様に準拠します。</dd> * <dd>以下のように「factory.」をプロパティ名の前に付けて記述します。 * <pre>factory.maxConnectionPoolSize = 10</pre> * </dd> * <dt>store カテゴリプロパティ</dt> * <dd>有効なプロパティの項目は{@link GridStoreFactory#getGridStore(Properties)}の仕様に準拠します。</dd> * <dd>以下のように「store.」をプロパティ名の前に付けて記述します。 * <pre>store.clusterName = Project1</pre> * </dd> * <dd> * <p>以下の場合は例外が発生します。</p> * <ul><li>設定ファイルを含む複数のディレクトリがクラスパスに含まれる場合</li> * <li>設定ライブラリのみがクラスパスに含まれる場合</li> * <li>存在しないカテゴリ名が指定された場合</li><li>プロパティ名がカテゴリ名のみからなる場合</li></ul> * <p>また、設定ファイルを含むディレクトリのみがクラスパスに含まれ、設定ライブラリがクラスパスに含まれない場合には、 * 設定ファイルの内容は適用されません。</p> * </dd> * </dl> * * </div><div lang="en"> * Manages a {@link GridStore} instance. * * <p>It manages the client settings shared by {@link GridStore} instances and used connections.</p> * * <p>To access GridDB, you need to get a {@link GridStore} instance using this Factory.</p> * * <p>All the public methods of this class are thread safe.</p> * * <p>Client Logging and Client Properties File are available.</p> * * <p><b>Client Logging</b></p> * <p>Logging is enabled by adding the logging library SLF4J to the classpath.</p> * <p>The name of the logger begins with "com.toshiba.mwcloud.gs.GridStoreLogger". The recommended version of SLF4J * is 1.6.0 or above.</p> * * <p><b>Client Properties File</b></p> * <p>By including both of the directory including the properties file "gs_client.properties" * and the configuration library "gridstore-conf.jar" in the classpath, * properties of the file are applied to GridStoreFactory. * Connection properties can be changed without editing application codes * by using the properties file. * The priority of applying the properties of the file is higher than * the properties of Properties object. 
* The following properties are available for the Client Properties File. * <dl><dt>Factory Category Property</dt> * <dd>Properties conform to {@link GridStoreFactory#setProperties(Properties)} specifications are available.</dd> * <dd>Add "factory." before each property name as following. * <pre>factory.maxConnectionPoolSize = 10</pre> * </dd> * <dt>Store Category Property</dt> * <dd>Properties conform to {@link GridStoreFactory#getGridStore(Properties)} specifications are available.</dd> * <dd>Add "store." before each property name as following. * <pre>store.clusterName = Project1</pre> * </dd> * <dd> * <p>Exceptions will be thrown in the cases as following.</p> * <ul><li>If two or more directories of the properties files are included in the classpath</li> * <li>If only the configuration library is included in the classpath</li> * <li>If an unavailable property name is specified</li> * <li>If a specified property name is made up of only the category name</li></ul> * <p>By including only the directory including the properties file, properties * of the file are not applied to GridStoreFactory.</p> * * </div> */ public abstract class GridStoreFactory implements Closeable { static { registerLoggers(); } private static final GridStoreFactory INSTANCE = newInstance(); protected GridStoreFactory() { } private static GridStoreFactory newInstance() { return GridStoreFactoryProvider.getProvider( GridStoreFactoryProvider.class, GridStoreFactory.class, Collections.<Class<?>>emptySet()).getFactory(); } /** * <div lang="ja"> * デフォルトのインスタンスを取得します。 * * <p>このクラスのロード時に、{@link GridStoreFactory}クラスのデフォルトのサブクラスが * ロードされ、インスタンスが生成されます。</p> * </div><div lang="en"> * Returns a default instance. * * <p>When loading this class, a default subclass of the {@link GridStoreFactory} * class is loaded and an instance is created.</p> * </div> */ public static GridStoreFactory getInstance() { return INSTANCE; } /** * <div lang="ja"> * 指定のプロパティを持つ{@link GridStore}を取得します。 * * <p>{@link GridStore}を取得した時点では、各{@link Container}を * 管理するマスタノード(以下、マスタ)のアドレス探索を必要に応じて行うだけであり、 * 認証処理は行われません。 * 実際に各{@link Container}に対応するノードに接続する必要が生じた * タイミングで、認証処理が行われます。</p> * * <p>以下のプロパティを指定できます。サポート外の名称のプロパティは * 無視されます。</p> * <table> * <thead><tr><th>名称</th><th>説明</th></tr></thead> * <tbody> * <tr><td>host</td><td>接続先ホスト名。IPアドレス(IPV4のみ)も可。 * マスタを手動指定する場合は必須。マスタを自動検出する場合は設定しない</td></tr> * <tr><td>port</td><td>接続先ポート番号。{@code 0}から * {@code 65535}までの数値の文字列表現。マスタを手動指定する場合は必須。 * マスタを自動検出する場合は設定しない</td></tr> * <tr><td>notificationAddress</td><td>マスタ自動検出に用いられる通知情報を * 受信するためのIPアドレス(IPV4のみ)。省略時はデフォルトのアドレスを使用。 * notificationMemberおよびnotificationProviderと同時に指定することはできない</td></tr> * <tr><td>notificationPort</td><td>マスタ自動検出に用いられる通知情報を * 受信するためのポート番号。{@code 0}から{@code 65535}までの数値の * 文字列表現。省略時はデフォルトのポートを使用</td></tr> * <tr><td>clusterName</td><td>クラスタ名。接続先のクラスタに設定されている * クラスタ名と一致するかどうかを確認するために使用される。省略時もしくは空文字列を * 指定した場合、クラスタ名の確認は行われない</td></tr> * <tr><td>database</td><td>接続先のデータベース名。省略時は全てのユーザが * アクセス可能な「public」データベースに自動接続される。接続ユーザは接続 * データベースに属するコンテナを操作できる。</td></tr> * <tr><td>user</td><td>ユーザ名</td></tr> * <tr><td>password</td><td>ユーザ認証用のパスワード</td></tr> * <tr><td>consistency</td><td>次のいずれかの一貫性レベル。 * <dl> * <dt>{@code "IMMEDIATE"}</dt><dd>他のクライアントからの更新結果は、 * 該当トランザクションの完了後即座に反映される</dd> * <dt>{@code "EVENTUAL"}</dt><dd>他のクライアントからの更新結果は、 * 該当トランザクションが完了した後でも反映されない場合がある。 * {@link Container}に対する更新操作は実行できない</dd> * </dl> * デフォルトでは{@code "IMMEDIATE"}が適用されます * </td></tr> * <tr><td>transactionTimeout</td><td>トランザクションタイムアウト時間の最低値。 * 関係する{@link 
Container}における各トランザクションの開始時点から適用。 * {@code 0}以上{@link Integer#MAX_VALUE}までの値の文字列表現であり、 * 単位は秒。ただし、タイムアウト時間として有効に機能する範囲に上限があり、 * 上限を超える指定は上限値が指定されたものとみなされる。 * {@code 0}の場合、後続のトランザクション処理がタイムアウトエラーに * なるかどうかは常に不定となる。省略時は接続先GridDB上のデフォルト値を使用 * </td></tr> * <tr><td>failoverTimeout</td><td>フェイルオーバ処理にて新たな接続先が * 見つかるまで待機する時間の最低値。{@code 0}以上 * {@link Integer#MAX_VALUE}までの数値の文字列表現であり、単位は秒。 * {@code 0}の場合、フェイルオーバ処理を行わない。省略時はこのファクトリの * 設定値を使用</td></tr> * <tr><td>containerCacheSize</td><td> * コンテナキャッシュに格納するコンテナ情報の最大個数。 * {@code 0}以上{@link Integer#MAX_VALUE}までの数値の文字列表現。 * 値が{@code 0}の場合、コンテナキャッシュを使用しないことを意味する。 * {@link Container}を取得する際にキャッシュにヒットした場合は、 * GridDBへのコンテナ情報の問い合わせを行わない。 * 省略時は既存の設定値を使用。バージョン1.5よりサポート</td></tr> * <tr><td>dataAffinityPattern</td><td> * データアフィニティ機能のアフィニティ文字列を次のようにコンテナパターン * とペアで任意個数指定する。 * <pre>(コンテナ名パターン1)=(アフィニティ文字列1),(コンテナ名パターン2)=(アフィニティ文字列2),...</pre> * {@link ContainerInfo#setDataAffinity(String)}が未指定の * {@link Container}を追加する際に、コンテナ名が指定したいずれかの * コンテナ名パターンに合致する場合に、ペアで指定したアフィニティ文字列が * 適用される。複数のコンテナ名パターンが合致する場合は、記述された * 順番で最初に合致したパターンが用いられる。コンテナ名パターンは * ワイルドカード「%」を使用できる他は、コンテナ名の規則に準拠する。 * アフィニティ文字列は{@link ContainerInfo#setDataAffinity(String)}の * 規則に準拠する。パターンやその区切りに使用される記号をコンテナ名などに * 用いるには、「\」を用いてエスケープする。ただしコンテナ名やアフィニティ * 文字列の規則に反する記号は使用できない。 * バージョン2.7よりサポート * </td></tr> * <tr><td>notificationMember</td><td> * 固定リスト方式を使用して構成されたクラスタに接続する場合に、クラスタノードのアドレス・ポートのリストを * 次のように指定する。 * <pre>(アドレス1):(ポート1),(アドレス2):(ポート2),...</pre> * notificationAddressおよびnotificationProviderと同時に指定することはできない。 * バージョン2.9よりサポート * </td></tr> * <tr><td>notificationProvider</td><td> * プロバイダ方式を使用して構成されたクラスタに接続する場合に、アドレスプロバイダのURLを指定する。 * notificationAddressおよびnotificationMemberと同時に指定することはできない。 * バージョン2.9よりサポート * </td></tr> * <tr><td>applicationName</td><td>アプリケーションの名前。 * アプリケーションの識別を補助するための情報として、接続先のクラスタ上での * 各種管理情報の出力の際に含められる場合がある。ただし、アプリケーションの * 同一性を どのように定義するかについては関与しない。省略時は * アプリケーション名の指定がなかったものとみなされる。空文字列は指定 * できない。 * バージョン4.2よりサポート</td></tr> * <tr><td>timeZone</td><td>タイムゾーン情報。 * TQLでのTIMESTAMP値演算などに使用される。 * 「{@code ±hh:mm}」または「{@code ±hhmm}」形式によるオフセット値 * ({@code ±}は{@code +}または{@code -}、{@code hh}は時、 * {@code mm}は分)、 「{@code Z}」({@code +00:00}に相当)、 * 「{@code auto}」(実行環境に応じ自動設定)のいずれかを指定する。 * {@code auto}が使用できるのは夏時間を持たないタイムゾーンに * 限定される。 * バージョン4.3よりサポート</td></tr> * </tbody> * </table> * * <p>クラスタ名、データベース名、ユーザ名、パスワードについては、 * ASCIIの大文字・小文字表記の違いがいずれも区別されます。その他、 * これらの定義に使用できる文字種や長さの上限などの制限については、 * GridDBテクニカルリファレンスを参照してください。ただし、制限に反する * 文字列をプロパティ値として指定した場合、各ノードへの接続のタイミングまで * エラーが検知されないことや、認証情報の不一致など別のエラーになる * ことがあります。</p> * * <p>取得のたびに、新たな{@link GridStore}インスタンスが生成されます。 * 異なる{@link GridStore}インスタンスならびに関連するオブジェクトに対する操作は、 * スレッド安全です。すなわち、ある2つのオブジェクトがそれぞれ{@link GridStore} * インスタンスを基にして生成されたものまたは{@link GridStore}インスタンスそのものであり、 * かつ、該当する関連{@link GridStore}インスタンスが異なる場合、一方のオブジェクトに * 対してどのスレッドからどのタイミングでメソッドが呼び出されていたとしても、 * 他方のオブジェクトのメソッドを呼び出すことができます。 * ただし、{@link GridStore}自体のスレッド安全性は保証されていないため、 * 同一{@link GridStore}インスタンスに対して複数スレッドから任意のタイミングで * メソッド呼び出しすることはできません。</p> * * @param properties 取得設定を指示するためのプロパティ * * @throws GSException 指定のホストについて名前解決できなかった場合 * @throws GSException 指定のプロパティが上で説明した形式・制限に * 合致しないことを検知できた場合 * @throws GSException すでにクローズ済みの場合 * @throws NullPointerException {@code properties}に{@code null}が * 指定された場合 * </div><div lang="en"> * TODO Returns a {@link GridStore} with the specified properties. * * <p>When obtaining {@link GridStore}, it just searches for the name of a master node * (hereafter, a master) administering each {@link Container} as necessary, but * authentication is not performed. 
When a client really needs to connect to * a node corresponding to each Container, authentication is performed.</p> * * <p>The following properties can be specified. Unsupported property * names are ignored.</p> * <table> * <thead><tr><th>Property</th><th>Description</th></tr></thead> * <tbody> * <tr><td>host</td><td>A destination host name. An IP address (IPV4 only) is * also available. Mandatory for manually setting a master. For autodetection * of a master, omit the setting.</td></tr> * <tr><td>port</td><td>A destination port number. A string representing of * a number from {@code 0} to {@code 65535}. Mandatory for manually setting a master. * For autodetection of a master, omit the setting.</td></tr> * <tr><td>notificationAddress</td><td>An IP address (IPV4 only) for receiving * a notification used for autodetection of a master. A default address is * used if omitted. * This property cannot be specified with neither notificationMember nor * notificationProvider properties at the same time. * </td></tr> * <tr><td>notificationPort</td><td>A port number for receiving a notification * used for autodetection of a master. A string representing of a number * from {@code 0} to {@code 65535}. A default port number is used if omitted.</td></tr> * <tr><td>clusterName</td><td>A cluster name. It is used to verify whether it * matches the cluster name assigned to the destination cluster. If it is omitted * or an empty string is specified, cluster name verification is not performed.</td></tr> * <tr><td>database</td><td>Name of the database to be connected. * If it is omitted, "public" database that all users can * access is automatically connected. Users can handle the * containers belonging to the connected database.</td></tr> * <tr><td>user</td><td>A user name</td></tr> * <tr><td>password</td><td>A password for user authentication</td></tr> * <tr><td>consistency</td><td>Either one of the following consistency levels: * <dl> * <dt>{@code "IMMEDIATE"}</dt><dd>The updates by other clients are committed * immediately after a relevant transaction completes.</dd> * <dt>{@code "EVENTUAL"}</dt><dd>The updates by other clients may not be * committed even after a relevant transaction completes. No update operation * cannot be applied to {@link Container}.</dd> * </dl> * By default, "IMMEDIATE" is selected. * </td></tr> * <tr><td>transactionTimeout</td><td>The minimum value of transaction timeout * time. The transaction timeout is counted from the beginning of each * transaction in a relevant {@link Container}. A string representing of a number from {@code 0} * to {@link Integer#MAX_VALUE} in seconds. The value {@code 0} indicates * that it is always uncertain whether a timeout error will occur during * a subsequent transaction. If a value specified over the internal upper limit * of timeout, timeout will occur at the internal upper limit value. If omitted, * the default value used by a destination GridDB is applied. * </td></tr> * <tr><td>failoverTimeout</td><td>The minimum value of waiting time until * a new destination is found in a failover. A numeric string representing * from {@code 0} to {@link Integer#MAX_VALUE} in seconds. The value {@code 0} * indicates that no failover is performed. If omitted, the default value * used by this Factory is applied. </td></tr> * <tr><td>containerCacheSize</td><td>The maximum number of ContainerInfos * on the Container cache. A string representing of a number from * {@code 0} to {@link Integer#MAX_VALUE}. The Container cache is not used if * the value is {@code 0}. 
To obtain a {@link Container}, its ContainerInfo * might be obtained from the Container cache instead of request to GridDB. * A default number is used if omitted. </td></tr> * <tr><td>dataAffinityPattern</td><td>Specifies the arbitrary * number of patterns as show below, using pairs of an * affinity string for the function of data affinity and a * container pattern. * <pre>(ContainerNamePattern1)=(DataAffinityString1),(ContainerNamePattern2)=(DataAffinityString2),...</pre> * When {@link Container} is added by {@link ContainerInfo#setDataAffinity(String)}, the affinity string * pairing with a container name pattern that matches the * container name is applied. If there are multiple patterns * that match the name, the first pattern in the specified * order is selected. Each container name pattern follows the * naming rules of container, except a wild card character '%' * can also be specified in the pattern. The affinity string * follows the rules of {@link ContainerInfo#setDataAffinity(String)}. * To specify special characters used in the patterns or as * delimiters for the patterns in a container name, etc., they * must be escaped by '\'. But the characters against the * naming rules of container or affinity cannot be specified. * Supported since the version 2.7.</td></tr> * <tr><td>notificationMember</td><td> * A list of address and port pairs in cluster. It is used to connect to * cluster which is configured with FIXED_LIST mode, and specified as * follows. * <pre>(Address1):(Port1),(Address2):(Port2),...</pre> * This property cannot be specified with neither notificationAddress nor * notificationProvider properties at the same time. * This property is supported on version 2.9 or later. * </td></tr> * <tr><td>notificationProvider</td><td> * A URL of address provider. It is used to connect to cluster which is * configured with PROVIDER mode. * This property cannot be specified with neither notificationAddress nor * notificationMember properties at the same time. * This property is supported on version 2.9 or later. * * </td></tr> * <tr><td>applicationName</td><td>Name of an application. It may be * contained in various information for management on the connected * cluster. However, the cluster shall not be involved with the identity of * applications. If the property is omitted, it is regarded that the name * is not specified. Empty string cannot be specified. * This property is supported on version 4.2 or later.</td></tr> * </tbody> * </table> * * <p>Cluster names, database names, user names and passwords * are case-sensitive. See the GridDB Technical Reference for * the details of the limitations, such as allowed characters * and maximum length. When a name violating the limitations has * been specified as a property value, the error detection may * be delayed until the authentication processing. And there * are the cases that the error is identified as an authentication * error, etc., not a violation error for the limitations.</p> * * <p>A new {@link GridStore} instance is created by each call of this method. * Operations on different {@link GridStore} instances and related objects are thread * safe. That is, if some two objects are each created based on {@link GridStore} * instances or they are just {@link GridStore} instances, and if they are related to * different {@link GridStore} instances respectively, any method of one object can be * called, no matter when a method of the other object may be called from any * thread. 
However, since thread safety is not guaranteed for {@link GridStore} itself, * it is not allowed to call a method of a single {@link GridStore} instance from two or * more threads at an arbitrary time. </p> * * @param properties Properties specifying the settings for the object to be * obtained. * * @throws GSException if host name resolution fails. * @throws GSException if any specified property does not match the format * explained above. * even if connection or authentication will not succeed with their values. * @throws GSException if the connection is closed. * @throws NullPointerException {@code null} is specified as {@code properties}. * </div> */ public abstract GridStore getGridStore( Properties properties) throws GSException; /** * <div lang="ja"> * このファクトリの設定を変更します。 * * <p>設定の変更は、このファクトリより生成された{@link GridStore}、 * ならびに、今後このファクトリで生成される{@link GridStore}に反映されます。</p> * * <p>以下のプロパティを指定できます。サポート外の名称のプロパティは無視されます。</p> * <table> * <thead><tr><th>名称</th><th>説明</th></tr></thead> * <tbody> * <tr><td>maxConnectionPoolSize</td><td>内部で使用される * コネクションプールの最大コネクション数。{@code 0}以上 * {@link Integer#MAX_VALUE}までの数値の文字列表現。 * 値が{@code 0}の場合、コネクションプールを使用しないことを意味する。 * 省略時は既存の設定値を使用</td></tr> * <tr><td>failoverTimeout</td><td> * フェイルオーバ処理にて新たな接続先が * 見つかるまで待機する時間の最低値。{@code 0}以上 * {@link Integer#MAX_VALUE}までの数値の文字列表現であり、単位は秒。 * {@code 0}の場合、フェイルオーバ処理を行わない。 * 省略時は既存の設定値を使用</td></tr> * </tbody> * </table> * * @throws GSException 指定のプロパティが上で説明した形式に合致しない場合 * @throws GSException すでにクローズ済みの場合 * @throws NullPointerException {@code properties}に{@code null}が * 指定された場合 * </div><div lang="en"> * Changes the settings for this Factory. * * <p>The changed settings are reflected in {@link GridStore} already created * by this Factory and {@link GridStore} to be created by this Factory later. </p> * * <p>The following properties can be specified. Unsupported property names are ignored.</p> * <table> * <thead><tr><th>Property</th><th>Description</th></tr></thead> * <tbody> * <tr><td>maxConnectionPoolSize</td><td>The maximum number of connections in the * connection pool used inside. A numeric string representing {@code 0} to * {@link Integer#MAX_VALUE}. The value {@code 0} indicates no use of the * connection pool. If omitted, the default value is used.</td></tr> * <tr><td>failoverTimeout</td><td>The minimum value of waiting time until a new * destination is found in a failover. A numeric string representing {@code 0} * to {@link Integer#MAX_VALUE} in seconds. The value {@code 0} indicates * that no failover is performed. If omitted, the default value is used. * </td></tr> * </tbody> * </table> * * @throws GSException if any specified property does not match the format shown above. * @throws GSException if the connection is closed. * @throws NullPointerException {@code null} is specified as {@code properties}. * </div> */ public abstract void setProperties(Properties properties) throws GSException; /** * <div lang="ja"> * このファクトリより作成された{@link GridStore}をすべてクローズし、 * 必要に応じて関連するリソースを解放します。 * * <p>{@link GSException}が送出された場合でも、関連するコネクションリソースは * すべて解放されます。すでにクローズ済みの場合、このメソッドを呼び出しても * 何の効果もありません。なお、現在のVMの終了時にも呼び出されます。</p> * * @throws GSException クローズ処理中に接続障害などが発生した場合 * * @see Closeable#close() * </div><div lang="en"> * Closes all {@link GridStore} instances created by this Factory and release * related resources as necessary. * * <p>Even if {@link GSException} is thrown, all related connection resources * are released. If the connection is already closed, this method will not work * effectively. 
It is also called when stopping the current VM.</p>
 *
 * @throws GSException if a connection failure or other error occurs while closing.
 *
 * @see Closeable#close()
 * </div>
 */
public abstract void close() throws GSException;

private static void registerLoggers() {
	boolean loggerAvailable = false;
	try {
		Class.forName("org.slf4j.Logger");
		Class.forName("org.slf4j.LoggerFactory");
		loggerAvailable = true;
	}
	catch (ClassNotFoundException e) {
		// SLF4J is not on the classpath; proceed without loggers.
	}

	for (String subName : LoggingUtils.SUB_LOGGER_NAMES) {
		final BaseGridStoreLogger logger;
		if (loggerAvailable) {
			try {
				logger = (BaseGridStoreLogger) Class.forName(
						LoggingUtils.DEFAULT_LOGGER_NAME + "$" + subName).newInstance();
			}
			catch (Exception e) {
				throw new Error(e);
			}
		}
		else {
			logger = null;
		}
		LoggingUtils.registerLogger(subName, logger);
	}
}
}
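As a usage illustration of the getGridStore(Properties) contract documented above, the following is a minimal sketch that builds a property set for a cluster in FIXED_LIST mode and obtains a store. Only the property names come from the documentation; the address, cluster name, credentials and application name are placeholder assumptions, and the classes are assumed to come from the GridDB Java client package (com.toshiba.mwcloud.gs).

import java.util.Properties;
import com.toshiba.mwcloud.gs.GSException;
import com.toshiba.mwcloud.gs.GridStore;
import com.toshiba.mwcloud.gs.GridStoreFactory;

public class GridStoreSample {
    public static void main(String[] args) throws GSException {
        // All values below are illustrative placeholders, not defaults.
        Properties props = new Properties();
        props.setProperty("notificationMember", "192.168.0.10:10001"); // FIXED_LIST mode
        props.setProperty("clusterName", "myCluster");
        props.setProperty("user", "admin");
        props.setProperty("password", "admin");
        props.setProperty("applicationName", "sampleApp");

        GridStore store = GridStoreFactory.getInstance().getGridStore(props);
        try {
            // ... obtain Containers and perform operations here ...
        } finally {
            store.close();
        }
    }
}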
package version

var Version = "0.0.0-alpha" // Gets set at build time in the Makefile
var BuildTime = "UNKNOWN"
// GetEmployees returns the employees together with information about their offices.
func GetEmployees() (structs.EmployeesView, error) {
	var employeesRender structs.EmployeesView

	employees, err := models.GetEmployeesWithOffice()
	if err != nil {
		return employeesRender, err
	}

	employeesRender = structs.EmployeesView{
		employees,
	}

	return employeesRender, nil
}
Dermatitis due to parabens in cosmetic creams No histopathology was performed but the appearances of the reaction to the resin were typical of vesicular eczema. We assumed that we were dealing with a true sensitization perhaps combined with some irritant effect, facilitating the onset of sensitization and its continuance. Over the past few years, several authors have reported pathological cutaneous signs due to acrylic resins. The most frequent reactions found by French authors (Ducome et al. 1974, Beurey et al. 1977) seem to be of an irritant type. But Beurey placed emphasis on the fact that four mechanics appointed to maintain the machines presented with a genuine eczema later. In Sweden, Magnusson & Mobacken (1972) and Wahlberg (1974) have published cases of allergic contact dermatitis to acrylic resins. It seems that the main difficulty lies in knowing whether an interplay of irritation and sensitization occurs commonly, or whether true sensitization is the unusual pathological effect of acrylic resins.
/** * This plugin provides an implementation of {@link OnlineClusterer} * extension using clustering components of the Carrot2 project * (<a href="http://www.carrot2.org">http://www.carrot2.org</a>). * * <p>This class hardcodes an equivalent of the following Carrot2 process: * <pre><![CDATA[ * <local-process id="yahoo-lingo"> * <name>Yahoo Search API -- Lingo Classic Clusterer</name> * * <input component-key="input-nutch" /> * <filter component-key="filter-lingo" /> * <output component-key="output-clustersConsumer" /> * </local-process> * ]]></pre> */ public class Clusterer implements OnlineClusterer, Configurable { /** Default language property name. */ private final static String CONF_PROP_DEFAULT_LANGUAGE = "extension.clustering.carrot2.defaultLanguage"; /** Recognizable languages property name. */ private final static String CONF_PROP_LANGUAGES = "extension.clustering.carrot2.languages"; /** Internal clustering process ID in Carrot2 LocalController */ private final static String PROCESS_ID = "nutch-lingo"; public static final Log logger = LogFactory.getLog(Clusterer.class); /** The LocalController instance used for clustering */ private LocalControllerBase controller; /** Nutch configuration. */ private Configuration conf; /** * Default language for hits. English by default, but may be changed * via a property in Nutch configuration. */ private String defaultLanguage = "en"; /** * A list of recognizable languages.. * English only by default, but configurable via Nutch configuration. */ private String [] languages = new String [] {defaultLanguage}; /** * An empty public constructor for making new instances * of the clusterer. */ public Clusterer() { // Don't forget to call {@link #setConf(Configuration)}. } /** * See {@link OnlineClusterer} for documentation. */ public HitsCluster [] clusterHits(HitDetails [] hitDetails, String [] descriptions) { if (this.controller == null) { logger.error("initialize() not called."); return new HitsCluster[0]; } final Map requestParams = new HashMap(); requestParams.put(NutchInputComponent.NUTCH_INPUT_HIT_DETAILS_ARRAY, hitDetails); requestParams.put(NutchInputComponent.NUTCH_INPUT_SUMMARIES_ARRAY, descriptions); try { // The input component takes Nutch's results so we don't need the query argument. final ProcessingResult result = controller.query(PROCESS_ID, "no-query", requestParams); final ArrayOutputComponent.Result output = (ArrayOutputComponent.Result) result.getQueryResult(); final List outputClusters = output.clusters; final HitsCluster [] clusters = new HitsCluster[ outputClusters.size() ]; int j = 0; for (Iterator i = outputClusters.iterator(); i.hasNext(); j++) { RawCluster rcluster = (RawCluster) i.next(); clusters[j] = new HitsClusterAdapter(rcluster, hitDetails); } // invoke Carrot2 process here. return clusters; } catch (MissingProcessException e) { throw new RuntimeException("Missing clustering process.", e); } catch (Exception e) { throw new RuntimeException("Unidentified problems with the clustering.", e); } } /** * Implementation of {@link Configurable} */ public void setConf(Configuration conf) { this.conf = conf; // Configure default language and other component settings. if (conf.get(CONF_PROP_DEFAULT_LANGUAGE) != null) { // Change the default language. 
this.defaultLanguage = conf.get(CONF_PROP_DEFAULT_LANGUAGE); } if (conf.getStrings(CONF_PROP_LANGUAGES) != null) { this.languages = conf.getStrings(CONF_PROP_LANGUAGES); } if (logger.isInfoEnabled()) { logger.info("Default language: " + defaultLanguage); logger.info("Enabled languages: " + Arrays.asList(languages)); } initialize(); } /** * Implementation of {@link Configurable} */ public Configuration getConf() { return conf; } /** * Initialize clustering processes and Carrot2 components. */ private synchronized void initialize() { // Initialize language list, temporarily switching off logging // of warnings. This is a bit of a hack, but we don't want to // redistribute the entire Carrot2 distro and this prevents // nasty ClassNotFound warnings. final Logger c2Logger = Logger.getLogger("org.carrot2"); final Level original = c2Logger.getLevel(); c2Logger.setLevel(Level.ERROR); AllKnownLanguages.getLanguageCodes(); c2Logger.setLevel(original); // Initialize the controller. controller = new LocalControllerBase(); final Configuration nutchConf = getConf(); final String processResource = nutchConf.get( "extension.clustering.carrot2.process-resource"); if (processResource == null) { logger.info("Using default clustering algorithm (Lingo)."); addDefaultProcess(); } else { logger.info("Using custom clustering process: " + processResource); controller.setComponentAutoload(true); final ControllerHelper helper = new ControllerHelper(); final InputStream is = Thread.currentThread() .getContextClassLoader().getResourceAsStream(processResource); if (is != null) { try { final LocalComponentFactory nutchInputFactory = new LocalComponentFactory() { public LocalComponent getInstance() { return new NutchInputComponent(defaultLanguage); } }; controller.addLocalComponentFactory("input-nutch", nutchInputFactory); final LocalProcess process = helper.loadProcess( helper.getExtension(processResource), is).getProcess(); controller.addProcess(PROCESS_ID, process); is.close(); } catch (IOException e) { logger.error("Could not load process resource: " + processResource, e); } catch (LoaderExtensionUnknownException e) { logger.error("Unrecognized extension of process resource: " + processResource); } catch (InstantiationException e) { logger.error("Could not instantiate process: " + processResource, e); } catch (InitializationException e) { logger.error("Could not initialize process: " + processResource, e); } catch (DuplicatedKeyException e) { logger.error("Duplicated key (unreachable?): " + processResource, e); } catch (MissingComponentException e) { logger.error("Some components are missing, could not initialize process: " + processResource, e); } } else { logger.error("Could not find process resource: " + processResource); } } } /** * Adds a default clustering process using Lingo algorithm. */ private void addDefaultProcess() { try { addComponentFactories(); addProcesses(); } catch (DuplicatedKeyException e) { logger.fatal("Duplicated component or process identifier.", e); } } /** Adds the required component factories to a local Carrot2 controller. 
*/ private void addComponentFactories() throws DuplicatedKeyException { // * <input component-key="input-nutch" /> LocalComponentFactory nutchInputFactory = new LocalComponentFactory() { public LocalComponent getInstance() { return new NutchInputComponent(defaultLanguage); } }; controller.addLocalComponentFactory("input-nutch", nutchInputFactory); // * <filter component-key="filter-lingo" /> LocalComponentFactory lingoFactory = new LocalComponentFactory() { public LocalComponent getInstance() { final HashMap defaults = new HashMap(); // These are adjustments settings for the clustering algorithm. // If you try the live WebStart demo of Carrot2 you can see how they affect // the final clustering: http://www.carrot2.org defaults.put("lsi.threshold.clusterAssignment", "0.150"); defaults.put("lsi.threshold.candidateCluster", "0.775"); // Initialize a new Lingo clustering component. ArrayList languageList = new ArrayList(languages.length); for (int i = 0; i < languages.length; i++) { final String lcode = languages[i]; try { final Language lang = AllKnownLanguages.getLanguageForIsoCode(lcode); if (lang == null) { logger.warn("Language not supported in Carrot2: " + lcode); } else { languageList.add(lang); logger.debug("Language loaded: " + lcode); } } catch (Throwable t) { logger.warn("Language could not be loaded: " + lcode, t); } } return new LingoLocalFilterComponent( (Language []) languageList.toArray(new Language [languageList.size()]), defaults); } }; controller.addLocalComponentFactory("filter-lingo", lingoFactory); // * <output component-key="output-clustersConsumer" /> LocalComponentFactory clusterConsumerOutputFactory = new LocalComponentFactory() { public LocalComponent getInstance() { return new ArrayOutputComponent(); } }; controller.addLocalComponentFactory("output-array", clusterConsumerOutputFactory); } /** * Adds a hardcoded clustering process to the local controller. */ private void addProcesses() { final LocalProcessBase process = new LocalProcessBase( "input-nutch", "output-array", new String [] {"filter-lingo"}, "The Lingo clustering algorithm (www.carrot2.org).", ""); try { controller.addProcess(PROCESS_ID, process); } catch (Exception e) { throw new RuntimeException("Could not assemble clustering process.", e); } } }
def tail(file_name, num_lines):
    # Relies on a reverse_readline(file_name) helper (defined elsewhere) that
    # yields the lines of the file starting from the end.
    lines = []
    for line in reverse_readline(file_name):
        if not num_lines:
            break
        lines.append(line)
        num_lines -= 1
    lines.reverse()
    return '\n'.join(lines)
export class MockHetznerDns {
  public Zones = {
    GetAll: jest.fn(),
  };

  public Records = {
    GetAll: jest.fn(),
    Create: jest.fn(),
    Delete: jest.fn(),
  };
}
// // Copyright (c) 2016-present DeepGrace (complex dot invoke at gmail dot com) // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // // Official repository: https://github.com/deepgrace/monster // #ifndef OBJECT_POOL_HPP #define OBJECT_POOL_HPP #include <queue> #include <memory> // An object pool that can be used with any class that provides a default constructor. namespace monster { template <typename T> class object_pool { public: using type = std::shared_ptr<T>; object_pool() = default; object_pool(const object_pool<T>&) = delete; object_pool<T>& operator=(const object_pool<T>&) = delete; object_pool(object_pool<T>&&) = delete; object_pool<T>& operator=(object_pool<T>&&) = delete; type allocate(); virtual ~object_pool() = default; private: std::queue<std::unique_ptr<T>> objects; }; template <typename T> typename object_pool<T>::type object_pool<T>::allocate() { if (objects.empty()) objects.emplace(std::make_unique<T>()); std::unique_ptr<T> obj(std::move(objects.front())); objects.pop(); type dst(obj.release(), [this](T* t){ objects.emplace(t); }); return dst; } template <typename T> using object_pool_t = typename object_pool<T>::type; } #endif
/** * A configuration class that exposes CQL statements for the stocks table as beans. * * <p>This component exposes a few {@link SimpleStatement}s that perform both DDL and DML operations * on the stocks table, as well as a few {@link PreparedStatement}s for common DML operations. */ @Configuration @Profile("!unit-test") public class StockQueriesConfiguration { private static final CqlIdentifier STOCKS = CqlIdentifier.fromCql("stocks"); public static final CqlIdentifier SYMBOL = CqlIdentifier.fromCql("symbol"); public static final CqlIdentifier DATE = CqlIdentifier.fromCql("date"); public static final CqlIdentifier VALUE = CqlIdentifier.fromCql("value"); private static final CqlIdentifier START = CqlIdentifier.fromCql("start"); private static final CqlIdentifier END = CqlIdentifier.fromCql("end"); @Bean("stocks.simple.create") public SimpleStatement createTable(@NonNull CqlIdentifier keyspace) { return SchemaBuilder.createTable(keyspace, STOCKS) .ifNotExists() .withPartitionKey(SYMBOL, DataTypes.TEXT) .withClusteringColumn(DATE, DataTypes.TIMESTAMP) .withColumn(VALUE, DataTypes.DECIMAL) .withClusteringOrder(DATE, ClusteringOrder.DESC) .build(); } @Bean("stocks.simple.drop") public SimpleStatement dropTable(@NonNull CqlIdentifier keyspace) { return SchemaBuilder.dropTable(keyspace, STOCKS).ifExists().build(); } @Bean("stocks.simple.truncate") public SimpleStatement truncate(@NonNull CqlIdentifier keyspace) { return QueryBuilder.truncate(keyspace, STOCKS).build(); } @Bean("stocks.simple.insert") public SimpleStatement insert(@NonNull CqlIdentifier keyspace) { return QueryBuilder.insertInto(keyspace, STOCKS) .value(SYMBOL, bindMarker(SYMBOL)) .value(DATE, bindMarker(DATE)) .value(VALUE, bindMarker(VALUE)) .build(); } @Bean("stocks.simple.deleteById") public SimpleStatement deleteById(@NonNull CqlIdentifier keyspace) { return QueryBuilder.deleteFrom(keyspace, STOCKS) .where(column(SYMBOL).isEqualTo(bindMarker(SYMBOL))) .where(column(DATE).isEqualTo(bindMarker(DATE))) .build(); } @Bean("stocks.simple.findById") public SimpleStatement findById(@NonNull CqlIdentifier keyspace) { return QueryBuilder.selectFrom(keyspace, STOCKS) .columns(SYMBOL, DATE, VALUE) .where(column(SYMBOL).isEqualTo(bindMarker(SYMBOL))) .where(column(DATE).isEqualTo(bindMarker(DATE))) .build(); } @Bean("stocks.simple.findBySymbol") public SimpleStatement findBySymbol(@NonNull CqlIdentifier keyspace) { return QueryBuilder.selectFrom(keyspace, STOCKS) .columns(SYMBOL, DATE, VALUE) .where( Relation.column(SYMBOL).isEqualTo(bindMarker(SYMBOL)), // start inclusive Relation.column(DATE).isGreaterThanOrEqualTo(bindMarker(START)), // end exclusive Relation.column(DATE).isLessThan(bindMarker(END))) .build(); } @Bean("stocks.prepared.insert") public PreparedStatement prepareInsert( CqlSession session, @Qualifier("stocks.simple.insert") SimpleStatement stockInsert) { return session.prepare(stockInsert); } @Bean("stocks.prepared.deleteById") public PreparedStatement prepareDeleteById( CqlSession session, @Qualifier("stocks.simple.deleteById") SimpleStatement stockDeleteById) { return session.prepare(stockDeleteById); } @Bean("stocks.prepared.findById") public PreparedStatement prepareFindById( CqlSession session, @Qualifier("stocks.simple.findById") SimpleStatement stockFindById) { return session.prepare(stockFindById); } @Bean("stocks.prepared.findBySymbol") public PreparedStatement prepareFindBySymbol( CqlSession session, @Qualifier("stocks.simple.findBySymbol") SimpleStatement stockFindBySymbol) { return 
session.prepare(stockFindBySymbol); } }
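To make the intent of these prepared statements concrete, here is a hedged sketch of binding and executing them with the DataStax Java driver 4.x. The session and prepared-statement beans are assumed to be injected (insertPs for "stocks.prepared.insert", findBySymbolPs for "stocks.prepared.findBySymbol"), and the symbol, dates and value are made-up sample data.

// Assumes: CqlSession session, PreparedStatement insertPs, PreparedStatement findBySymbolPs;
// java.time.Instant, java.math.BigDecimal and the driver's BoundStatement/ResultSet/Row types.
BoundStatement insert = insertPs.bind()
    .setString("symbol", "ACME")
    .setInstant("date", Instant.parse("2020-01-01T00:00:00Z"))
    .setBigDecimal("value", new BigDecimal("42.00"));
session.execute(insert);

ResultSet rs = session.execute(findBySymbolPs.bind()
    .setString("symbol", "ACME")
    .setInstant("start", Instant.parse("2020-01-01T00:00:00Z"))  // start inclusive
    .setInstant("end", Instant.parse("2021-01-01T00:00:00Z")));  // end exclusive
for (Row row : rs) {
    System.out.println(row.getInstant("date") + " -> " + row.getBigDecimal("value"));
}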
import React, { FC } from 'react';
import { Icon, IconInterface } from "@redesign-system/ui-core";

export const MusicBoxMultipleOutlineIcon: FC<IconInterface> = function MusicBoxMultipleOutlineIcon({
  className,
  ...propsRest
}) {
  const classNames = `MusicBoxMultipleOutlineIcon ${className}`;
  return (
    <Icon alt="MusicBoxMultipleOutline" className={classNames} {...propsRest}>
      <path d="M20,2H8A2,2 0 0,0 6,4V16A2,2 0 0,0 8,18H20A2,2 0 0,0 22,16V4A2,2 0 0,0 20,2M20,16H8V4H20M12.5,15A2.5,2.5 0 0,0 15,12.5V7H18V5H14V10.5C13.58,10.19 13.07,10 12.5,10A2.5,2.5 0 0,0 10,12.5A2.5,2.5 0 0,0 12.5,15M4,6H2V20A2,2 0 0,0 4,22H18V20H4" />
    </Icon>
  );
};

MusicBoxMultipleOutlineIcon.displayName = 'MusicBoxMultipleOutlineIcon';
// NewFSStorage creates a new FSStorage with the specified settings,
// creating the root and sites directories if they do not exist yet.
func NewFSStorage(siteBuilder sitebuilder.SiteBuilder) (*FSStorage, error) {
	viper.SetDefault("storage.rootPath", "/tmp/hyper-cas/storage")
	viper.SetDefault("storage.sitesPath", "/tmp/hyper-cas/sites")
	rootPath := viper.GetString("storage.rootPath")
	sitesPath := viper.GetString("storage.sitesPath")

	err := os.MkdirAll(rootPath, os.ModePerm)
	if err != nil {
		return nil, err
	}
	err = os.MkdirAll(sitesPath, os.ModePerm)
	if err != nil {
		return nil, err
	}

	return &FSStorage{
		rootPath:    rootPath,
		sitesPath:   sitesPath,
		siteBuilder: siteBuilder,
	}, nil
}
/** * TODO: experimental, some versions do not download. */ public class MappingsDownloader { private static final String ADDRESS = "http://export.mcpbot.golde.org/"; private static final Map<String, String> STABLE_MAP = new HashMap<>(); private static final Map<String, String> SNAPSHOT_MAP = new HashMap<>(); public static void parseVersions() throws IOException { if (!STABLE_MAP.isEmpty()) return; final InputStream urlInputStream = getURLInputStream(new URL(ADDRESS + "versions.json")); final InputStreamReader inputStreamReader = new InputStreamReader(urlInputStream); final JsonObject object = new JsonParser().parse(inputStreamReader).getAsJsonObject(); for (Map.Entry<String, JsonElement> stringJsonElementEntry : object.entrySet()) { JsonArray stable = stringJsonElementEntry.getValue().getAsJsonObject().get("stable").getAsJsonArray(); if (stable.isEmpty()) { continue; } STABLE_MAP.put(stringJsonElementEntry.getKey(), String.valueOf(stable.get(0).getAsInt())); } } public static void download(String version, File destination) { try { parseVersions(); String stableVersion = STABLE_MAP.get(version); if (stableVersion == null) { } String directoryPath = "mcp_stable/" + stableVersion + "-" + version + "/"; String fileName = "mcp_stable-" + stableVersion + "-" + version + ".zip"; String url = ADDRESS + directoryPath + fileName; File temp = File.createTempFile("mappings-" + version, "stable"); temp.deleteOnExit(); try (BufferedInputStream in = new BufferedInputStream(getURLInputStream(new URL(url))); FileOutputStream out = new FileOutputStream(temp)) { byte[] buffer = new byte[1024]; int read; while ((read = in.read(buffer, 0, 1024)) != -1) { out.write(buffer, 0, read); } ZipFile zipFile = new ZipFile(temp); ZipEntry fields = zipFile.getEntry("fields.csv"); ZipEntry methods = zipFile.getEntry("methods.csv"); InputStream fieldsIn = zipFile.getInputStream(fields); FileOutputStream fieldsFos = new FileOutputStream(new File(destination, "fields.csv")); while ((read = fieldsIn.read(buffer, 0, 1024)) != -1) { fieldsFos.write(buffer, 0, read); } InputStream methodsIn = zipFile.getInputStream(methods); FileOutputStream methodsFos = new FileOutputStream(new File(destination, "methods.csv")); while ((read = methodsIn.read(buffer, 0, 1024)) != -1) { methodsFos.write(buffer, 0, read); } } File srgTemp = File.createTempFile("srg-" + version, "stable"); srgTemp.deleteOnExit(); String srgUrl = ADDRESS + "mcp/" + version + "/mcp-" + version + "-srg.zip"; try (BufferedInputStream in = new BufferedInputStream(getURLInputStream(new URL(srgUrl))); FileOutputStream out = new FileOutputStream(srgTemp)) { byte[] buffer = new byte[1024]; int read; while ((read = in.read(buffer, 0, 1024)) != -1) { out.write(buffer, 0, read); } ZipFile zipFile = new ZipFile(srgTemp); ZipEntry joined = zipFile.getEntry("joined.srg"); InputStream joinedIn = zipFile.getInputStream(joined); FileOutputStream joinedFos = new FileOutputStream(new File(destination, "joined.srg")); while ((read = joinedIn.read(buffer, 0, 1024)) != -1) { joinedFos.write(buffer, 0, read); } } } catch (Exception exception) { exception.printStackTrace(); } } private static InputStream getURLInputStream(URL url) throws IOException { URLConnection connection = url.openConnection(); connection.setRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11"); connection.connect(); return connection.getInputStream(); } }
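A minimal, hedged usage sketch follows; the Minecraft version string and output directory are placeholders, and, as the TODO above notes, some versions may not download successfully.

// Downloads fields.csv, methods.csv and joined.srg into the destination
// directory; "1.12.2" is only an example version string.
File destination = new File("mappings/1.12.2");
destination.mkdirs();
MappingsDownloader.download("1.12.2", destination);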
/** * Helper method to build a new message list for passing to a MessageSpec. */ protected List newMessageList(Message[] messages) { List ret = new ArrayList(); for (int i = 0; i < messages.length; i++) { ret.add(messages[i]); } return ret; }
import { mount } from '@vue/test-utils'
import CirclePackingChart, {
  CirclePackingChartProps,
} from '../../src/plots/circle-packing'
import data from './circle-packing-data.json'

const config: CirclePackingChartProps = {
  autoFit: true,
  data: data as Record<string, any>,
  label: false,
  legend: false,
  hierarchyConfig: {
    sort: (a, b) => b.depth - a.depth,
  },
}

describe('CirclePackingChart', () => {
  test('should render without crashing', () => {
    mount(() => <CirclePackingChart {...(config as any)} />)
  })
})
import sys import os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) import trackeval # noqa: E402 plots_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data', 'plots')) tracker_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data', 'trackers')) # dataset = os.path.join('kitti', 'kitti_2d_box_train') # classes = ['cars', 'pedestrian'] dataset = os.path.join('mot_challenge', 'MOT17-train') classes = ['pedestrian'] data_fol = os.path.join(tracker_folder, dataset) trackers = os.listdir(data_fol) out_loc = os.path.join(plots_folder, dataset) for cls in classes: trackeval.plotting.plot_compare_trackers(data_fol, trackers, cls, out_loc)
//%includeGuardStart { #ifndef MUSHGLBUFFERS_H #define MUSHGLBUFFERS_H //%includeGuardStart } p34af5R27BoG6LKqDWQeKQ //%Header { /***************************************************************************** * * File: src/MushGL/MushGLBuffers.h * * Author: <NAME> 2002-2005 * * This file contains original work by <NAME>. The author and his * employer (Mushware Limited) irrevocably waive all of their copyright rights * vested in this particular version of this file to the furthest extent * permitted. The author and Mushware Limited also irrevocably waive any and * all of their intellectual property rights arising from said file and its * creation that would otherwise restrict the rights of any party to use and/or * distribute the use of, the techniques and methods used herein. A written * waiver can be obtained via http://www.mushware.com/. * * This software carries NO WARRANTY of any kind. * ****************************************************************************/ //%Header } DbBqjjQLEDRD5N83MupJQQ /* * $Id: MushGLBuffers.h,v 1.3 2005/07/05 13:52:22 southa Exp $ * $Log: MushGLBuffers.h,v $ * Revision 1.3 2005/07/05 13:52:22 southa * Adanaxis work * * Revision 1.2 2005/07/04 15:59:00 southa * Adanaxis work * * Revision 1.1 2005/07/04 11:10:43 southa * Rendering pipeline * */ #include "MushGLStandard.h" #include "MushGLVertexBuffer.h" #include "MushGLWorkspace.h" //:generate standard ostream xml1 class MushGLBuffers : public MushcoreVirtualObject { public: enum { kOwnerInvalid, kOwnerNone, kOwnerMushGL, kOwnerOther }; typedef Mushware::t4GLVal tVertex; typedef Mushware::t4GLVal tColour; typedef Mushware::t4GLVal tTexCoord; typedef MushGLVertexBuffer<tVertex> tVertexBuffer; typedef MushGLVertexBuffer<tColour> tColourBuffer; typedef MushGLVertexBuffer<tTexCoord> tTexCoordBuffer; typedef MushcoreData<MushGLBuffers, Mushware::U32> tData; typedef MushcoreDataRef<MushGLBuffers, Mushware::U32> tDataRef; MushGLBuffers() : m_owner(kOwnerNone) {} virtual ~MushGLBuffers() {} void Claim(Mushware::U8 inOwner = kOwnerOther); void Release(Mushware::U8 inOwner = kOwnerOther); void Decache(void) {} static Mushware::U32 NextBufferNumAdvance(void) { return ++m_nextBufferNum; } private: Mushware::U8 m_owner; tVertexBuffer m_vertexBuffer; //:read :wref tColourBuffer m_colourBuffer; //:read :wref std::vector<tTexCoordBuffer> m_texCoordBuffers; //:wref MushGLWorkspace<tVertex> m_worldVertices; //:read :wref MushGLWorkspace<tVertex> m_eyeVertices; //:read :wref MushGLWorkspace<tVertex> m_projectedVertices; //:read :wref static Mushware::U32 m_nextBufferNum; //%classPrototypes { public: const tVertexBuffer& VertexBuffer(void) const { return m_vertexBuffer; } // Writable reference for m_vertexBuffer tVertexBuffer& VertexBufferWRef(void) { return m_vertexBuffer; } const tColourBuffer& ColourBuffer(void) const { return m_colourBuffer; } // Writable reference for m_colourBuffer tColourBuffer& ColourBufferWRef(void) { return m_colourBuffer; } // Writable reference for m_texCoordBuffers std::vector<tTexCoordBuffer>& TexCoordBuffersWRef(void) { return m_texCoordBuffers; } const MushGLWorkspace<tVertex>& WorldVertices(void) const { return m_worldVertices; } // Writable reference for m_worldVertices MushGLWorkspace<tVertex>& WorldVerticesWRef(void) { return m_worldVertices; } const MushGLWorkspace<tVertex>& EyeVertices(void) const { return m_eyeVertices; } // Writable reference for m_eyeVertices MushGLWorkspace<tVertex>& EyeVerticesWRef(void) { return m_eyeVertices; } const MushGLWorkspace<tVertex>& 
ProjectedVertices(void) const { return m_projectedVertices; } // Writable reference for m_projectedVertices MushGLWorkspace<tVertex>& ProjectedVerticesWRef(void) { return m_projectedVertices; } virtual const char *AutoName(void) const; virtual MushcoreVirtualObject *AutoClone(void) const; virtual MushcoreVirtualObject *AutoCreate(void) const; static MushcoreVirtualObject *AutoVirtualFactory(void); virtual void AutoPrint(std::ostream& ioOut) const; virtual bool AutoXMLDataProcess(MushcoreXMLIStream& ioIn, const std::string& inTagStr); virtual void AutoXMLPrint(MushcoreXMLOStream& ioOut) const; //%classPrototypes } wYdzZVuNen/570raLWFIHA }; inline void MushGLBuffers::Claim(Mushware::U8 inOwner) { if (m_owner != kOwnerNone) { std::ostringstream message; message << inOwner; throw MushcoreRequestFail(std::string("Cannot claim ownership of '")+AutoName()+"' (owner is "+message.str()+")"); } m_owner = inOwner; } inline void MushGLBuffers::Release(Mushware::U8 inOwner) { if (m_owner != inOwner) { throw MushcoreRequestFail(std::string("Cannot release ownership of '")+AutoName()+"' - not owner"); } m_owner = kOwnerNone; } //%inlineHeader { inline std::ostream& operator<<(std::ostream& ioOut, const MushGLBuffers& inObj) { inObj.AutoPrint(ioOut); return ioOut; } //%inlineHeader } hApmqKc8J0tzYpk+qEeDlA //%includeGuardEnd { #endif //%includeGuardEnd } hNb4yLSsimk5RFvFdUzHEw