content
stringlengths
10
4.9M
// Update loop, called constantly. // Return false when the program terminates. bool wnd_manager::update(void) { int ch; switch(ch = wgetch(get_wnd_main()->get_wndptr())) { case ('q'): case ('Q'): { return false; } case ('r'): case ('R'): { refresh_from_server(); } break; case ('h'): case ('H'): { app->cur_date_decr(); refresh_from_cache(); } break; case ('l'): case ('L'): { app->cur_date_incr(); refresh_from_cache(); } break; } return true; }
<gh_stars>1-10 package com.fyber.inneractive.sdk.p174i; /* renamed from: com.fyber.inneractive.sdk.i.b */ public enum C7903b { RETURNED_ADTYPE_HTML(4), RETURNED_ADTYPE_MRAID(6), RETURNED_ADTYPE_VAST(8), RETURNED_ADTYPE_HTML5_VIDEO(9), RETURNED_ADTYPE_NATIVE(10), RETURNED_ADTYPE_PMN(13); /* renamed from: g */ private int f16015g; private C7903b(int i) { this.f16015g = i; } /* renamed from: a */ public static C7903b m17817a(int i) { C7903b[] values; for (C7903b bVar : values()) { if (bVar.f16015g == i) { return bVar; } } return null; } }
// NewRequest returns a new Request given a method, URL, and optional body. func NewRequest(method string, url string, body io.Reader) (req *http.Request, err error) { if req, err = http.NewRequest(method, url, body); err != nil { return } req.Header.Set("User-Agent", userAgent) return }
/**
 * Finds the node immediately preceding the node that holds the given value
 * in the linked list.
 *
 * @param value the value to search for; must not be {@code null}
 * @return the predecessor of the node holding {@code value}, or
 *         {@code null} when no node holds it
 */
private SNode findPreviousNode(T value) {
    if (value == null) {
        throw new RuntimeException("the cache is not supported null value");
    }
    // Walk the list looking one step ahead so the predecessor is returned.
    for (SNode<T> node = headNode; node.next != null; node = node.next) {
        if (value.equals(node.next.value)) {
            return node;
        }
    }
    return null;
}
LONDON—Think of them as the 20th-century kids. Roger Federer and Venus Williams both made their Wimbledon debuts back in the now-mythic 1990s, as talented but raw 17-year-olds. Despite high expectations and obvious skills, neither made it out of the first round. In 1997, Venus lost to Magdalena Gryzybowska of Poland in three sets. “I was so nervous in my first match here, it was a total disaster,” Williams said with a laugh yesterday. “Poor young V.” Two years later, Federer, a wild card, lost to Jiri Novak in five sets. Neither let those setbacks discourage them. There are few tennis players who don’t love Wimbledon, but Federer and Venus identified themselves with this place and this event right from the start, and every year since they’ve made winning it their first order of business. The same is true for the Olympics, which they made cool for tennis players to care about. Venus and Federer were from the first generation of pros who grew up watching tennis at the Games, and they were the first to fully embrace it when they had the opportunity. The thrill of their first Olympics, which came in Sydney in 2000 for both of them, never wore off. Venus and Federer have always been, first and foremost, tennis enthusiasts. The thrill of winning Wimbledon has obviously never worn off for them, either. In 2000, Venus won her first title here; in 2001, Federer upset seven-time champ Pete Sampras in the fourth round in the first undeniable sign of his potential. Venus would go on to win Wimbledon five times, and Federer would tie Sampras with seven. Until this week, those numbers appeared to be set in stone. Williams, now 37, won her last Wimbledon as a wide-eyed 28-year-old in 2009; Federer won his last in 2012, at 30, an age that used to be seen as a Rubicon for tennis players—to go past it meant to go into terminal decline. Now, in a remarkable co-story—a story for the aged, as it were—they’re both back in the final. 
For Federer, it’s his 11th in 19 trips to Wimbledon; for Venus, it’s her ninth in 20 trips. Federer is the oldest man to reach the final here since Ken Rosewall in 1974; Venus is the oldest woman since Martina Navratilova in 1994. Each has played this tournament with a similar sense of resolve and tenacity; they’ve been determined not to let the chance slip, and as Grand Slam champions tend to do, they’ve improved with each match. Venus and Federer have two of the most reliable, and somewhat underrated, serves in tennis history, and it has all started with that shot during this Wimbledon for them. This was doubly true in their straight-set semifinal wins, over Johanna Konta and Tomas Berdych. Down a break point at 4-4 in the first set, Venus uncorked a 106-m.p.h. second serve. Down two break points in the third set against Berdych, Federer fired three aces and a service winner. As Grand Slam champions tend to do, they’ve found something extra when they’ve needed it. At 1-1 in the second-set tiebreaker against Berdych, Federer suddenly let loose with four straight point-winning forehands to break the match open. For Venus and Federer, it’s about letting instinct—instinct informed by 20 years of experience—take over. “I feel like it reminded me of the matches I’ve had this tournament on some occasions, you know, there were chances for the opponent,” Federer said. “I was able to come up with the goods when it mattered.” “I’m just out there competing,” Venus said after beating Konta. “I try to produce whatever I need at the time. There’s no plan or anything like that. I don’t plan. I’m just trying to compete.” Most people will look at Federer and Venus and wonder how they can physically stay with players a decade, or even two decades, younger than they are. To me, though, what’s surprising and remarkable is what they’ve done mentally this season. 
For years, as she got older, Venus struggled in close matches; like Rafael Nadal, she specialized in making stirring comebacks, only to lose in the end. Federer’s problem wasn’t losing close matches so much as it was losing late in Grand Slams after looking like the best player in the tournament for the better part of two weeks. It appeared that rather than slowing them down, age had robbed them of just enough of their confidence and edge to make winning major titles seem beyond their reach. But their Australian Open successes—Federer won the tournament, Venus reached the final—signaled a rebirth for both of them, confidence-wise. What’s the secret to their success? Not dwelling on how old they are seems to be one. “I’m not thinking about age,” Venus said earlier this week. When Federer was asked about being in the final at 35, he quickly deflected the question by talking about how happy he was to see Rosewall in the Royal Box on Friday. The other key, of course, is loving what you do. Venus and Federer love tennis like few others have, enough to keep traveling, keep getting on airplanes, keep practicing and keep grinding on tour for 20 years. This, another Wimbledon final, is their reward. But they both know it’s too early to celebrate. “There’s still a lot to be done,” Venus said on Thursday. “I’m definitely excited. There’s one more match that I’d like to, you know, be the winner of. But I like to take courage in the fact that I’ve been playing well this tournament and this year, and all these moments have led to this.” “It’s a big deal,” Federer said on Friday. “I love this tournament. All my dreams came true here as a player ...Yeah, unbelievably excited. I hope I can play one more good match.” Between them they have 72 years, 40 seasons on tour and 12 Wimbledon titles, but Williams and Federer are still talking like rookies. Would you bet against the 20th-century kids this weekend? 
—GRAND SLAM WEEK: Watch Wimbledon Primetime on Tennis Channel, and catch up on the other 2017 Grand Slams on Tennis Channel Plus —Watch encores from the 2017 French Open and Australian Open on Tennis Channel Plus, including matches like the AO Final showdown between Serena & Venus Williams
// src/lib.rs // // Copyright (c) 2015,2017 rust-mersenne-twister developers // Copyright (c) 2020 <NAME> <<EMAIL>> // // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. All files in the project carrying such notice may not be copied, // modified, or distributed except according to those terms. #![deny(clippy::all)] #![deny(clippy::pedantic)] #![deny(clippy::cargo)] #![allow(unknown_lints)] #![deny(missing_debug_implementations)] #![warn(missing_docs)] #![warn(rust_2018_idioms)] #![warn(trivial_casts, trivial_numeric_casts)] #![warn(unused_qualifications)] #![warn(variant_size_differences)] #![forbid(unsafe_code)] //! Mersenne Twister random number generators. //! //! This is a native Rust implementation of a selection of Mersenne Twister //! generators. Mersenne Twister is not suitable for cryptographic use. //! //! This crate provides: //! //! - [`Mt19937GenRand32`], the original reference Mersenne Twister //! implementation known as `MT19937`. This is a good choice on both 32-bit //! and 64-bit CPUs (for 32-bit output). //! - [`Mt19937GenRand64`], the 64-bit variant of `MT19937` known as //! `MT19937-64`. This algorithm produces a different output stream than //! `MT19937` and produces 64-bit output. This is a good choice on 64-bit //! CPUs. //! //! Both of these use 2.5KB of state. [`Mt19937GenRand32`] uses a 32-bit seed. //! [`Mt19937GenRand64`] uses a 64-bit seed. Both can be seeded from an iterator //! of seeds. //! //! Both RNGs implement a `recover` constructor which can reconstruct the RNG //! state from a sequence of output samples. //! //! # Usage //! //! You can seed a RNG and begin sampling it: //! //! ``` //! # use rand_mt::Mt64; //! // Create the RNG. //! let mut rng = Mt64::new(0x1234_567_89ab_cdef_u64); //! // start grabbing randomness from rng... //! let mut buf = vec![0; 512]; //! 
rng.fill_bytes(&mut buf); //! ``` //! //! Or if you want to use the default (fixed) seeds that are specified in the //! reference implementations: //! //! ``` //! # use rand_mt::Mt; //! let default = Mt::default(); //! let mt = Mt::new_unseeded(); //! assert_eq!(default, mt); //! ``` //! //! # Crate Features //! //! `rand_mt` is `no_std` compatible. `rand_mt` has several optional features //! that are enabled by default: //! //! - **rand-traits** - Enables a dependency on [`rand_core`]. Activating this //! feature implements `RngCore` and `SeedableRng` on the RNGs in this crate. //! - **std** - Enables a dependency on the Rust Standard Library. Activating //! this feature enables [`std::error::Error`] impls on error types in this //! crate. //! //! Mersenne Twister requires ~2.5KB of internal state. To make the RNGs //! implemented in this crate practical to embed in other structs, you may wish //! to store the RNG in a `Box`. //! //! [`rand_core`]: https://crates.io/crates/rand_core //! [`std::error::error`]: https://doc.rust-lang.org/std/error/trait.Error.html #![doc(html_root_url = "https://docs.rs/rand_mt/4.0.1")] #![cfg_attr(not(feature = "std"), no_std)] // Ensure code blocks in README.md compile #[cfg(doctest)] macro_rules! readme { ($x:expr) => { #[doc = $x] mod readme {} }; () => { readme!(include_str!("../README.md")); }; } #[cfg(doctest)] readme!(); use core::fmt; pub use crate::mt::Mt19937GenRand32; pub use crate::mt64::Mt19937GenRand64; mod mt; mod mt64; #[cfg(test)] mod vectors; /// A type alias for [`Mt19937GenRand32`], 32-bit Mersenne Twister. pub type Mt = Mt19937GenRand32; /// A type alias for [`Mt19937GenRand64`], 64-bit Mersenne Twister. pub type Mt64 = Mt19937GenRand64; /// Error returned from fallible Mersenne Twister recovery constructors. /// /// When the `std` feature is enabled, this type implements `std::error::Error`. 
#[non_exhaustive]
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum RecoverRngError {
    /// Attempted to recover an RNG with too few samples.
    ///
    /// Too few samples leaves the internal state buffer partially
    /// uninitialized.
    ///
    /// Recover constructors require an exact number of samples to ensure the
    /// recovered RNG matches the state of the RNG that supplied all of the
    /// samples.
    TooFewSamples(usize),
    /// Attempted to recover an RNG with too many samples.
    ///
    /// Recover constructors require an exact number of samples to ensure the
    /// recovered RNG matches the state of the RNG that supplied all of the
    /// samples.
    TooManySamples(usize),
}

// Human-readable messages mirror the variant names; the payload is the
// sample count the constructor expected.
impl fmt::Display for RecoverRngError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::TooFewSamples(expected) => {
                write!(f, "Too few samples given to recover: expected {}", expected)
            }
            Self::TooManySamples(expected) => write!(
                f,
                "Too many samples given to recover: expected {}",
                expected
            ),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for RecoverRngError {}
// SPDX-License-Identifier: MIT OR Apache-2.0 // // Copyright (c) 2020-2021 <NAME> <<EMAIL>> //! Asynchronous exception handling. #[cfg(target_arch = "aarch64")] #[path = "../_arch/aarch64/exception/asynchronous.rs"] mod arch_exception_async; pub use arch_exception_async::*;
/**
 * Takes a screenshot using super user privileges.
 *
 * The image is written as a JPEG named after the current timestamp into
 * the malware-images storage folder. Success or failure is logged.
 */
public void shoot(){
    try {
        Date date = new Date();
        // Bug fix: the original formatted the date and then discarded the
        // result, naming the file from Date.toString() instead. Use the
        // formatted value, then sanitize it for the filesystem.
        String fileName = android.text.format.DateFormat
                .format("yyyy-MM-dd hh:mm:ss", date).toString();
        fileName = fileName.replaceAll(" ", "_");
        fileName = fileName.replace(":", ".");
        fileName = fileName + ".jpg";
        String filePath = FileUtil.getInstance().getMalwareImagesStorageFolder().getAbsolutePath()
                + File.separator + fileName;

        // Run screencap inside a root shell. The trailing newline makes the
        // shell execute the command, and "exit" lets waitFor() return.
        Process process = Runtime.getRuntime().exec("su");
        DataOutputStream os = new DataOutputStream(process.getOutputStream());
        try {
            os.writeBytes("screencap -p " + filePath + "\n");
            os.writeBytes("exit\n");
            os.flush();
        } finally {
            // Close even if writing fails, so the shell sees EOF.
            os.close();
        }
        process.waitFor();

        if (process.exitValue() == 0){
            Log.d("ScreenshotUtil", "Screenshot taken and saved in " + filePath);
        }else {
            Log.d("ScreenshotUtil", "Screenshot not taken");
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
def classify(s):
    """Classify a 4-digit integer as a YYMM/MMYY date string.

    The YYMM reading is valid when the last two digits form a month
    (1-12); the MMYY reading is valid when the first two digits do.

    Args:
        s: Integer formed by concatenating two two-digit fields.

    Returns:
        "AMBIGUOUS" when both readings are valid, "YYMM" or "MMYY" when
        exactly one is, and "NA" when neither is.
    """
    yymm = 1 <= s % 100 <= 12   # last two digits form a valid month
    mmyy = 100 <= s <= 1299     # i.e. 1 <= s // 100 <= 12

    if yymm and mmyy:
        return "AMBIGUOUS"
    if yymm:
        return "YYMM"
    if mmyy:
        return "MMYY"
    return "NA"


if __name__ == "__main__":
    # Same I/O behavior as before: read S from stdin, print the verdict.
    print(classify(int(input())))
/**
 * @file methods/approx_kfn/qdafn.hpp
 * @author Ryan Curtin
 *
 * An implementation of the query-dependent approximate furthest neighbor
 * algorithm specified in the following paper:
 *
 * @code
 * @incollection{pagh2015approximate,
 *   title={Approximate furthest neighbor in high dimensions},
 *   author={Pagh, R. and Silvestri, F. and Sivertsen, J. and Skala, M.},
 *   booktitle={Similarity Search and Applications},
 *   pages={3--14},
 *   year={2015},
 *   publisher={Springer}
 * }
 * @endcode
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_APPROX_KFN_QDAFN_HPP
#define MLPACK_METHODS_APPROX_KFN_QDAFN_HPP

#include <mlpack/core.hpp>

namespace mlpack {

/**
 * Query-dependent approximate furthest neighbor search model (QDAFN).
 * Declarations only; the implementation lives in qdafn_impl.hpp.
 *
 * @tparam MatType Matrix type holding the reference/query data.
 */
template<typename MatType = arma::mat>
class QDAFN
{
 public:
  /**
   * Construct the QDAFN object but do not train it.  Be sure to call Train()
   * before calling Search().
   *
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  QDAFN(const size_t l, const size_t m);

  /**
   * Construct the QDAFN object with the given reference set (this is the set
   * that will be searched).
   *
   * @param referenceSet Set of reference data.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  QDAFN(const MatType& referenceSet, const size_t l, const size_t m);

  /**
   * Train the QDAFN model on the given reference set, optionally setting new
   * parameters for the number of projections/tables (l) and the number of
   * elements stored for each projection/table (m).
   *
   * @param referenceSet Reference set to train on.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  void Train(const MatType& referenceSet,
             const size_t l = 0,
             const size_t m = 0);

  /**
   * Search for the k furthest neighbors of the given query set.  (The query
   * set can contain just one point, that is okay.)  The results will be
   * stored in the given neighbors and distances matrices, in the same format
   * as the mlpack NeighborSearch and LSHSearch classes.
   *
   * @param querySet Set of query points to search with.
   * @param k Number of furthest neighbors to find for each query point.
   * @param neighbors Output matrix of neighbor indices (k rows, one column
   *     per query point).
   * @param distances Output matrix of distances to those neighbors.
   */
  void Search(const MatType& querySet,
              const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances);

  //! Serialize (or deserialize) the model state.
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);

  //! Get the number of projections.
  size_t NumProjections() const { return candidateSet.size(); }

  //! Get the candidate set for the given projection table.
  const MatType& CandidateSet(const size_t t) const { return candidateSet[t]; }
  //! Modify the candidate set for the given projection table.  Careful!
  MatType& CandidateSet(const size_t t) { return candidateSet[t]; }

 private:
  //! The number of projections.
  size_t l;
  //! The number of elements to store for each projection.
  size_t m;
  //! The random lines we are projecting onto.  Has l columns.
  arma::mat lines;
  //! Projections of each point onto each random line.
  arma::mat projections;

  //! Indices of the points for each S.
  arma::Mat<size_t> sIndices;
  //! Values of a_i * x for each point in S.
  arma::mat sValues;

  //! Candidate sets; one element in the vector for each table.
  std::vector<MatType> candidateSet;
};

} // namespace mlpack

// Include implementation.
#include "qdafn_impl.hpp"

#endif
As President Obama hosts leaders from Saudi Arabia, Qatar, Oman, Bahrain, Kuwait, and the UAE on May 13 and 14, he will surely hear a push from them for a White House plan to contain Iran. However, recent candid remarks from Admiral Mike Mullen should cause America to think twice. Going against conventional wisdom in Washington, the former Chairman of the Joint Chiefs of Staff said: “[A nuclear deal] would also more fairly rebalance American influence. We need to re-examine all of the relationships we enjoy in the region, relationships primarily with Sunni-dominated nations. Détente with Iran might better balance our efforts across the sectarian divide.” Let that sink in. The highest-ranking officer in the United States Armed Forces from 2007 to 2011 is essentially saying that America’s long-standing allies in the Middle East are trying to lock it into permanent confrontation with Iran–and into a permanent alliance with countries whose interests and values are increasingly opposed to its own. After the initial shock from Admiral Mullen’s intellectual honesty subsides, one quickly realizes that he is right: Why shouldn’t the U.S. have more options at its disposal to achieve its interests and reduce the threats it faces? For example, after 15 Saudi hijackers attacked the U.S. on September 11th, 2001, American decision-makers did not have the option of being firm with Saudi Arabia. Instead, they were trapped in an alliance precisely because there was no regional alternative that could be leveraged to hold the Saudis accountable. Obama is often accused of waffling in his projection of American power–most recently by George W. Bush–but it appears that he simply agrees with his former top military commander: American interests in the Middle East are best served by abandoning the failed policies of Dual Containment and the Bush Doctrine that unnecessarily sapped American blood and treasure. Rather than deploying the military to dominate the region, the U.S. 
instead seeks to forge pragmatic, functioning relations throughout the region to maximize its flexibility. That is what balancing is all about. But what exactly does it entail? For the US, balancing is based on two key functions. First, it means that America’s strategic goal is not promoting Israeli or Saudi domination of the Middle East, but rather ensuring that no regional power cements political, economic or military domination. This helps safeguard against hostile countries adversely affecting energy production and exports–and by extension, the global economy. For example, when U.S. and Iranian tactical or strategic interests converge in the fight against ISIS, they can work together in Iraq against Saudi, Qatari, Turkish and other malign influence. Second, and perhaps more importantly, balancing also ensures that America can work with all countries to reduce security threats as they evolve. For example, when U.S. and Iranian tactical or strategic interests converge in the fight against ISIS, they can work together in Iraq against Saudi, Qatari, Turkish and other malign influence. When Washington and Tehran see their interests diverge in Yemen, the U.S. can team up with Saudi Arabia and other Persian Gulf countries to work against Iranian interests. The foremost benefit of working with a greater number of countries can be summed up in two words: leverage and options. An ill-conceived war in Iraq, an oil-for-security pact with Saudi Arabia, and an inability to hold Israel accountable for its occupation of Palestinian territory have severely reduced American influence and flexibility in the Middle East. Rebalancing will not change the status quo on these issues, but it will impose a cost on countries that take U.S. support for granted. Riyadh and Tel Aviv will have to think twice before implementing policies that damage American security interests if they know Washington can pick up the phone and call Tehran instead. 
The more options America has, the greater its leverage becomes. For its part, Tehran already knows that the Saudis and Israelis are more than willing to help America contain and challenge Iran. But Iranian decision-makers have signaled their willingness to accept an American rebalancing–so long as the U.S. accepts Iran’s prominent role in the region and respects Iran’s interests and security concerns. As one senior Iranian official explained to me, Tehran is open to working in coalitions – both tactically and strategically, as well as regionally and globally – because it believes it can achieve its strategic objectives by balancing cooperation and competition, particularly vis-à-vis the United States. Rebalancing after a nuclear deal is preferable to the regional status quo, but it is not a silver bullet. More specifically, Iran is using the nuclear negotiations as a litmus test to see if it can manage–and gradually improve–its relations with the U.S. by containing existing tensions and preventing new conflicts from emerging. Earlier this month, Ayatollah Khamenei confirmed this approach when he said: “If the other side gives up its usual diversionary tactics, this will become an experience for us that, very well, we can negotiate with them on other issues.” Iran is willing to test this proposition precisely because it has remained independent from foreign powers, thereby providing it with greater flexibility than its regional neighbors. To be clear, rebalancing after a nuclear deal is preferable to the regional status quo, but it is not a silver bullet. Washington and Tehran will likely not become allies, but the bilateral relationship can shift from enemies to competitors–relations in which they continue to challenge one another’s power, but with diplomats rather than bombs or bullets. 
That is why regional integration through dialogue and engagement–even if it’s a love-hate relationship–provides the best hope for shifting Iranian policy in such a way that accepts U.S. global leadership, maximizes Washington’s flexibility, and reduces the threats to American interests in the Middle East. This article is part of Quartz Ideas, our home for bold arguments and big thinkers.
Refundability and Price: Empirical Analysis on the Airline Industry This paper provides new evidence on price dispersion in the US airline industry. Using the observed fare differences between refundable and non-refundable tickets, we first document evidence on the prices passengers pay for a refund option. We find that the factors related to the value of the refund option and customers’ individual demand uncertainty have a significant effect on the relative refund fares. This finding is robust for various market structures. Further, taking into account the variations of the relative refund fares, we investigate the effects of market structure on price dispersion.
def masked_number(self):
    """Return the card number with the middle digits masked.

    Joins the BIN prefix, six mask characters, and the final four digits
    into a single string.
    """
    return "".join((self.bin, "******", self.last_4))
// Default style object for this component.
export default {
  root: {
    // Medium-grey background applied to the root element.
    backgroundColor: "#909090"
  }
};
#include<iostream>
#include<math.h>
#include<stdio.h>
using namespace std;

// A car drives a road of length l at speed v.  A traffic light sits at
// distance d and repeats g seconds of green followed by r seconds of red.
// Reads l d v g r from stdin and prints the total travel time.
int main(){
    double l, d, v, g, r;
    cin >> l >> d >> v >> g >> r;

    // Time at which the car reaches the light.
    double arrival = d / v;

    // Reduce the arrival time by whole light cycles, accumulating the time
    // those cycles consumed.  (Kept as repeated subtraction so the floating
    // point result matches the original exactly.)
    double cyclesTime = 0;
    while (arrival >= g + r) {
        arrival -= g + r;
        cyclesTime += g + r;
    }

    double total;
    if (arrival >= g) {
        // Red on arrival: wait until the next green starts, then cover the
        // remaining distance.
        total = cyclesTime + g + r + (l - d) / v;
    } else {
        // Green on arrival: drive straight through.
        total = l / v;
    }

    printf("%.8lf", total);
    cout << endl;
}
/** Shows the results dialog when the test activity closes. */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
    // Let the superclass perform its own result handling first.
    super.onActivityResult(requestCode, resultCode, intent);
    // The dialog is shown unconditionally: neither requestCode nor
    // resultCode is inspected here.  NOTE(review): presumably the dialog
    // reads results stored elsewhere by the test activity — confirm.
    showDialog(RESULTS_DIALOG);
}
package com.enderio.core.client.gui.button;

import com.enderio.core.api.client.gui.IHideable;

import net.minecraft.client.gui.widget.button.Button;
import net.minecraft.util.text.ITextComponent;

/**
 * A {@link BaseButton} whose visibility can be toggled externally through
 * the {@link IHideable} interface.  All constructors simply delegate to the
 * matching {@code BaseButton} constructor.
 */
public class HideableButton extends BaseButton implements IHideable {

    /** Creates a button with no press action and no tooltip. */
    public HideableButton(int x, int y, int width, int height, ITextComponent buttonText) {
        super(x, y, width, height, buttonText);
    }

    /** Creates a button that runs {@code pressedAction} when pressed. */
    public HideableButton(int x, int y, int width, int height, ITextComponent buttonText, IPressable pressedAction) {
        super(x, y, width, height, buttonText, pressedAction);
    }

    /** Creates a button that renders {@code onTooltip} on hover. */
    public HideableButton(int x, int y, int width, int height, ITextComponent buttonText, Button.ITooltip onTooltip) {
        super(x, y, width, height, buttonText, onTooltip);
    }

    /** Creates a button with both a press action and a hover tooltip. */
    public HideableButton(int x, int y, int width, int height, ITextComponent buttonText, IPressable pressedAction, Button.ITooltip onTooltip) {
        super(x, y, width, height, buttonText, pressedAction, onTooltip);
    }

    /** Sets the vanilla {@code visible} flag directly. */
    @Override
    public void setIsVisible(boolean visible) {
        this.visible = visible;
    }

    /** @return the current value of the vanilla {@code visible} flag */
    @Override
    public boolean isVisible() {
        return visible;
    }
}
/*
 * Class definition exercise.
 *
 * Expected trace output:
 *
 * Point2D::Point2D() ...
 * Point2D::Point2D(double x, double y) ...
 * x: 0.666
 * y: 0.875
 * Point2D::y() ...
 * Point2D::setX() ...
 * x: 0.875
 * Point2D::x() ...
 * Point2D::setY() ...
 * y: 0.875
 */
#include "point2d_2.h"

// Builds a default point and an initialized point, then copies one
// coordinate between them through the accessor/mutator pairs.
int main()
{
    Point2D alpha;
    Point2D beta(0.666, 0.875);

    // Copy beta's y into alpha's x, then alpha's (now updated) x back
    // into beta's y — both end up holding 0.875.
    alpha.setX(beta.y());
    beta.setY(alpha.x());

    return 0;
}
/**
 * Pool controller. This class exposes the org.apache.commons.pool.KeyedObjectPool class.
 *
 * @see https://www.javacodegeeks.com/2013/02/pool-of-ssh-connections-using-apache-keyedobjectpool.html
 * @author Marco Castigliego
 * @author Daniele Maddaluno
 */
public class StackSessionPool {

    // Keyed pool of SSH sessions: at most one session per ServerDetails key.
    private KeyedObjectPool<ServerDetails, Session> pool;

    // Eagerly-initialized singleton instance.
    private static final StackSessionPool instance = new StackSessionPool();

    private StackSessionPool(){
        startPool();
    }

    /**
     * @return the singleton instance of this pool controller
     */
    public static StackSessionPool getInstance() {
        return instance;
    }

    /**
     * @return the org.apache.commons.pool.KeyedObjectPool class
     */
    public KeyedObjectPool<ServerDetails, Session> getPool() {
        return pool;
    }

    /**
     * (Re)creates the underlying keyed pool, allowing at most one pooled
     * session per server key.  This method returns nothing; it replaces any
     * previously created pool without closing it.
     */
    public void startPool() {
        // replaced this from 1.x: new StackKeyedObjectPool<ServerDetails, Session>(new SessionFactory(), 1);
        GenericKeyedObjectPoolConfig config = new GenericKeyedObjectPoolConfig();
        config.setMaxTotalPerKey(1);
        pool = new GenericKeyedObjectPool<ServerDetails, Session>(new SessionFactory(), config);
    }
}
Earth’s largest inland body of water has been slowly evaporating for the past two decades due to rising temperatures associated with climate change, a new study finds. Water levels in the Caspian Sea dropped nearly 7 centimeters (3 inches) per year from 1996 to 2015, or nearly 1.5 meters (5 feet) total, according to the new study. The current Caspian Sea level is only about 1 meter (3 feet) above the historic low level it reached in the late 1970s. Increased evaporation over the Caspian Sea has been linked to increased surface air temperatures. According to the data from the study, the average yearly surface temperature over the Caspian Sea rose by about 1 degree Celsius (1.8 degrees Fahrenheit) between the two time frames studied, 1979-1995 and 1996-2015. These rising temperatures are likely a result of climate change, according to the study’s authors. Evaporation brought about by warming temperatures appears to be the primary cause of the current drop in sea level and the decline will likely continue as the planet warms, according to the study’s authors. “From our point of view as geoscientists, it’s an interesting place because it’s possible to construct a sort of budget for the total amount of water that’s there,” said Clark Wilson, a geophysicist with the Jackson School of Geosciences at the University of Texas at Austin, and co-author of the new study published in Geophysical Research Letters, a journal of the American Geophysical Union. “The real control that causes it to go up and down over long periods of time is really most likely the evaporation, which is almost completely dominated by temperature.” The Caspian Sea, located between Europe and Asia, is roughly the size of Montana at 371,000 square kilometers (143,244 square miles). It has experienced substantial changes in its water level over the past several hundred years, but previous studies were unable to nail down the exact causes of the sea level changes. 
The Caspian Sea is bordered by five countries and contains an abundance of natural resources and diverse wildlife. The sea also contains oil and natural gas reserves, and is an important resource for fisheries in the surrounding countries. Continue reading at American Geophysical Union Image via Jianli Chen, American Geophysical Union
Metabolic reprogramming ensures cancer cell survival despite oncogenic signaling blockade Lue et al. show that although inhibition of PI3K–AKT–mTOR signaling markedly decreased glycolysis and restrained tumor growth, these signaling and metabolic restrictions triggered autophagy. Survival of cancer cells was critically dependent on phospholipase A2 (PLA2) to mobilize lysophospholipids and free fatty acids to sustain fatty acid oxidation and oxidative phosphorylation. Despite significant advances in precision cancer therapies, tumor regressions are variable and rarely complete. Although the molecular basis of how cancer cells survive therapies that are designed to kill them (i.e. drug-tolerant "persister" cells) is likely due to a mixed set of mechanisms, we reasoned that at its root are subpopulations of drug-tolerant cancer cells that can rewire their signaling and metabolic networks to adapt to treatment-imposed proliferative, survival, and nutrient restrictions. While rewired compensatory oncogenic signaling (e.g., mediated through bypass pathways, receptor amplification, and second site mutations) have been well documented, little is known about the metabolic reprogramming induced by treatment and how this contributes to resistance. To better understand the metabolic consequences of anti-cancer treatment, we studied metabolic reprogram-ming in the context of PI3K pathway inhibition. The PI3K pathway, which includes the PI3K holoenzyme and its truncal effector kinases, AKT and mTOR, is essential for cell growth, proliferation, survival, and metabolism. However, clinical responses to PI3K-AKT-mTOR inhibitors have been modest to date (Fruman and Rommel 2014;Toska and Baselga 2016). We hypothesized that the limited ability of PI3K-AKT-mTOR inhibitors to induce cancer cell death was due to the autophagy-mediated metabolic reprogramming that enabled drug-tolerant cells to survive despite therapy-enforced nutrient restrictions. 
Our hypothesis was based on the knowledge that nutrients derived from autophagic degradation are reused to maintain macromolecular synthesis and or oxidized to maintain bioenergetics (Galluzzi et al. 2015). Additionally, due to the central role that the PI3K-AKT-mTOR pathway has in regulating cellular growth, we reasoned that small molecule inhibitors that converge directly or indirectly on this pathway would similarly induce autophagy to sustain drug-tolerant cells, therefore extending the reach of this mechanism of resistance beyond specific PI3K-AKT-mTOR inhibitors. Thus far, the therapeutic reflex to block autophagy is to add anti-malarial lysosomotropic inhibitors such as chloroquine, but the clinical responses to these drugs have been variable and noncurative (Goldberg et al. 2012;Shanware et al. 2013;Rosenfeld et al. 2014;Towers and Thorburn 2016). Therefore, it would be clinically impactful to directly target the metabolic enzymes mediating autophagy-fueled metabolic processes on which drug-tolerant cells are dependent. However, there have not been any therapeutically tractable metabolic enzymes identified in the setting of therapy-induced autophagy. Here, we identify CYT387, a JAK inhibitor that induces autophagy by inhibiting mTOR complex 1 (mTORC1). Consequently, by relieving the inhibitory signal transmitted from mTORC1 to PI3K, treatment with CYT387 leads to activation of the PI3K-mTORC2/AKT pathway. Combining CYT387 with MK2206, an allosteric AKT inhibitor, did not induce any tumor regressions despite effectively inhibiting PI3K-AKT-mTORC1/2 activation. Notably, the combination treatment further increased autophagy. This suggested that inhibition of signaling pathways alone would be insufficient to kill all tumor cells. Subsequently, we performed global metabolic profiling to systematically document the immediate metabolic adaptations effected by the therapy-induced autophagic processes. 
We show that autophagy-mediated metabolic adaptations supported cancer cell survival. Autophagy was required for these metabolic adaptations because these changes were abrogated in cells deficient for the essential autophagy gene ATG5. Subsequently, we identified that phospholipase A2 (PLA2), the rate-limiting enzyme responsible for catalyzing the breakdown of phospholipids to lysophospholipids and fatty acids, had an important role in the survival of cancer cells. Pharmacological inhibition of this enzyme dampened oxidative phosphorylation (OXPHOS) and further increased apoptosis when combined with CYT387-MK2206 combination treatment. Our findings highlight a previously unappreciated role for PLA2 in conferring a survival advantage to drug-tolerant cancer cells in metabolically restricted environments, demonstrate that this enzyme supports autophagy-induced metabolic reprogramming, and, importantly, provide a path forward for novel cotreatment strategies. CYT387 induces autophagy through the modulation of the PI3K-AKT-mTOR pathway To precisely identify which cancer drugs induce autophagic flux by inhibiting the mTORC1 pathway, we used a library of 116 clinically focused and mechanistically annotated compounds that included activity against two-thirds of the tyrosine kinome as well as other nontyrosine kinase pathways on a human renal cell carcinoma (RCC) cell line, ACHN (Leonard et al. 2016;Maxson et al. 2013Maxson et al. , 2016) (see Supplemental Fig. S1A for a schematic of the workflow; see Supplemental Table 1 for a list of drugs and known targets). We monitored mTORC1 activity through phosphorylation of S6 and combined this with a measurement of p62 steady-state levels as an initial screen of autophagy flux (Joachim et al. 2015) in a highcontent imaging screen. 
Remarkably, the screen identified several structurally different Janus kinase (JAK) inhibitors as potent inducers of autophagic flux; namely, pan-JAK inhibitor (JAK1, JAK2, and JAK3), Go6976 (JAK2), ruxolitinib (JAK1 and JAK2), and CYT387 (JAK1 and JAK2). All four drugs potently inhibited S6 phosphorylation, pointing to an mTORC1-dependent mechanism. Since JAK inhibitors as a class of compounds scored highly in our screen and because CYT387 was the most potent JAK inhibitor to induce autophagic flux and simultaneously decrease S6 phosphorylation in solid tumor cells in our screen, we selected this small molecule for further validation. CYT387 (momelotinib) is an orally available JAK1-2 inhibitor that has improved splenomegaly and reduced anemia in myeloproliferative neoplasia (MPN) patients (Patel et al. 2016; Winton and Kota 2017). In support of this, CYT387 suppressed the phosphorylation of JAK; its substrate, STAT3; and S6 in human RCC and MPN cell lines (Supplemental Fig. S1B,C). CYT387 induces autophagy that is reversible—as seen by the reduction in LC3B lipidation within 24 h of removal of drug—and correlated with reversal of the p-STAT3, p-S6, and p-AKT phosphorylation patterns (Supplemental Fig. S1D). CYT387 treatment of ACHN human RCC cells plated on coverslips resulted in decreased p62 protein expression and phosphorylated S6 levels by immunofluorescence staining, confirming our high-content imaging finding (Fig. 1A). Accordingly, we observed that treatment with CYT387 induced autophagy in multiple human RCC and MPN cell lines and was primarily cytostatic (Supplemental Fig. S1E,F). Immunoblots confirmed the induction of autophagy by CYT387, as seen by the conversion of LC3-I to LC3-II, the degradation of p62, and inhibition of mTORC1 (as seen by decrease in phosphorylated S6) (Fig. 1B). We additionally confirmed that CYT387 treatment induced autophagic flux by several different methods.
(1) We stably expressed an mCherry-EGFP-LC3 reporter in ACHN cells, which takes advantage of the fact that EGFP fluorescence is quenched in the acidic environment of the autolysosome relative to mCherry (Debnath 2008). CYT387 treatment resulted in decreased expression of green-yellow cells and increased expression of red cells (Supplemental Fig. S2A). (2) We stained CYT387-treated ACHN cells with the autofluorescent compound monodansylcadaverine (MDC), a marker of autolysosomes, and found that CYT387 increased MDC autofluorescence (Supplemental Fig. S2B; Turcotte et al. 2008). (3) CYT387 increased LC3-II levels in ACHN cells, and this increase was more pronounced in the presence of Figure 1. CYT387 induces autophagy in human cancer cell lines and patient-derived models. (A) ACHN cells were grown on coverslips, treated with CYT387 for 24 h, and stained for p62 and p-S6. (B) ACHN cells were treated with increasing doses of CYT387 (0-2 µM) and immunoblotted with LC3, p62, p-S6, total S6, p-STAT3, total STAT3, and β-actin. (C) ATG5 +/+ and ATG5 −/− MEFs were treated with 0-3 µM CYT387 for 24 h, and LC3 processing was evaluated by immunoblotting. (D) Immunoblot for LC3 and p-STAT3 and ATG5 in ACHN cells transiently transfected with siRNA against ATG5. β-Actin was used as a loading control. (E) Patient-derived organotypic cultures were treated with CYT387 for 24 h and stained for p-S6 and LC3B (images from two different patient tumors are shown). (F,G) Quantification of p-S6 (F) and LC3B (G) staining in patient-derived organotypic cultures (bar graph). n = 10 patients. (H) Supervised hierarchical clustering heat maps of phosphoserine and phosphothreonine (pST) peptides and phosphotyrosine (pY) peptides identified from CYT387-treated and untreated ACHN and SN12C human RCC cells with two technical replicates.
Five-hundred-thirteen unique pST phosphopeptides (rows) and 180 unique pY phosphopeptides were either fourfold more enriched or fourfold less enriched, on average (t-test, P < 0.2), in CYT387-treated cells compared with untreated cells (phosphopeptide lists are in Supplemental Tables 2, 3). (I) Relative phosphorylation abundance between ACHN and SN12C CYT387-treated and untreated cells. RPS6 S 236 , S 240 was found in the pST unsupervised heat map. (Ctrl) Control; (CYT) CYT387. ( * ) P < 0.05; ( * * ) P < 0.01; ( * * * ) P < 0.001, unpaired t-test. (J) Kinase substrate enrichment analysis (KSEA) of CYT387-treated and untreated pST data. Hits > 5; false discovery rate < 0.05. A positive NKS (normalized Kolmogorov-Smirnov score) infers greater kinase activity in CYT387-treated cells, while a negative NKS indicates greater activity in untreated cells (unfiltered summary is in Supplemental Table 4). (K) KEGG (Kyoto Encyclopedia of Genes and Genomes) pathways found from DA-VID analysis of relatively active genes due to CYT387 treatment. The DAVID input list was generated by interpreting functional annotations of pSTY data (the complete pathway list is in Supplemental Table 2). E64D/pepstatin (which inhibits the protease-induced reconversion of LC3-II into LC3-I), consistent with an increase in autophagosome formation (Supplemental Fig. S2C;Tanida et al. 2005). (4) CYT387 increased the number of double-membraned autophagosomes, which are pathognomonic of autophagy as determined by transmission electron microscopy (Supplemental Fig. S2D; Klionsky et al. 2016). Notably, CYT387 was able to induce autophagy in a dose-dependent manner in murine embryonic fibroblasts (MEFs) that retained the essential autophagy gene ATG5 (ATG5 +/+ ), as seen by the lipidation of LC3 (Fig. 1C) (Cecconi and Levine 2008;Fung et al. 2008). Conversely, CYT387 did not induce autophagy in ATG5-deficient cells (ATG5 −/− ). 
Likewise, CYT387-induced autophagy was abrogated with siRNA depletion of ATG5 in ACHN cells (Fig. 1D). To extend our studies into clinical samples, we exposed patient-derived RCC organotypic cultures to CYT387 treatment for 24 h. Importantly, CYT387 significantly induced LC3B expression while simultaneously reducing phosphorylated S6 levels (Fig. 1E,F). Taken together, these results indicate that CYT387 treatment induces autophagic flux in both human RCC cell lines and patient-derived tumors. To obtain further insight into the signaling pathways affected by CYT387 treatment, we studied changes in the phosphoproteome of two different human RCC cells (ACHN and SN12C) after CYT387 treatment using quantitative phosphoproteomics (Rush et al. 2005; Moritz et al. 2010; Zhuang et al. 2013). Supervised hierarchical clustering revealed that 513 phosphoserine and phosphothreonine (pST) peptides and 180 phosphotyrosine (pY) peptides significantly differed between treated and untreated cells (Fig. 1H; Supplemental Tables 2-9). We observed two phosphopeptides to be hypophosphorylated at inhibitory residues T1462 and S1798 in tuberous sclerosis complex 2 (TSC2) in CYT387-treated cells (Manning et al. 2002; Roux et al. 2004). Rapamycin-insensitive companion of mTOR (RICTOR) in CYT387-treated cells was hypophosphorylated at T1135. RICTOR is a subunit of mTORC2 (Kim et al. 2017), but the phosphorylation of T1135 is mediated by mTORC1 via induction of the p70S6 kinase (Julien et al. 2010) and impedes the ability of mTORC2 to phosphorylate AKT on S473 (Fig. 1I; Dibble et al. 2009). As expected, ribosomal protein S6 at residues S236 and S240 and STAT3 Y705 trended toward hypophosphorylation, and p70S6 kinase (RPS6KB) was significantly less active in CYT387-treated cells based on kinase substrate enrichment analyses (KSEAs) (Fig. 1J; Drake et al. 2012).
However, KSEAs of AKT motifs were inconclusive, as some motifs trended toward increased activity and others trended toward decreased activity in CYT387-treated cells. DAVID analysis of genes corresponding to the phosphopeptides and activated in CYT387-treated cells (Supplemental Tables 10, 11) also revealed several KEGG (Kyoto Encyclopedia of Genes and Genomes) pathways that are biologically relevant to CYT387 treatment, including glycolysis, amino acid biosynthesis, and central carbon metabolism (Fig. 1K;Huang da et al. 2009a,b). In support of these phosphoproteomics findings, mRNA analysis of CYT387-treated ACHN cells using gene set enrichment analysis (GSEA) of multiple independent data sets revealed significant enrichment of genes involved in several metabolic pathways, while biological modules associated with mTOR (e.g., cell cycle and protein synthesis) were anti-correlated with CYT387 treatment. (Supplemental Tables 12, 13). Collectively, the phosphoproteome and transcriptome data provide strong evidence that CYT387 treatment reduces mTORC1 signaling to increase TSC2 and mTORC2 signaling leading to AKT activation and is coupled with changes in metabolic pathways. PI3K-AKT-mTOR inhibition treatment restrains tumor growth but does not induce tumor regression We reasoned that the CYT387-induced inhibition of mTORC1 would relieve the inhibitory feedback signal normally transmitted from mTORC1 to PI3K, as the phosphoproteomic data suggested via KSEA, and that this would result in hyperactivation of PI3K and AKT, with consequent prosurvival signaling. Consistent with this interpretation, CYT387 treatment caused an increase in AKT T308, the PDK-1-catalyzed site that serves as readout for PI3K signaling in a time-dependent manner (Supplemental Fig. S3A,B). Notably, CYT387 did not dephosphorylate ERK (Supplemental Fig. S3C). Therefore, we sought to identify PI3K-AKT pathway inhibitors that would effectively cooperate with CYT387 to induce apoptosis. 
We used GDC-0941, a pan-PI3K inhibitor (Sarker et al. 2015); BX795, a PDK-1 inhibitor (Dangelmaier et al. 2014); and MK2206 (Yap et al. 2011), an allosteric AKT inhibitor, to chemically deconstruct this signaling pathway, as depicted in the schematic (Supplemental Fig. S3D-F). We first assessed the biologic effects of these inhibitors on proliferation and apoptosis in human RCC cells singly and in combination with CYT387 ( Fig. 2A -D). While GDC-0941, BX795, and MK2206 alone exhibited some anti-proliferative effects, the combination with CYT387 resulted in significantly greater inhibition of proliferation in ACHN and SN12C cells. In marked contrast, all drugs as single agents had little or no effect on apoptosis, but the combination of either agent with CYT387 resulted in increased apoptosis. This was most striking in the CYT387 and MK2206 combination (Fig. 2B,D), and we therefore selected MK2206 for further in vivo studies. We investigated the mechanisms by which MK2206 and CYT387 cooperated to suppress tumor growth in RCC cells (Fig. 2E,F). MK2206 effectively inhibited AKT activation, as documented by dephosphorylation of both p-AKT Thr308 and p-Ser473 and the AKT substrate PRAS40. Consistent with prior results, suppression of AKT induced autophagy, as seen by the conversion of LC3-I to LC3-II. Suppression of mTORC1 by CYT387 led to feedback activation of PI3K, as seen by the increase in phosphorylation of p-AKT Thr308 (which serves as a readout for PI3K activity) and mTORC2 (as monitored by AKT Ser473 phosphorylation). Subsequently, combining MK2206 with CYT387 effectively inhibited both AKT and mTORC1 to almost undetectable levels and induced apoptosis (cleaved caspase 3). Thus, by inhibiting the PI3K-AKT- (F) Immunoblot for LC3, p-AKT Thr308, p-AKT Ser473, total AKT, p-PRAS40, total PRAS40, p-S6, total S6, p-STAT3, total STAT3, and β-actin. 
(G) Patient-derived organotypic cultures treated with DMSO (control), CYT387, MK2206, and the CYT387+MK2206 combination for 24 h exhibit an increase in LC3B (green) and a decrease in p-S6 (red) and p-AKT (red). (H) ATG5 +/+ and ATG5 −/− MEFs were treated with 2 µM CYT387, 10 µM MK2206, and the combination for 24 h, and LC3, cleaved caspase 3, p-AKT, p-S6, p-STAT3, and β-actin were evaluated by immunoblotting. (I ) ACHN xenografts treated with vehicle, 50 mg/kg CYT387, 60 mg/kg MK2206, and a 50 mg/kg CYT387 + 60 mg/kg MK2206 combination. Tumor volume is shown. Error bars represent mean ± SEM. Control versus CYT387+MK2206, ( * * ) P < 0.01. mTOR pathway at proximal and distal nodes, CYT387 and MK2206 combine to shut down PI3K oncogenic signaling. However, autophagy still persisted in the combination treatment, pointing to a survival signal that sustains subpopulations of drug-tolerant cancer cells. Notably, the CYT387-MK2206 combination induced autophagy in patient-derived organotypic RCC cultures (Fig. 2G). To further define the role of treatment-induced autophagy in mediating survival, we assessed the effects of CYT387 and MK2206 combination treatment on ATG5 −/− and ATG5 +/+ MEFs. The CYT387-MK2206 cotreatment induced more apoptosis in ATG5 −/− MEFS than it did in wild-type controls (demonstrated by an increase in cleaved caspase 3), indicating that autophagy protects cells from apoptosis (Fig. 2H). Collectively, these results suggest that despite effective inhibition of PI3K-AKT-mTOR signaling with the resultant induction of apoptosis, cancer cells are able to simultaneously induce an autophagic-fueled survival pathway. We next examined the safety and efficacy of CYT387 and MK2206 cotreatment in vivo in two xenograft tumor models. 
While CYT387 or MK2206 alone exhibited an anti-tumor effect on ACHN and SN12C xenografts, the combination of CYT387 with MK2206 resulted in significantly greater tumor growth inhibition in ACHN and SN12C tumor xenografts (P < 0.001) (Fig. 2I,L). Importantly, combination treatment was well tolerated, with no weight loss recorded (Supplemental Fig. S3G,H). Pharmacodynamic studies demonstrated that combination therapy led to the suppression of S6 and AKT S473 phosphorylation (Supplemental Fig. S3I). Consistent with our in vitro finding, CYT387 alone had a minimal impact on apoptosis. In marked contrast, combination treatment with CYT387 and MK2206 resulted in a significant increase in apoptosis (established by an increase in cleaved caspase 3; P < 0.001).

Metabolic reprogramming is supported by redox homeostasis

The lack of tumor regression despite effective inhibition of PI3K-AKT-mTOR signaling led us to question whether metabolic reprogramming may sustain the survival of the treated cancer cells. The PI3K-AKT-mTOR pathway regulates multiple steps in glucose uptake and metabolism (Duvel et al. 2010). Therefore, we hypothesized that CYT387 and MK2206 treatment singly and in combination would negatively impact glucose uptake, aerobic glycolysis, and, subsequently, biosynthetic pathways, resulting in a drug-enforced reduction in glucose availability in the microenvironment. To determine the contribution of CYT387 and MK2206 treatment on the regulation of glycolysis, we measured glucose uptake by 18F-fluoro-deoxyglucose (18FDG), lactate excretion, and the extracellular acidification rate (ECAR) as readouts for glycolysis. CYT387, MK2206, and the combination significantly decreased glucose uptake and reduced lactate production in vitro (Fig. 3A,B).
The dramatic difference between lactate/ glucose ratio in extracellular medium further supports the finding that CYT387 and MK2206 cotreatment inhibits glycolysis (control: 1.51; CYT387: 0.65; MK2206: 0.81; CYT387+MK2206: 0.37). This impaired carbon metabolism with treatment also resulted in a reduction of cell size (Fig. 3C). Consistent with the above finding, CYT387, MK2206, and the CYT387-MK2206 combination significantly reduced the ECAR (Fig. 3D,E). Decreased glucose availability with cotreatment might also be reflected in changes with OXPHOS activity, as measured by oxygen consumption rate (OCR; an indicator of OXPHOS). However, we found that the OCR/ECAR ratio increased after cotreatment, suggesting a predominant decrease in glycolysis with the maintenance of mitochondria-driven OXPHOS (Fig. 3F). Consistent with glucose limitation and decreased glycolysis, we observed increased AMPK phosphorylation at Thr172, an established indicator of metabolic stress (Fig. 3G). Importantly, in the setting of glucose deprivation and impairment of the pentose phosphate pathway (PPP), AMPK has been shown to increase NADPH levels from increased fatty acid oxidation. Specifically, we noted increased levels of NADPH, maintenance of GSSG/GSH ratios, and a resultant mitigation of reactive oxygen species (ROS) ( Fig. 3H-J). These findings are consistent with the role of AMPK in mitigating metabolic stress and promoting cancer cell survival (Jeon et al. 2012). Additionally, AMPK would be predicted to further inhibit mTOR (Inoki et al. 2003;Gwinn et al. 2008). By comparison, we did not see any reduction in PKM2 levels, suggesting that the metabolic switch from aerobic glycolysis to OXPHOS is not dependent on pyruvate kinase activity (Christofk et al. 2008). Overall, these findings suggest that by decreasing glucose levels, CYT387-MK2206 cotreatment severely reduces the glycolytic capacity needed to supply the bioenergetics needs of the RCC cells. 
Importantly, this treatment-induced nutrient-depleted condition, while suppressing proliferation, simultaneously promotes survival by regulating NADPH homeostasis and maintaining mitochondrial-driven oxidation.

PI3K-AKT-mTOR treatment-induced autophagy promotes phospholipid metabolism

Therefore, to comprehensively determine how autophagy contributes to the metabolic needs, we performed global metabolic analysis using a liquid chromatography-tandem mass spectrometry (LC-MS/MS)-based platform (Louie et al. 2016). These studies revealed that CYT387 and MK2206, singly and in combination, effected changes across multiple pathways (Fig. 4A; Supplemental Table 14). Consistent with the role of the PI3K-AKT-mTOR pathway in the regulation of glycolysis, treatment with these agents was accompanied by reductions in glucose, glucose-6-phosphate, DG3P, PEP, pyruvate, and lactate, consistent with the inhibition of glycolysis (Supplemental Fig. S4A), as described above and also concordant with the gene expression data. Similarly, we also observed reductions in PPP intermediates, amino acids, tricarboxylic acid (TCA) cycle intermediates, and ribose biosynthesis and corresponding increases in purine breakdown products guanine and hypoxanthine (Supplemental Fig. S4B-E). These findings are in keeping with a nutrient-deprived state (i.e., decreased anabolism) with subsequent increased autophagic catabolism to maintain survival (Mizushima et al. 2001). Cells adapt to glucose deprivation by subsisting on fatty acids—mobilized through glycerolipid remodeling—for oxidation, and this is consistent with our observation that the most significant metabolite changes were in lipid intermediates, including phospholipids, triacylglycerol (TAG), cholesterol esters, diacylglycerol (DAG), and fatty acids (C16:0, C18:0, and C18:1). We further investigated the lipid substrates that were catabolized by autophagy to produce fatty acids for fatty acid oxidation.
Steady-state metabolite profiling showed significant increases in lysophospholipids and arachidonic acid (C20:4), with corresponding decreases in their phospholipid precursors (Fig. 4B). Phospholipids, which include phosphatidylcholine (PC), phosphatidylethanolamine (PE), phosphatidylserine (PS), phosphatidylglycerol (PG), and phosphatidylinositol (PI), are major structural components of cellular membranes. PLA2 is the enzyme that catalyzes the hydrolysis of the phospholipid sn-2 ester bond with subsequent release of lysophospholipids; e.g., lysophosphatidylcholine (LPC), alkyl-lysophosphatidylcholine (alkyl-LPC), and free fatty acids (Murakami et al. 2011). Accordingly, we found elevated levels of C16:0 LPC, C18:0 LPC, C18:1 LPC, and C18:0 alkyl-LPC and corresponding decreases in their phospholipid precursors. Notably, we observed significant decreases in free fatty acids (C16:0, C18:0, and C18:1), supporting the idea that phospholipids are hydrolyzed to supply fatty acids for fatty acid oxidation. Consistent with increased arachidonic acid levels in CYT387-MK2206-cotreated cells, we observed increased levels of 14,15-EET, 11,12-EET, 8,9-EET, and 5-HETE, pointing to arachidonic acid P450-mediated generation of eicosanoids (Supplemental Fig. S5).

PI3K-AKT-mTOR treatment-induced autophagy facilitates lipid droplet (LD) formation and mitochondrial respiration

To protect cells from the destabilizing effects of excess lipids, free fatty acids mobilized by autophagy and destined for oxidation are stored in an intermediate intracellular pool: LDs (Thiam et al. 2013). We reasoned that the large changes in glycerolipid redistribution identified by our metabolomics profiling of treated cells would result in an increased number of LDs to support fatty acid oxidation, with subsequent mobilization of fatty acids to mitochondria under these nutrient-depleted conditions (Rambold et al. 2015).
Consistent with this, we observed that CYT387 and MK2206 singly and in combination incrementally and significantly increased the number and size of Bodipy 493/503-labeled ( Fig. 5A-C, green) LDs. Additionally, we incubated ACHN human RCC cells with Bodipy-C12-HPC (a phospholipid containing green fluorescent long chain fatty acid) followed by treatment with vehicle or the CYT387-MK2206 combination. CYT387-MK2206 cotreatment led to a greater degree of incorporation of Bodipy-C12-labeled fatty acids into LDs relative to vehicle-treated cells. This suggests that CYT387-MK2206 treatment-induced autophagy results in phospholipid hydrolysis that releases fatty acids, which are subsequently incorporated into new LDs (Supplemental Fig. S6). To determine whether the increase in LDs occurred in vivo, we stained the vehicle, CYT387, MK2206, and CYT387-MK2206-cotreated xenograft tumors for adipophilin, which belongs to the perilipin family, members of which coat intracellular lipid storage droplets and facilitate metabolic interactions with mitochondria (Sztalryd and Kimmel 2014). Consistent with the in vitro data, the number of adipophilin-positive LDs significantly and incrementally increased with treatment (as measured on treatment day 40 in ACHN xenograft tumors; CYT387<MK2206<CYT387+MK2206; P = 0.0046) (Fig. 5D), indicating that these drug treatments stimulate the formation of LDs in vivo. Collectively, these data suggest that the early adaptive and survival changes effected by the initial drug treatment continues to support the maintenance of long-term in vivo tumor growth. Next, to further determine whether autophagy contributed to LD numbers, we treated ATG5 +/+ and ATG5 −/− MEFs with CYT387, MK2206, and the combination. Autophagy-competent ATG5 +/+ MEFs were able to significantly increase LD numbers (Fig. 5E). In marked contrast, none of the treatments was able to increase LDs in ATG5 −/− MEFs, confirming that autophagy is required to sustain LD levels (Fig. 5F). 
To investigate the metabolic ramifications of this, we compared oxygen consumption by ATG5 −/− and ATG5 +/+ MEFs when treated with CYT387, MK2206, and the combination. We found that CYT387-MK2206 cotreatment profoundly decreased the ATG5 −/− MEFs' mitochondrial OCR and spare respiratory capacity (SRC; the quantitative difference between the maximal OCR and the initial basal OCR), indicating that ATG5 −/− MEFs function near their maximal rate and are unable to maintain an adequate level of mitochondrial respiration with CYT387+MK2206 cotreatment due to a deficit in their metabolic compensatory mechanisms (Fig. 5G,H). In contrast, we observed no decrease in mitochondrial OCR and SRC in CYT387-MK2206-cotreated ATG5 +/+ MEFs. The extent of the metabolic difference between vehicletreated ATG5 −/− and CYT387+MK2206-cotreated MEFs was apparent in the overall ratio of OXPHOS to aerobic glycolysis (OCR/ECAR), which was twofold higher in vehicle-treated than in CYT387-MK2206-cotreated ATG5 −/− MEFs, reflective of the need for autophagy to supply the nutrients to maintain mitochondrial OCR under treatment-enforced metabolic restrictions (Fig. 5I). This is in line with a model in which autophagy of cellular organelles and membranes during nutrient deprivation produces fatty acids that supply the LD pool, where they are then transferred into mitochondria for β-oxidation. In support of this, we observed that treated RCC cells had significantly increased numbers of mitochondria (Fig. 5J). Accordingly, dual staining of treated ACHN cells with a mitochondrial marker (Mitotracker orange) and LDs with Bodipy (Fig. 5K, green) revealed that the LDs were closely associated with the mitochondria, potentially enabling the fatty acids released from LDs to traffic directly from LDs to mitochondria and maximizing the fatty acid oxidation ( Fig. 5K; Rambold et al. 2015). 
Importantly, cancer cells become increasingly dependent on mitochondrial fatty acid oxidation in nutrient-depleted conditions ( Fig. 5L; Cabodevilla et al. 2013). Consistent with this, using the Mito Fuel Flex test, we found that human ACHN RCC cells' dependence on fatty acid doubled with CYT387-MK2206 cotreatment (Fig. 5M). Consequently, oxidation of endogenous fatty acids significantly contributed to the OXPHOS rate in MK2206+CYT387-cotreated cells compared with control (>2.5-fold increase; P < 0.0001) (Fig. 5N). Consistent with this, induction of fatty acid oxidation by CYT387-MK2206 cotreatment was attenuated in ATG −/− MEFs (Supplemental Fig. S7). In contrast, glutamine-supported OCR represented a minority of total OCR in CYT387-MK2206-cotreated ACHN cells (Supplemental Fig. S8). Taken together, this suggested that cellular lipid remodeling by the autophagy-lysosome system may supply a considerable fraction of the intracellular lipids-fatty acids irrespective of their external availability. Inhibiting PLA2 activity decreases autophagy-induced LDs, limits OXPHOS, and increases apoptosis Our data implicated hydrolysis of phospholipids as a critical mechanism for the generation of lysophospholipids and fatty acids for fatty acid oxidation in treated RCC cells, and, therefore, inhibition of this enzymatic activity would negatively impact OXPHOS and subsequently limit the survival of these cells. To test this directly, we added the PLA2 inhibitor oleyloxyethylphosphocholine (OOEPC; which inhibits secretory PLA) (Slatter et al. 2016) to CYT387, MK2206, and CYT387-MK2206cotreated cells and measured LD numbers. Addition of OOEPC significantly reduced the LD abundance in CYT387, MK2206, and CYT387-MK2206-cotreated cells Figure 5. Autophagy is required for LD growth and fatty acid oxidation. (A) ACHN cells were treated with control, CYT387, MK2206, and CYT387+MK2206 for 24 h, and Bodipy 493/503 (green) was added to visualize LDs. Representative images are shown. 
n = 5 experiments. (B,C) Bar graphs quantify the increase in number (B) and size (C ) of LDs, respectively. Data are expressed as means ± SEM. ( * ) P < 0.001 for control versus CYT387, control versus MK2206, and control versus CYT387+MK2206. (D) Adipophilin staining in xenograft tumors quantifies the increase in LDs in vivo. n = 9. Data are expressed as means ± SEM. ( * ) P < 0.01 for control versus CYT387 and control versus MK2206, measured in tumors resected after 40 d of treatment. (E) ATG5 +/+ MEFs were treated with 2 µM CYT387, 10 µM MK2206, and the combination for 24 h. Bodipy was added, and the LD number was measured. n = 500 cells. ( * ) P < 0.001 for control versus CYT387 and control versus CYT387+MK2206; P < 0.005 for control versus MK2206. (F ) ATG5 −/− MEFs treated as in E. Bodipy was added, and the LD number was measured. n = 500 cells. P = NS (no significance between treatment groups). (G) ATG5 +/+ and ATG5 −/− MEFs were treated with DMSO (control), CYT387, MK2206, and CYT387+MK2206 for 24 h, and then OCRs (indicator of OXPHOS) were determined using a XF-96 extracellular flux analyzer during sequential treatments (dotted vertical lines) with oligomycin, FCCP, and rotenone/ anti-mycin (A+R). Spare respiratory capacity (SCR) is the quantitative difference between maximal uncontrolled OCR (top horizontal dashed line) and the initial basal OCR (bottom horizontal dashed line). Shown are OCR means ± SD of experimental triplicates. For ease of viewing, only control and CYT387 + MK2206 data are graphed. (H) SRC (the percentage maximum OCR after FCCP injection of baseline OCR) of ATG5 +/+ and ATG5 −/− MEFs after the indicated treatments. Shown are means ± SD of experimental triplicates. (I) Ratios of OCR to ECAR (indicator of aerobic glycolysis) at baseline of ATG5 +/+ and ATG5 −/− MEFs after the indicated treatments. 
(J) ACHN cells were treated with control, CYT387, MK2206, and CYT387+MK2206 for 24 h, and Mitotracker orange was added to visualize mitochondria. Representative images are shown. n = 5 experiments. Mitochondria number was measured, and data are expressed as means ± SEM. ( * ) P < 0.001 for control versus CYT387, control versus MK2206, and control versus MK2206+CYT387. (K) Dual staining of Bodipy and Mitotracker orange demonstrates close proximity of LDs with mitochondria in CYT387+MK2206-cotreated ACHN cells (a representative image is shown). (L) Global metabolite profiling reveals a preferential decrease in lipids. (Decrease) Abundance <0.5-fold in treated cells compared with the vehicle; (increase) abundance greater than twofold in treated cells compared with the vehicle. (M) Fatty acid fuel dependency measures the reliance of ACHN cells on fatty acids to maintain baseline respiration. ACHN cells were treated with DMSO (control) or CYT387+MK2206 for 24 h, and OCR was measured during the Seahorse XF Mito Fuel Flex assay. The percentage of dependence on fatty acids was calculated by quantifying the change in basal OCR after fatty acid oxidation was blocked using 4 µM CPT-1a inhibitor etomoxir divided by the total change in OCR from baseline after combined inhibition of fatty acid, glutamine, and pyruvate oxidation using 4 µM etomoxir, 3 µM BPTES, and 2 µM UK5099, respectively (representative graph). n = 2. Fatty acid fuel flexibility was calculated by measuring the change in sensitivity to etomoxir's inhibition of OCR after blockade of glutamine and glucose oxidation and represents the ability of ACHN cells to increase oxidation of fatty acid when glutamine and pyruvate utilization is precluded.
(N) Measurement of fatty acid-driven OCR, measured by acute inhibition of CPT-1a using 4 µM etomoxir ( P < 0.01) and represented as a percentage of total mitochondria OCR calculated using mitochondrial complex I and III inhibitors 1 µM rotenone and 1 µM anti-mycin, respectively. (Fig. 6A,B). Since several isoforms of PLA2 exist, we determined their role in reducing LDs. We found that inhibition of calcium-sensitive PLA2 (with cPLA2i) and calcium-insensitive PLA2 (with bromoenol lactone ) was also able to reduce LD number, consistent with the rate-limiting role of PLA2 in mediating phospholipid hydrolysis (Supplemental Fig. S9). To document the kinetics of the new pool of CYT387-MK2206-induced LDs, we performed a time-course experiment to monitor the appearance of LDs following CYT387-MK2206 cotreatment and ascertained that LDs appeared 2 h after treatment and then continuously increased in number during the next 24 h of monitoring. In contrast, simultaneous addition of OOPEC to the CYT387-MK2206 combination at the start of treatment completely blocked the appearance of LDs. Similarly, addition of OOPEC at 2 h after cotreatment with CYT387+MK2206 completely inhibited any further increase in LDs. Subsequently, the addition of etomoxir at 8 h (which blocks the utilization of fatty acids) resulted in LD accumulation in OOPEC+CYT387+MK2206-treated cells. These results demonstrate that PLA2 activity is required for LD generation after CYT387-MK2206 cotreatment and that OOPEC is able to inhibit PLA2 activity (Supplemental Fig. S10). To directly test the metabolic impact of OOEPC treatment, we first assessed changes in the OCR. We observed a marked decrease in the basal OCR when OOEPC was added to the CYT387-MK2206 combination. Importantly, the addition of OOPEC profoundly reduced the SRC, indicating that the inhibition of PLA2 decreases mitochondrial oxidation by reducing fatty acid supply and impedes the cells' capacity to respond to increased energetic demands (Fig. 
6C,D). The marked reduction in SRC was similar to our earlier observations in CYT387-MK2206-treated ATG5 −/− MEFs and is consistent with the model in which autophagy-supplied LDs are required to support mitochondrial OCR in metabolically restricted environments (Fig. 5G-I). Next, by plotting OCR versus ECAR, we determined the effect of PLA2 inhibition by OOEPC on CYT387-MK2206-treated tumors; this measurement highlighted that untreated ACHN human RCC cells have higher OXPHOS and glycolysis compared with CYT387-MK2206-cotreated cells (Fig. 6E). The addition of OOEPC markedly decreased OCR in ACHN cells, indicating that these treatments diminished the overall metabolic activity of the cancer cells. This observed reduction in bioenergetic metabolism led us to determine whether PLA2 inhibition would have an impact on proliferation and apoptosis. Cotreatment with OOEPC had a minimal additional effect on proliferation (Fig. 6F). In contrast, the addition of OOEPC significantly increased apoptosis, consistent with its ability to reverse autophagy-supplied fatty acids that enable survival (Fig. 6G). To further verify that PLA2 inhibition impacted cancer cell survival, we tested a distinct PLA2 inhibitor, varespladib, which has been clinically developed for cardiovascular diseases (Rosenson et al. 2010). Similar to OOEPC, the addition of varespladib to CYT387-MK2206-treated cells decreased LDs and increased apoptosis (Fig. 6H-J). Collectively, these data indicate that treatment-induced autophagy provides lysophospholipids and free fatty acids to maintain cancer cell survival despite nutrient depletion.

Discussion

It is now generally accepted that autophagy is cytoprotective in the setting of cancer therapies by enabling cancer cells to mitigate metabolic and therapeutic stresses, thereby ensuring survival (Amaravadi et al. 2011;Sehgal et al. 2015;Rebecca and Amaravadi 2016).
To date, the therapeutic reflex to block autophagy is to add antimalarial lysosomotropic inhibitors such as chloroquine. However, the clinical responses to these have been underwhelming (Goldberg et al. 2012;Shanware et al. 2013;Rosenfeld et al. 2014;Towers and Thorburn 2016). While the role of autophagy in tumor initiation and progression has been well-documented, little is known about how treatment-induced autophagy mediates cytoprotection and resistance. Our results demonstrate that cancer cells, when acutely exposed to small molecule inhibitors, activate the autophagic process to ensure early and lasting metabolic adaptations designed to enhance survival in a nutrientdepleted environment. We first observed the maintenance of OXPHOS when glucose became limiting due to treatment. Likewise, the coordinate activation of AMPK signaling ensures protective redox homeostasis to mitigate increased ROS produced by OXPHOS. Finally, we demonstrated activation of autophagy-mediated membrane glycerophospholipid metabolism with subsequent fatty acid oxidation to generate energy. Accordingly, we found that therapy-induced autophagy purposefully harnesses core biological processes to secure tumor cell fitness and survival. Our experiments involving autophagy-incompetent ATG5 −/− MEFs demonstrate that autophagy is required under conditions of nutrient depletion to generate LDs and maintain mitochondrial OCR and SRC. It is not coincidental that LD depletion by pharmacological PLA2 inhibition achieved similar results. This is consistent with the model that autophagic digestion of phospholipids, with subsequent hydrolysis within the autolysosome, provides LDs with a constant supply of lipids, which can then be trafficked to the mitochondria to maintain mitochondrial respiration. The subsequent release of these fatty acids from LDs to fuel β-oxidation may occur independently of lipophagy, as others have observed (Rambold et al. 2015). 
Additionally, another possible source of fatty acids and amino acids may come from extracellular lysophospholipids and proteins through macropinocytosis. This study further addresses the wider question of how cancer cells survive despite the inhibition of mTOR (an evolutionarily conserved master regulator of cell metabolism, proliferation, growth, and survival) and AKT (a committed prosurvival kinase that positively regulates these same processes in both normal and cancer cells) (Manning and Cantley 2007;Laplante and Sabatini 2012). Undoubtedly, the combination of attenuated proliferation signals, nutrient depletion, and metabolic competition for remaining nutrients kills many cells. Accordingly, our data demonstrate that glucose, which is tightly regulated by the PI3K-AKT-mTOR pathway at multiple steps, became limiting with treatment, with a resultant decrease in glycolysis (Engelman et al. 2006;Yecies and Manning 2011;Hu et al. 2016). However, the very same conditions that give rise to these nutrient-deprived microenvironments also induced autophagy. Consequently, the autophagic catabolism of membrane phospholipids provides a ready source of free fatty acids that maintains respiration in subpopulations of cancer cells, therefore enabling their survival in a low-glucose environment. The increase in fatty acid oxidation and OXPHOS requires redox homeostasis, and this is provided by the concomitant activation of AMPK, which increases NADPH, with a subsequent mitigation of ROS. Collectively, treatment-enforced metabolic reprogramming supports cancer cell fitness by providing fatty acids and NADPH to maximize survival. Since the rate of autophagic release of fatty acids does not match the rate of mitochondrial consumption, these LDs serve a dual purpose: first, as a buffer to reduce lipotoxicity by storing lipid intermediates and, second, to transport these lipids to the mitochondria (Singh et al. 2009;Unger et al. 2010;Rambold et al. 2015). 
Consequently, these energy-strapped residual cancer cells increase fatty acid oxidation, as it is the most energetically efficient way to generate ATP. Long-lived cell types such as cardiac myocytes and memory T cells (Pearce et al. 2009;Chung et al. 2010) depend on fatty acid metabolism for survival, and we see this as yet another example of cancer cells hijacking normal physiological processes to their benefit. Our screen identified several structurally different Janus family kinase inhibitors that inhibited mTORC1 and induced autophagic flux. While serendipitous, these findings are not unexpected, as small molecules inhibit several kinases and would directly and/or indirectly interdict the PI3K-AKT-mTOR pathway. To date, JAK inhibitors have been approved for and/or are undergoing late-stage clinical trials in MPN, including the focus of this study, CYT387 (momelotinib) (Patel et al. 2016;Winton and Kota 2017). However, complete cytogenetic or molecular responses with JAK inhibitors have not been observed, with clinical benefit mainly resulting from improved performance status due to reduced cytokine levels rather than the elimination of cancer cells (Verstovsek et al. 2012;Vannucchi et al. 2015). Therefore, our finding that JAK inhibitors induce autophagy in both solid tumors and MPN cells, which then maintain residual disease potentially through the hydrolysis of phospholipids, may offer an explanation of why this class of inhibitors has not been able to eliminate drug-tolerant cancer cells and effect durable responses. Combination therapies come with the increased risk of side effects. Notably, CYT387, MK2206, and varespladib have all been tested in human clinical trials, and their maximum tolerated doses have been established; the challenge ahead will be to develop optimal dosing schedules that balance target engagement with side effects.
However, most small molecule inhibitors have favorable toxicity profiles, and metabolic targets would be non-cross-resistant and predicted to have different side effects that are not overlapping. The experience with infectious diseases highlights the importance of combinations to achieve rapid efficient cancer suppression; i.e., HAART (highly active anti-retroviral therapy) in HIV, which is routinely used to produce durable clinical responses and prevent the emergence of resistance. Polytherapy in cancer is similarly justified and achievable, and here we outline the molecular roadmap for interdicting signaling and metabolism to override treatment-induced autophagy. Patient tumor ex vivo organotypic culture Tumor tissue samples were collected at the time of surgical removal from consented patients and transported in IMEM + FBS + PS. The tissue was sliced into thin sections using a surgical knife. Sections were cultured on an organotypic insert (EMD, PICMORG50) for 24 h in IMEM, 10% FBS, 1% PS, and 50 µg/ mL holo-transferrin with drug. A section of each tumor was immediately fixed in 10% buffered formalin to confirm tissue viability. After culture, treated tissue sections were fixed in 10% buffered formalin and embedded in paraffin. Paraffin-embedded tumors were evaluated for morphology (H&E) and immunofluorescent signaling. Cell viability and apoptosis analysis Cell viability assays were performed by plating 3 × 10 3 cells per well in 24-well plates in triplicate and treating them the following day with the indicated agents. The experiment was continued for 5 d, and then the cells were fixed using 4% formaldehyde and stained for 1 h with Syto60. Fluorescence was measured and quantified, and photographs were obtained using a LiCor Odyssey infrared imager. The effect of CYT387, MK2206, and the CYT387 +MK2206 combination on cell number was assessed as fold of DMSO-treated control cells. Experimental results are the average of at least three independent experiments. 
Apoptosis was determined using caspase 3/7 Glo assay kit (Promega) following the manufacturer's instructions. Briefly, 2000 cells per well were plated in 96-well plates and cultured for 72 h. Cells were treated with CYT387, MK2206, and the combination of CYT387 and MK2206 for 72 h, and then 100 µL of reagent was added to each well and incubated for 30 min at room temperature. Caspase 3/ 7 activity was measured using a luminometer. Luminescence values were normalized by cell numbers. The effect of CYT387, MK2206, and the CYT387+MK2206 combination on caspase 3/ 7 activation was assessed as fold of DMSO-treated control cells. High-content imaging A seven-point dilution series of 116 small molecule inhibitors covering a 1000× concentration range was plated into three 384well plates using the EP Motion automated dispensing system. Control wells with equal volumes of DMSO were included as negative controls. ACHN cells were grown, trypsinized, counted, and plated directly into warm drug plates using a Multidrop Combi dispenser. Plates were incubated for 72 h and subsequently imaged on an Olympus ScanR Platform at 10× magnification, performing four images per well in 384-well plates. Singlecell nuclear and cytoplasmic fluorescent intensities were calculated using the Olympus ScanR analysis software: The DAPI-positive region of each cell was used as a boundary to quantitate nucleus counts for analysis of cell growth, and integrated nuclear DNA staining intensity was used for cell cycle analysis. A 10-pixel extension of the nuclear region (and not including the nuclear region) was used to quantitate cytoplasmic signal of immunofluorescent staining of p62 protein and phosphorylation of S6. The mean signal intensity of each marker in all cells per well was used as the metric for cytoplasmic marker expression (average intensity of pS6 and p62). 
Unsupervised hierarchical clustering was used to identify compounds that produced similar pS6 and p62 dose response phenotypes after treatment. Western blotting Cells were plated in six-well dishes and treated the following day with the indicated agents. Treatments were for 24 h, after which cells were washed with ice-cold PBS and lysed with RIPA buffer (Sigma). Phosphatase inhibitor cocktail set II and protease inhibitor cocktail set III (EMD Millipore) were added at the time of lysis. Lysates were centrifuged at 15,000g for 10 min at 4°C. Protein concentrations were calculated based on a BCA assay-generated (Thermo Scientific) standard curve. Proteins were resolved using the NuPAGE Novex minigel system on 4%-12% Bis-Tris gels (Invitrogen). For Western blotting, equal amounts of cell lysates (15-20 µg of protein) were resolved with SDS-PAGE and transferred to membranes. The membrane was probed with primary antibodies, washed, and then incubated with corresponding fluorescent secondary antibodies and washed. The fluorescent signal was captured using a LI-COR Odyssey imaging system, and fluorescent intensity was quantified using the Odyssey software where indicated. The following antibodies were used for Western blots: p-S6 (S240/244), S6, LC3B, p-Akt(S473), p-Akt(T308), Akt, and cleaved caspase3 from Cell Signaling Technologies, and p-Stat3 (Y705), Stat3, and β-actin (AC15) from Abcam. Ki67 (Dako) and cleaved caspase 3 (Cell Signaling Technologies) were used for immunohistochemistry. MK2206 and CYT387 for in vitro and in vivo use were purchased from LC Labs and ChemieTek, respectively. BX795 and GDC0941 were purchased from Sigma. In vivo xenograft studies Six-week-old mice were used for human RCC xenografts. For both ACHN and SN12C cell lines, 2 × 10 6 cells were diluted in 50 µL of PBS and 50 µL of Matrigel (BD Biosciences) and injected subcutaneously into the right and left flanks of each mouse. 
Tumors were monitored until they reached an average size of 50-80 mm 3 (∼2 wk), at which point treatments were begun. CYT387 (50 mg/kg per day) was administered by oral gavage 5 d per week. MK2206 (60 mg/kg per day) was administered by oral gavage 2-3 d per week. CYT387 was dissolved in NMP/Captisol (Cydex), and MK2206 was dissolved in Captisol (Cydex). Tumors and mouse weights were measured twice weekly. At least six to eight mice per treatment group were included. All mice were eu-thanized using CO 2 inhalation followed by cervical dislocation per institutional guidelines at Oregon Health and Science University (OHSU). Experiments were approved by the Institutional Animal Care and Use Committee at OHSU. Phosphoproteomics screen and data analysis Enriched phosphopeptides were digested with trypsin and analyzed by MS following the published Cell Signaling Technology protocol (Rush et al. 2005;Moritz et al. 2010;Zhuang et al. 2013). MS data analysis MS raw files were analyzed via MaxQuant version 1.5.3.30 (Cox and Mann 2008), and MS/MS fragmentation spectra were searched using Andromeda (Cox et al. 2011) against human canonical and isoform sequences in Swiss-Prot (downloaded in September 2016 from http://uniprot.org; Apweiler et al. 2004). Quantitative phosphopeptide data were log 10 transformed, and missing data were imputed before applying quantile normalization as described previously (Drake et al. 2016). Quantitative data are in Supplemental Tables 3-8. Hierarchical clustering was performed with the Cluster 3.0 program (Eisen et al. 1998) using distance that was based on the Pearson correlation and applying pairwise average linkage analysis. Java Treeview was used to visualize clustering results (Saldanha 2004). KSEA KSEA was performed as described previously (Drake et al. 2012). 
Briefly, the phosphopeptides were rank-ordered by fold change, on average, between CYT387 treatment and control, and the enrichment score was calculated using the Kolmogorov-Smirnov statistic. Permutation analysis was conducted to calculate statistical significance. The normalized enrichment score was calculated by dividing the enrichment score by the average of the absolute values of all enrichment scores from the permutation analysis (Supplementary Tables 9, 10). DAVID pathway analysis To generate an appropriate list for use in DAVID (Huang da et al. 2009a, b), phosphopeptides were initially filtered with a false discovery rate of <0.20 (Supplementary Tables 11, 12). Phosphopeptides that were 1.5-fold enriched, on average, in either CYT387 treatment or no treatment were selected. Enrichment for a phosphopeptide was reversed if a functional annotation (Hornbeck et al. 2015) indicated protein activity inhibition. To reduce the complexity of this list, if multiple phosphopeptides mapped to a gene, then the most enriched phosphopeptide was selected. The only exception made was if a functional annotation existed for one or more of the phosphopeptides, in which case the most enriched annotated phosphopeptide would be selected. If multiple phosphopeptides mapped to the same gene and had enrichment values that fell into both CYT387 treatment and no treatment, then those phosphopeptides and the corresponding gene were removed from the list to be analyzed. We input into DAVID the genes in the CYT387 treatment enriched group (Supplemental Table 11) to examine KEGG pathways that were more active with CYT387 treatment (Supplemental Table 12). Phospho-receptor tyrosine kinase (phospho-RTK) array The human phospho-RTK array kit was purchased from Cell Signaling Technologies and screened according to the manufacturer's protocol, with 150 µg of protein being used for each experiment. 
Signal intensity was calculated using a LI-COR Odyssey imaging system, and fluorescent intensity was quantified using the Odyssey software where indicated. In vitro 18 F-FDG uptake assays 18 F-FDG was purchased from the radiopharmacy at University of California at San Francisco. SN12C or ACHN cells (5 × 10 5 cells) were plated and exposed to vehicle or drug for 24 and 48 h, whereupon the cells were incubated with 0.5 μCi of 18 F-FDG for 1 h at 37°C. The 18 F-FDG in the medium and the intracellular 18 F-FDG were isolated and counted using a γ counter. The intracellular 18 F-FDG was expressed as a percentage of the total activity added to cells normalized to the cell number. Metabolomic profiling of cancer cells Metabolomic data and single-reaction monitoring (SRM) transitions were performed as described previously and are in Supplemental Table 13. Briefly, 2 million cells were plated overnight and serum-starved for 2 h prior to harvesting, after which cells were washed twice with PBS, harvested by scraping, and flash-frozen. For nonpolar metabolomic analyses, flash-frozen cell pellets were extracted in 4 mL of 2:1:1 chloroform/methanol/PBS with internal standards: 10 nmol of dodecylglycerol and 10 nmol of pentadecanoic acid. Organic and aqueous layers were separated by centrifugation, and the organic layer was extracted. The aqueous layer was acidified with 0.1% formic acid followed by re-extraction with 2 mL of chloroform. The second organic layer was combined with the first extract and dried under nitrogen, after which lipids were resuspended in 120 µL of chloroform. A 10-µL aliquot was then analyzed by both SRMbased LC-MS/MS or untargeted LC-MS. For polar metabolomic analyses, frozen cell pellets were extracted in 180 µL of 40:40:20 acetonitrile/methanol/water with internal standard: 1 nmol of d3 N15-serine. Following vortexing and bath sonication, the polar metabolite fraction (supernatant) was isolated by centrifugation. 
A 20-µL aliquot was then analyzed by both SRM-based LC-MS/ MS or untargeted LC-MS. For the SRM transitions where we monitored the transition of parent masses to the loss of the head group (e.g., loss of phosphocholine from PC), we ascertained the acyl chain specificities from previously described procedures (Long et al. 2011). For phospholipids such as PCs and PEs, we ascertained fatty acid acyl chain composition from phospholipids using a mobile phase containing both ammonium hydroxide and formic acid and monitored the fatty acid fragmentations from m/z at 40 V collision energy in negative ionization mode. For other phospholipids, such as PAs and PIs, we monitored the fatty acid fragmentations from m/z at 40 V collision energy in negative ionization mode in mobile phase containing just ammonium hydroxide. For the lipids that we measured in this study, the designated acyl chains represent the primary fatty acids that were on the lipid backbone. However, this method is less sensitive than monitoring the loss of head group from the phospholipid, and we therefore used SRM transitions for many phospholipids where we monitored for loss of head groups (e.g., PCs, PEs, PSs, PAs, and PIs). Relative levels of metabolites were quantified by integrating the area under the curve for each metabolite, normalizing to internal standard values, and then normalizing to the average values of the control groups (Louie et al. 2016). ROS detection ROS levels were measured with CellRox deep red (Molecular Probes). Cells were plated in a 96-well clear-bottomed cell culture plate with black sides. After adhering for 24 h, cells were treated with 2µM CYT387, 10µM MK2206, and 2µM CYT387 + 10µM MK2206. The complete medium + drug was removed after 24 h and replaced with 5 µM CellRox deep red in medium. Cells were incubated for 30 min at 37°C and then washed with PBS. Fluorescence signal was detected using a Bioteck Cytation 5 plate reader. Data were analyzed using Prism software. 
Cellular respiration OCR and ECAR were carried out in a XF96 Seahorse analyzer (Agilent/Seahorse Bioscience,). Cells were plated in the wells of 96-well plates (8 × 10 3 cells per well; XF96 plates; Seahorse Bioscience) and incubated overnight at 37°C. The next day, cells were treated with the indicated drugs for 24 h, and then the medium was changed to XF assay medium and loaded with glucose, oligomycin, and 2-DG, respectively, per the manufacturer's recommendation. Similarly, Mito Fuel Flex tests were performed on an XFe96 Bioanalyzer. At 24 h after treatment, all assays were performed according to the manufacturer's protocols. Immunohistochemistry Immunostaining was performed following deparaffinization and rehydration of slides. Antigen retrieval was performed in a pressure cooker using citrate buffer (pH 6.0) for 4 min. Nonspecific binding was blocked using Vector mouse IgG blocking serum for 30 min at room temperature. Samples were incubated at room temperature with rabbit monoclonal antibodies pS6 (Cell Signaling Technologies, 5364), cleaved caspase 3 (Cell Signaling Technologies, 9661), and Ki67 (Dako, M7240). Slides were developed with Vector Immpress rabbit IgG (Vector Laboratories, MP7401) and Vector Immpress mouse IgG (Vector Laboratories, MP7400) for 30 min at room temperature. Chromogenic detection was performed using Vector Immpact DAB (Vector Laboratories, SK4105) for 3 min. Slides were counterstained with hematoxylin. A 3DHistech MIDI scanner (Perkin Elmer) was used to capture whole-slide digital images with a 20× objective. Images were converted to MRXS files, and computer graphic analysis was completed using inForm 1.4.0 advanced image analysis software (Perkin Elmer). Morphological and immunofluorescence evaluation H&E slides of formalin-fixed paraffin-embedded tissue was used to assess the morphological integrity of tumor samples. 
Once integrity was confirmed, immunofluorescent analysis was performed for p-S6 (1:500; Cell Signaling Technologies), p-AKT (1:200; Cell Signaling Technologies), and LC3B (1:250; Cell Signaling Technologies). Four-micron sections were cut, deparaffinized, and rehydrated. Antigen retrieval was performed using citrate for 4 min in a pressure cooker. Slides were blocked using 2.5% normal goat serum for 30 min and then incubated in primary antibody for 1 h followed by secondary antibody mouse antirabbit Alexa 488 (1:1000; Molecular Probes) for 30 min. Slides were rinsed in PBS, air-dried, and coverslipped using Dako mounting medium with DAPI.

Lipid and mitochondrial staining

Cells were grown on coverslips and then treated with drug for 24 h. Cells were fixed in 4% paraformaldehyde for 15 min and rinsed with PBS. Cells were washed with a 1% saponin solution for 15 min at room temperature and then washed several times in PBS to remove detergent. Cells were then incubated in Bodipy (ThermoFisher, D3922) at a final concentration of 1 µM for 10 min. Bodipy was removed, and slides were rinsed with PBS, air-dried, and mounted on slides using Dako mounting medium with DAPI. To detect mitochondrial levels in treated cells, cells were grown on coverslips for 24 h. Mitotracker orange (ThermoFisher, M7511) was diluted in medium with drug at a final concentration of 1 µM and incubated overnight. The medium was removed, and cells were fixed with 4% paraformaldehyde for 15 min. Cells were rinsed twice for 5 min in PBS and incubated in cold acetone for 10 min at −20°C. Acetone was removed, and cells were washed in PBS, air-dried, and mounted on slides with Dako mounting medium with DAPI. A 3DHistech MIDI scanner (Perkin Elmer) was used to capture whole-slide digital images with a 20× objective. Images were converted to MRXS files, and computer graphic analysis was completed using inForm 1.4.0 advanced image analysis software (Perkin Elmer).
MDC staining Slides were plated on coverslips and allowed to adhere for 24 h. After adherence, cells were treated with drug for 24 h. After treatment, the drug was removed, and cells were washed once in PBS. Cells were labeled with a 50 mM concentration of autofluorescent marker MDC (Sigma) in PBS for 10 min at 37°C. Cells were fixed in 4% formaldehyde for 15 min at room temperature. Cells were washed twice in PBS for 5 min and mounted on slides using Dako mounting medium with DAPI. Coverslips were sealed with clear nail polish and imaged with a 3DHistech MIDI scanner as described above. Statistical analysis Mouse tumor size was analyzed by two-way ANOVA with time and drug as factors using Graphpad Prism. Mouse weight during treatment was analyzed by repeated measures of two-way ANOVA with time and drug as factors. A P-value of <0.05 was considered statistically significant. For immunohistochemistry, P-values were calculated using one-way ANOVA with Bonferroni's multiple comparison test (P < 0.05 , P < 0.01 , and P < 0.001 ). Metabolite fold changes were computed and visualized in Python script using the Openpyxl package (for importing Excel files) and the Matplotlib package (for visualizing fold changes).
#include <stdio.h>
#include <string.h>

/*
 * Tallies the uppercase letters of `s` into `counts`, adding `sign`
 * (+1 or -1) per occurrence.  Returns 0 if any character falls outside
 * 'A'..'Z' (which would otherwise index out of the array's bounds),
 * 1 on success.
 */
static int tally(const char *s, int counts[26], int sign){
    for(size_t i = 0; s[i] != '\0'; i++){
        if(s[i] < 'A' || s[i] > 'Z'){
            return 0;
        }
        counts[s[i] - 'A'] += sign;
    }
    return 1;
}

/*
 * Reads three uppercase words from stdin (guest name, host name, letter
 * pile) and prints "YES" if the pile is exactly the two names' letters
 * combined (an anagram over the concatenation), otherwise "NO".
 */
int main(){
    char guest[105];
    char host[105];
    char pile[105];
    /* One counter per uppercase letter: names count up, pile counts
       down; an exact rearrangement leaves every counter at zero. */
    int counts[26] = {0};

    /* Field widths keep scanf from overflowing the 105-byte buffers;
       a failed read is treated as a mismatch. */
    if(scanf("%104s %104s %104s", guest, host, pile) != 3){
        printf("NO");
        return 0;
    }

    /* Differing total length already rules out an exact rearrangement. */
    if(strlen(guest) + strlen(host) != strlen(pile)){
        printf("NO");
        return 0;
    }

    /* Any non-uppercase character also counts as a mismatch instead of
       corrupting memory as the unvalidated indexing would. */
    if(!tally(guest, counts, +1) || !tally(host, counts, +1) || !tally(pile, counts, -1)){
        printf("NO");
        return 0;
    }

    for(int c = 0; c < 26; c++){
        if(counts[c] != 0){
            printf("NO");
            return 0;
        }
    }
    printf("YES");
    return 0;
}
package org.simpleflatmapper.converter.impl.time;

import org.simpleflatmapper.converter.Context;
import org.simpleflatmapper.converter.ContextualConverter;

import java.time.Instant;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.temporal.TemporalAccessor;
import java.util.Date;

/**
 * Converts an arbitrary object to a {@link LocalTime}.
 * Supported inputs: {@link Date}, {@link Instant}, {@link LocalTime}, and any
 * other {@link TemporalAccessor}; {@code null} is passed through unchanged.
 * Anything else raises {@link IllegalArgumentException}.
 */
public class ObjectToJavaLocalTimeConverter implements ContextualConverter<Object, LocalTime> {

    // Zone used to interpret instant-based inputs (Date, Instant).
    private final ZoneId zone;

    public ObjectToJavaLocalTimeConverter(ZoneId zoneId) {
        this.zone = zoneId;
    }

    @Override
    public LocalTime convert(Object o, Context context) throws Exception {
        if (o == null) {
            return null;
        }

        // NB: check order matters — LocalTime is itself a TemporalAccessor,
        // so the more specific checks must come first.
        if (o instanceof Date) {
            long epochMillis = ((Date) o).getTime();
            return Instant.ofEpochMilli(epochMillis).atZone(zone).toLocalTime();
        }
        if (o instanceof Instant) {
            Instant instant = (Instant) o;
            return instant.atZone(zone).toLocalTime();
        }
        if (o instanceof LocalTime) {
            return (LocalTime) o;
        }
        if (o instanceof TemporalAccessor) {
            return LocalTime.from((TemporalAccessor) o);
        }

        throw new IllegalArgumentException("Cannot convert " + o + " to LocalTime");
    }
}
package io.automatiko.engine.codegen;

import java.util.NoSuchElementException;
import java.util.Optional;

import io.automatiko.engine.api.definition.process.WorkflowProcess;
import io.automatiko.engine.codegen.process.AbstractResourceGenerator;
import io.automatiko.engine.codegen.process.ReactiveResourceGenerator;
import io.automatiko.engine.codegen.process.ResourceGenerator;

/**
 * This should be used to only create JAX-RS Resource Generators. IMPORTANT: it
 * will not consider Spring Generators.
 */
public class DefaultResourceGeneratorFactory extends ResourceGeneratorFactory {

    @Override
    public Optional<AbstractResourceGenerator> create(GeneratorContext context, WorkflowProcess process, String modelfqcn,
            String processfqcn, String appCanonicalName) {
        // Resolve the generator type from the context; when absent the
        // resulting Optional stays empty and no generator is created.
        return GeneratorType.from(context).map(type -> {
            if (type == GeneratorType.QUARKUS) {
                return new ResourceGenerator(context, process, modelfqcn, processfqcn, appCanonicalName);
            }
            if (type == GeneratorType.QUARKUS_REACTIVE) {
                return new ReactiveResourceGenerator(context, process, modelfqcn, processfqcn, appCanonicalName);
            }
            throw new NoSuchElementException("No Resource Generator for: " + type);
        });
    }
}
<reponame>checkinhq/checkin package acceptance import ( "database/sql" "github.com/DATA-DOG/godog" "github.com/DATA-DOG/godog/gherkin" ) type DbFeatureContext struct { driverName string db *sql.DB frozen bool } func NewDbFeatureContext(driverName string) *DbFeatureContext { return &DbFeatureContext{ driverName: driverName, } } func (c *DbFeatureContext) FeatureContext(s *godog.Suite) { if c.frozen { panic("trying to use a frozen feature context") } c.frozen = true s.BeforeScenario(c.beforeScenario) s.AfterScenario(c.afterScenario) } func (c *DbFeatureContext) beforeScenario(scenario interface{}) { var name string if s, ok := scenario.(*gherkin.Scenario); ok { name = s.Name } else if s, ok := scenario.(*gherkin.ScenarioOutline); ok { name = s.Name } // This works with https://github.com/DATA-DOG/go-txdb db, err := sql.Open(c.driverName, name) if err != nil { panic(err) } if err := db.Ping(); err != nil { panic(err) } c.db = db } func (c *DbFeatureContext) afterScenario(scenario interface{}, err error) { c.db.Close() } func (c *DbFeatureContext) DB() *sql.DB { return c.db }
/*
 * Complete the 'compareTriplets' function below.
 *
 * The function is expected to return an INTEGER_ARRAY.
 * The function accepts following parameters:
 *  1. INTEGER_ARRAY a
 *  2. INTEGER_ARRAY b
 *
 * Awards one point per position to whichever list holds the larger value;
 * ties award no points. Returns [scoreA, scoreB].
 */
public static List<Integer> compareTriplets(List<Integer> a, List<Integer> b) {
    int scoreA = 0;
    int scoreB = 0;

    for (int round = 0; round < a.size(); round++) {
        // compareTo's sign tells us who won this round (0 means a tie).
        int outcome = a.get(round).compareTo(b.get(round));
        if (outcome > 0) {
            scoreA++;
        } else if (outcome < 0) {
            scoreB++;
        }
    }

    List<Integer> scores = new ArrayList<>();
    scores.add(scoreA);
    scores.add(scoreB);
    return scores;
}
/**
 * Builder for a request to change a user's password. Accepts either a
 * plaintext password (validated and hashed here) or a pre-computed password
 * hash, and can populate itself from a serialized request body. The two
 * inputs are mutually exclusive for a single request.
 */
public class ChangePasswordRequestBuilder extends ActionRequestBuilder<ChangePasswordRequest, ActionResponse.Empty>
    implements
        WriteRequestBuilder<ChangePasswordRequestBuilder> {

    public ChangePasswordRequestBuilder(ElasticsearchClient client) {
        super(client, ChangePasswordAction.INSTANCE, new ChangePasswordRequest());
    }

    /** Sets the name of the user whose password is being changed. */
    public ChangePasswordRequestBuilder username(String username) {
        request.username(username);
        return this;
    }

    /**
     * Validates the given plaintext password and returns its hash.
     *
     * @throws ValidationException if the password fails validation
     */
    public static char[] validateAndHashPassword(SecureString password, Hasher hasher) {
        Validation.Error error = Validation.Users.validatePassword(password);
        if (error != null) {
            throw validationException(error.toString());
        }
        return hasher.hash(password);
    }

    /**
     * Sets the password. Note: the char[] passed to this method will be cleared.
     */
    public ChangePasswordRequestBuilder password(char[] password, Hasher hasher) {
        // SecureString takes ownership of the char[] and zeroes it on close.
        try (SecureString secureString = new SecureString(password)) {
            char[] hash = validateAndHashPassword(secureString, hasher);
            // password and password_hash may not both be supplied on one request.
            if (request.passwordHash() != null) {
                throw validationException("password_hash has already been set");
            }
            request.passwordHash(hash);
        }
        return this;
    }

    /**
     * Sets the password hash. The hash must have been produced by the
     * configured hashing algorithm; the algorithm is recovered from the hash
     * itself and compared against the configured one.
     */
    public ChangePasswordRequestBuilder passwordHash(char[] passwordHashChars, Hasher configuredHasher) {
        final Hasher resolvedHasher = Hasher.resolveFromHash(passwordHashChars);
        if (resolvedHasher.equals(configuredHasher) == false) {
            throw new IllegalArgumentException(
                "Provided password hash uses [" + resolvedHasher + "] but the configured hashing algorithm is [" + configuredHasher + "]"
            );
        }
        if (request.passwordHash() != null) {
            throw validationException("password_hash has already been set");
        }
        request.passwordHash(passwordHashChars);
        return this;
    }

    /**
     * Populate the change password request from the source in the provided content type.
     * Accepts exactly one of the fields "password" or "password_hash" (string-valued);
     * any other field is rejected.
     */
    public ChangePasswordRequestBuilder source(BytesReference source, XContentType xContentType, Hasher hasher) throws IOException {
        // EMPTY is ok here because we never call namedObject
        try (
            InputStream stream = source.streamInput();
            XContentParser parser = xContentType.xContent()
                .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)
        ) {
            XContentUtils.verifyObject(parser);
            XContentParser.Token token;
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (User.Fields.PASSWORD.match(currentFieldName, parser.getDeprecationHandler())) {
                    if (token == XContentParser.Token.VALUE_STRING) {
                        String password = parser.text();
                        final char[] passwordChars = password.toCharArray();
                        password(passwordChars, hasher);
                        // password(...) wraps the array in a SecureString that zeroes it on close.
                        assert CharBuffer.wrap(passwordChars).chars().noneMatch((i) -> (char) i != (char) 0)
                            : "expected password to " + "clear the char[] but it did not!";
                    } else {
                        throw new ElasticsearchParseException(
                            "expected field [{}] to be of type string, but found [{}] instead",
                            currentFieldName,
                            token
                        );
                    }
                } else if (User.Fields.PASSWORD_HASH.match(currentFieldName, parser.getDeprecationHandler())) {
                    if (token == XContentParser.Token.VALUE_STRING) {
                        char[] passwordHashChars = parser.text().toCharArray();
                        passwordHash(passwordHashChars, hasher);
                    } else {
                        throw new ElasticsearchParseException(
                            "expected field [{}] to be of type string, but found [{}] instead",
                            currentFieldName,
                            token
                        );
                    }
                } else {
                    throw new ElasticsearchParseException(
                        "failed to parse change password request. unexpected field [{}]",
                        currentFieldName
                    );
                }
            }
        }
        return this;
    }

    /** Builds a ValidationException carrying a single validation error message. */
    private static ValidationException validationException(String message) {
        ValidationException validationException = new ValidationException();
        validationException.addValidationError(message);
        return validationException;
    }
}
/**
 * Adds the given {@link ResolvedArtifact} to the {@link ClassPool} when it is a directory.
 *
 * @param classPool the ClassPool to update.
 * @param resolvedArtifactName the name of the ResolvedArtifact to add.
 * @param resolvedDir the Directory of the ResolvedArtifact.
 * @throws IOException if any I/O error occurs.
 */
@VisibleForTesting
void addDirDependencyToClassPool(@NonNull final ClassPool classPool,
        @NonNull final String resolvedArtifactName,
        @NonNull final File resolvedDir) throws IOException {
    final String dirPath = resolvedDir.getAbsolutePath();
    final String jarPath = findClassesJar(dirPath);

    // Only register the artifact when its classes.jar can be located.
    if (jarPath != null) {
        addPathToClassPool(classPool, jarPath);
    } else {
        logger.debug("Could not find classes.jar for \"{}\" at path \"{}\".", resolvedArtifactName, dirPath);
    }
}
/**
 * An implementation of the circuit breaker that does nothing: it never opens,
 * always permits requests, and ignores success/failure feedback.
 *
 * @ExcludeFromJavadoc
 */
public static class NoOpCircuitBreaker implements HystrixCircuitBreaker {

    // Always permits the request — this breaker never trips.
    @Override
    public boolean allowRequest() {
        return true;
    }

    // Reported as permanently closed.
    @Override
    public boolean isOpen() {
        return false;
    }

    // Success feedback is intentionally discarded.
    @Override
    public void markSuccess() {
    }

    // Failure feedback is intentionally discarded.
    @Override
    public void markFailure() {
    }
}
package gr.uom.java.distance;

import gr.uom.java.ast.*;
import gr.uom.java.ast.association.Association;
import gr.uom.java.ast.association.AssociationDetection;
import gr.uom.java.ast.decomposition.MethodBodyObject;

import java.util.*;

/**
 * Distance-framework view of an analyzed system: wraps a {@link SystemObject}
 * and builds a map of {@link MyClass} entities (attributes and methods) on
 * which the distance computations operate. Getter/setter/collection-adder
 * methods and pure delegates to other system classes are excluded.
 */
public class MySystem {

    // Class name -> MyClass wrapper built from the underlying SystemObject.
    private Map<String,MyClass> classMap;
    // Detects which fields participate in associations between system classes.
    private AssociationDetection associationDetection;
    // The parsed system this model mirrors.
    private SystemObject systemObject;

    /**
     * Builds the model. When {@code includeStaticMembers} is false, static
     * fields and static methods are left out of the generated entities.
     */
    public MySystem(SystemObject systemObject, boolean includeStaticMembers) {
        this.systemObject = systemObject;
        this.classMap = new HashMap<String,MyClass>();
        this.associationDetection = new AssociationDetection(systemObject);
        if(includeStaticMembers)
            generateSystemWithStaticMembers();
        else
            generateSystem();
    }

    /**
     * Two-pass construction ignoring static members: the first pass creates
     * the classes and their non-static attributes so that, in the second pass,
     * every attribute's owner class is already present in classMap when
     * methods and their attribute accesses are wired up.
     */
    private void generateSystem() {
        ListIterator<ClassObject> classIterator1 = systemObject.getClassListIterator();
        while(classIterator1.hasNext()) {
            ClassObject co = classIterator1.next();
            MyClass myClass = new MyClass(co.getName());
            myClass.setClassObject(co);
            TypeObject superclassType = co.getSuperclass();
            if(superclassType != null) {
                String superclass = superclassType.getClassType();
                // Only record superclasses that belong to the analyzed system.
                if(systemObject.getClassObject(superclass) != null) {
                    myClass.setSuperclass(superclass);
                }
            }
            ListIterator<FieldObject> fieldIt = co.getFieldIterator();
            while(fieldIt.hasNext()) {
                FieldObject fo = fieldIt.next();
                if(!fo.isStatic()) {
                    MyAttribute myAttribute = new MyAttribute(co.getName(),fo.getType().toString(),fo.getName());
                    myAttribute.setAccess(fo.getAccess().toString());
                    myAttribute.setFieldObject(fo);
                    // Fields participating in an association are marked as references.
                    if(associationDetection.containsFieldObject(fo))
                        myAttribute.setReference(true);
                    myClass.addAttribute(myAttribute);
                }
            }
            classMap.put(co.getName(),myClass);
        }
        ListIterator<ClassObject> classIterator2 = systemObject.getClassListIterator();
        while(classIterator2.hasNext()) {
            ClassObject co = classIterator2.next();
            MyClass myClass = classMap.get(co.getName());
            ListIterator<MethodObject> methodIt = co.getMethodIterator();
            while(methodIt.hasNext()) {
                MethodObject mo = methodIt.next();
                // Skip static methods and accessors (getters/setters/collection adders).
                if(!mo.isStatic() && systemObject.containsGetter(mo.generateMethodInvocation()) == null &&
                        systemObject.containsSetter(mo.generateMethodInvocation()) == null &&
                        systemObject.containsCollectionAdder(mo.generateMethodInvocation()) == null) {
                    MethodInvocationObject delegation = systemObject.containsDelegate(mo.generateMethodInvocation());
                    // Keep the method unless it is a delegate to another class of the system.
                    // NOTE(review): the `delegation != null &&` inside the || is redundant.
                    if(delegation == null || (delegation != null &&
                            systemObject.getClassObject(delegation.getOriginClassName()) == null)) {
                        MyMethod myMethod = new MyMethod(mo.getClassName(),mo.getName(),
                                mo.getReturnType().toString(),mo.getParameterList());
                        if(mo.isAbstract())
                            myMethod.setAbstract(true);
                        myMethod.setAccess(mo.getAccess().toString());
                        myMethod.setMethodObject(mo);
                        MethodBodyObject methodBodyObject = mo.getMethodBody();
                        if(methodBodyObject != null) {
                            MyMethodBody myMethodBody = new MyMethodBody(methodBodyObject);
                            myMethod.setMethodBody(myMethodBody);
                        }
                        myClass.addMethod(myMethod);
                        // Link every attribute instruction of the method back to the
                        // attribute it accesses, so the attribute knows its accessors.
                        ListIterator<MyAttributeInstruction> attributeInstructionIterator = myMethod.getAttributeInstructionIterator();
                        while(attributeInstructionIterator.hasNext()) {
                            MyAttributeInstruction myInstruction = attributeInstructionIterator.next();
                            MyClass ownerClass = classMap.get(myInstruction.getClassOrigin());
                            MyAttribute accessedAttribute = ownerClass.getAttribute(myInstruction);
                            if(accessedAttribute != null) {
                                if(accessedAttribute.isReference())
                                    myMethod.setAttributeInstructionReference(myInstruction, true);
                                accessedAttribute.addMethod(myMethod);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Same two-pass construction as {@link #generateSystem()}, but static
     * fields and static methods are included as well.
     */
    private void generateSystemWithStaticMembers() {
        ListIterator<ClassObject> classIterator1 = systemObject.getClassListIterator();
        while(classIterator1.hasNext()) {
            ClassObject co = classIterator1.next();
            MyClass myClass = new MyClass(co.getName());
            myClass.setClassObject(co);
            TypeObject superclassType = co.getSuperclass();
            if(superclassType != null) {
                String superclass = superclassType.getClassType();
                // Only record superclasses that belong to the analyzed system.
                if(systemObject.getClassObject(superclass) != null) {
                    myClass.setSuperclass(superclass);
                }
            }
            ListIterator<FieldObject> fieldIt = co.getFieldIterator();
            while(fieldIt.hasNext()) {
                FieldObject fo = fieldIt.next();
                MyAttribute myAttribute = new MyAttribute(co.getName(),fo.getType().toString(),fo.getName());
                myAttribute.setAccess(fo.getAccess().toString());
                myAttribute.setFieldObject(fo);
                if(associationDetection.containsFieldObject(fo))
                    myAttribute.setReference(true);
                myClass.addAttribute(myAttribute);
            }
            classMap.put(co.getName(),myClass);
        }
        ListIterator<ClassObject> classIterator2 = systemObject.getClassListIterator();
        while(classIterator2.hasNext()) {
            ClassObject co = classIterator2.next();
            MyClass myClass = classMap.get(co.getName());
            ListIterator<MethodObject> methodIt = co.getMethodIterator();
            while(methodIt.hasNext()) {
                MethodObject mo = methodIt.next();
                // Accessors are skipped; static methods are kept here.
                if(systemObject.containsGetter(mo.generateMethodInvocation()) == null &&
                        systemObject.containsSetter(mo.generateMethodInvocation()) == null &&
                        systemObject.containsCollectionAdder(mo.generateMethodInvocation()) == null) {
                    MethodInvocationObject delegation = systemObject.containsDelegate(mo.generateMethodInvocation());
                    // NOTE(review): the `delegation != null &&` inside the || is redundant.
                    if(delegation == null || (delegation != null &&
                            systemObject.getClassObject(delegation.getOriginClassName()) == null)) {
                        MyMethod myMethod = new MyMethod(mo.getClassName(),mo.getName(),
                                mo.getReturnType().toString(),mo.getParameterList());
                        if(mo.isAbstract())
                            myMethod.setAbstract(true);
                        myMethod.setAccess(mo.getAccess().toString());
                        myMethod.setMethodObject(mo);
                        MethodBodyObject methodBodyObject = mo.getMethodBody();
                        if(methodBodyObject != null) {
                            MyMethodBody myMethodBody = new MyMethodBody(methodBodyObject);
                            myMethod.setMethodBody(myMethodBody);
                        }
                        myClass.addMethod(myMethod);
                        ListIterator<MyAttributeInstruction> attributeInstructionIterator = myMethod.getAttributeInstructionIterator();
                        while(attributeInstructionIterator.hasNext()) {
                            MyAttributeInstruction myInstruction = attributeInstructionIterator.next();
                            MyClass ownerClass = classMap.get(myInstruction.getClassOrigin());
                            MyAttribute accessedAttribute = ownerClass.getAttribute(myInstruction);
                            if(accessedAttribute != null) {
                                if(accessedAttribute.isReference())
                                    myMethod.setAttributeInstructionReference(myInstruction, true);
                                accessedAttribute.addMethod(myMethod);
                            }
                        }
                    }
                }
            }
        }
    }

    /** Iterates over every MyClass entity of the modeled system. */
    public Iterator<MyClass> getClassIterator() {
        return classMap.values().iterator();
    }

    /** Looks up a class wrapper by fully qualified name; null if absent. */
    public MyClass getClass(String className) {
        return classMap.get(className);
    }

    /** Adds a class wrapper unless one with the same name already exists. */
    public void addClass(MyClass newClass) {
        if(!classMap.containsKey(newClass.getName())) {
            classMap.put(newClass.getName(), newClass);
        }
    }

    /** Removes a class wrapper if present. */
    public void removeClass(MyClass oldClass) {
        if(classMap.containsKey(oldClass.getName())) {
            classMap.remove(oldClass.getName());
        }
    }

    public SystemObject getSystemObject() {
        return systemObject;
    }

    public List<Association> getAssociationsOfClass(ClassObject classObject) {
        return associationDetection.getAssociationsOfClass(classObject);
    }

    /**
     * Returns the container association between the two classes, or null when
     * no association exists or the association is not a container.
     */
    public Association containsAssociationWithMultiplicityBetweenClasses(String from, String to) {
        Association association = associationDetection.getAssociation(from, to);
        if(association != null && association.isContainer())
            return association;
        return null;
    }
}
def execute_config(log, transaction_id, scaling_group, launch_config):
#include "GFSDK_SSAO.h"

// Thin DLL-exported wrapper around NVIDIA's GFSDK SSAO (HBAO+) D3D11 API.
// Holds the library context plus the input/output/parameter structures the
// library needs for a render pass.
struct __declspec(dllexport) HBAO {
	GFSDK_SSAO_CustomHeap heap;           // allocation callbacks handed to the library — presumably set in the constructor; confirm in the .cpp
	GFSDK_SSAO_InputData_D3D11 input;     // depth SRV + projection input description
	GFSDK_SSAO_Context_D3D11 *context;    // library rendering context (pointer; ownership managed elsewhere)
	GFSDK_SSAO_Parameters parameters;     // AO tuning parameters
	GFSDK_SSAO_Output_D3D11 output;       // render target the AO result is written to
	// Builds the wrapper from the device, a depth SRV, the AO render target
	// and a 4x4 projection matrix (16 floats).
	HBAO(
		ID3D11Device *device,
		ID3D11ShaderResourceView *depthView,
		ID3D11RenderTargetView *renderView,
		float projection[16]);
	// Renders the AO pass on the given immediate/deferred context.
	int RenderAO(ID3D11DeviceContext *context);
};
def create_copy(self):
    """Return a new Stimulus duplicating this one.

    Mutable fields (colour, background, condition, response) are deep-copied
    so the copy is independent; word, font, antialiasing, x and y are shared
    as-is, matching the original behaviour.
    """
    deep = copy.deepcopy
    return Stimulus(
        self.word,
        self.font,
        colour = deep(self.colour),
        background = deep(self.background),
        antialiasing = self.antialiasing,
        condition = deep(self.condition),
        x = self.x,
        y = self.y,
        response = deep(self.response)
    )
package com.atanas.web.config; import org.springframework.context.annotation.*; import org.springframework.security.crypto.bcrypt.*; import org.springframework.security.crypto.password.*; import org.springframework.web.servlet.config.annotation.*; @Configuration @EnableWebMvc @EnableAspectJAutoProxy @ComponentScan(basePackages = "com.atanas.web") public class MvcCoreConfig implements WebMvcConfigurer { @Bean public PasswordEncoder passwordEncoder() { PasswordEncoder passwordEncoder = new BCryptPasswordEncoder(12); return passwordEncoder; } }
<filename>src/kernel/mm/virt_memory.c /* * EOS - Experimental Operating System * Virtual memory manager module */ #include <kernel/mm/virt_memory.h> #include <kernel/mm/phys_memory.h> #include <kernel/tty.h> #include <libk/string.h> page_directory *kernel_page_dir; // Pointer (physical) to kernel page dircetory structure bool vmm_alloc_page(virtual_addr vaddr) { physical_addr paddr = pmm_alloc_block(); if (!paddr) { return false; } vmm_map_page(paddr, vaddr); return true; } bool vmm_alloc_page_with_userbit(virtual_addr vaddr) { physical_addr paddr = pmm_alloc_block(); if (!paddr) { return false; } vmm_map_page(paddr, vaddr); page_table_entry *pte = GET_PTE(vaddr); page_table_entry_add_attrib(pte, I86_PTE_USER); return true; } void vmm_free_page(virtual_addr vaddr) { page_table_entry *pte = GET_PTE(vaddr); if (!page_table_entry_is_present(*pte)) { tty_printf("oh, you try to delete not present page\n"); return; } physical_addr block = page_table_entry_frame(*pte); if (block) { pmm_free_block(block); } page_table_entry_del_attrib(pte, I86_PTE_PRESENT); } void vmm_create_kernel_page_dir() { kernel_page_dir = (page_directory*) pmm_alloc_block(); if (kernel_page_dir == 0xFFFFFFFF) { tty_printf("Failed to allocate phys memory for kernel page dir\n"); // Panic return; } //page_directory *pd = (page_directory*) vmm_temp_map_page((physical_addr) kernel_page_dir); page_directory *pd = kernel_page_dir; memset(pd, 0, sizeof(page_directory)); int i; for (i = 0; i < PAGE_ENTRIES; i++) { page_dir_entry *pde = (page_dir_entry*) &pd->entries[i]; page_dir_entry_add_attrib(pde, I86_PTE_WRITABLE); page_dir_entry_del_attrib(pde, I86_PTE_PRESENT); if (i == PAGE_ENTRIES - 1) { // Fractal(recursive) mapping technique, which allows us to access PD and PT page_dir_entry_add_attrib(pde, I86_PTE_PRESENT); page_dir_entry_set_frame(pde, (physical_addr) kernel_page_dir); //tty_printf("pd[1023] = %x\n", pd->entries[1023]); } } } void vmm_map_page(physical_addr paddr, virtual_addr vaddr) { 
page_dir_entry *pde = GET_PDE(vaddr); if (!page_dir_entry_is_present(*pde)) { // If page table isnt present, create it physical_addr pt_p = pmm_alloc_block(); // It's phys addr! if (pt_p == 0xFFFFFFFF) { tty_printf("wtf? no free phys memory\n"); return; } page_table *pt_v = (page_table*) vmm_temp_map_page(pt_p); // Because we need to write! memset(pt_v, 0, sizeof(page_table)); page_dir_entry_add_attrib(pde, I86_PDE_PRESENT); page_dir_entry_add_attrib(pde, I86_PDE_WRITABLE); page_dir_entry_set_frame(pde, pt_p); } page_table_entry *pte = GET_PTE(vaddr); page_table_entry_set_frame(pte, paddr); page_table_entry_add_attrib(pte, I86_PTE_PRESENT); page_table_entry_add_attrib(pte, I86_PTE_WRITABLE); flush_tlb_entry(vaddr); } virtual_addr vmm_temp_map_page(physical_addr paddr) { page_table_entry *pte = GET_PTE(TEMP_PAGE_ADDR); page_table_entry_set_frame(pte, PAGE_ALIGN_DOWN(paddr)); // Old:DOWN page_table_entry_add_attrib(pte, I86_PTE_PRESENT); page_table_entry_add_attrib(pte, I86_PTE_WRITABLE); //flush_tlb_entry(TEMP_PAGE_ADDR); asm volatile("invlpg %0" :: "m" (*(uint32_t *) TEMP_PAGE_ADDR) : "memory" ); //flush_tlb_all(); return TEMP_PAGE_ADDR; } // Switch page directory, reveives physical address void vmm_switch_page_directory(page_directory *page_dir_phys_addr) { asm volatile("mov %0, %%cr3" :: "r"((uint32_t) page_dir_phys_addr)); } void vmm_init() { //tty_printf("1\n"); vmm_create_kernel_page_dir(); page_table *table1 = (page_table*) pmm_alloc_block(); page_table *table2 = (page_table*) pmm_alloc_block(); // Clear allocated page tables memset((void*) table1, 0, sizeof(page_table)); memset((void*) table2, 0, sizeof(page_table)); // Maps first MB to 3GB physical_addr frame; virtual_addr virt; for (frame = 0x0, virt = 0xC0000000; frame < 0x100000/*0x100000*/; frame += PAGE_SIZE, virt += PAGE_SIZE) { page_table_entry page = 0; page_table_entry_add_attrib(&page, I86_PTE_PRESENT); page_table_entry_set_frame(&page, frame); table1->entries[PAGE_TABLE_INDEX(virt)] = page; } // 
Maps kernel pages and phys mem pages for (frame = KERNEL_START_PADDR, virt = KERNEL_START_VADDR; frame < KERNEL_PHYS_MAP_END; frame += PAGE_SIZE, virt += PAGE_SIZE) { page_table_entry page = 0; page_table_entry_add_attrib(&page, I86_PTE_PRESENT); page_table_entry_set_frame(&page, frame); table2->entries[PAGE_TABLE_INDEX(virt)] = page; } page_dir_entry *pde1 = (page_dir_entry*) &kernel_page_dir->entries[PAGE_DIRECTORY_INDEX(0x00000000)]; //pdirectory_lookup_entry(cur_directory, 0x00000000); page_dir_entry_add_attrib(pde1, I86_PDE_PRESENT); page_dir_entry_add_attrib(pde1, I86_PDE_WRITABLE); page_dir_entry_set_frame(pde1, (physical_addr) table1); page_dir_entry **pde2 = (page_dir_entry*) &kernel_page_dir->entries[PAGE_DIRECTORY_INDEX(0xC0100000)]; //pdirectory_lookup_entry(cur_directory, 0xC0100000); page_dir_entry_add_attrib(pde2, I86_PDE_PRESENT); page_dir_entry_add_attrib(pde2, I86_PDE_WRITABLE); page_dir_entry_set_frame(pde2, (physical_addr) table2); update_phys_memory_bitmap_addr(KERNEL_END_VADDR); enable_paging((physical_addr) kernel_page_dir); //tty_printf("Virtual memory manager initialized!\n"); } void vmm_test() { tty_printf("kernel_page_dir = %x\n", (physical_addr) kernel_page_dir); physical_addr padr1 = 0xC0500000; virtual_addr vadr1 = vmm_temp_map_page(padr1); *(uint8_t*) vadr1 = 77; tty_printf("%x = %x\n", padr1, *(uint8_t*) vadr1); //tty_printf("%x = %x\n", (0x00100000), *(uint8_t*) (0x00100000)); IT WILL CAUSE PAGE FAULT!!!! BEACUSE WE 1:1 MAPPED UP TO 1MB PHYS MEM BUT NEVKLYUCHITELNO! 
tty_printf("%x = %x\n", (0x00100000 - 1), *(uint8_t*) (0x00100000 - 1)); //asm volatile( "movl %0, %%cr3" :: "r" (kernel_page_dir)); int eip; asm volatile("1: lea 1b, %0;": "=a"(eip)); tty_printf("EIP = %x ", eip); } // Add attribute to pte void page_table_entry_add_attrib(page_table_entry *entry, uint32_t attrib) { *entry |= attrib; } // Delete attribute to pte void page_table_entry_del_attrib(page_table_entry *entry, uint32_t attrib) { *entry &= ~attrib; } // Map pte to physical frame void page_table_entry_set_frame(page_table_entry *entry, physical_addr addr) { *entry = (*entry & ~I86_PTE_FRAME) | addr; } bool page_table_entry_is_present(page_table_entry entry) { return entry & I86_PTE_PRESENT; } bool page_table_entry_is_writable(page_table_entry entry) { return entry & I86_PTE_WRITABLE; } // Return the address of physical frame which pte refers to physical_addr page_table_entry_frame(page_table_entry entry) { return entry & I86_PTE_FRAME; } // Functions for Page Directory Entries // Add attribute to pde void page_dir_entry_add_attrib(page_dir_entry *entry, uint32_t attrib) { *entry |= attrib; } // Delete attribute to pde void page_dir_entry_del_attrib(page_dir_entry *entry, uint32_t attrib) { *entry &= ~attrib; //old: was without ~ !! 
} // Map pde to physical frame (where the appropriate page table stores) void page_dir_entry_set_frame(page_dir_entry *entry, physical_addr addr) { *entry = (*entry & ~I86_PDE_FRAME) | addr; } bool page_dir_entry_is_present(page_dir_entry entry) { return entry & I86_PDE_PRESENT; } bool page_dir_entry_is_user(page_dir_entry entry) { return entry & I86_PDE_USER; } bool page_dir_entry_is_4mb(page_dir_entry entry) { return entry & I86_PDE_4MB; } bool page_dir_entry_is_writable(page_dir_entry entry) { return entry & I86_PDE_WRITABLE; } // Return the address of physical frame which pde refers to physical_addr page_dir_entry_frame(page_dir_entry entry) { return entry & I86_PDE_FRAME; } void flush_tlb_entry(virtual_addr addr) { asm volatile("invlpg (%0)" : : "b"(addr) : "memory"); }
// This file was generated based on C:/Users/JuanJose/AppData/Local/Fusetools/Packages/Fuse.Controls.Native/1.9.0/NativeRenderer.uno.
// WARNING: Changes might be lost if you edit this file directly.
#pragma once
#include <Fuse.Controls.Native.-92d9dd5d.h>
#include <Uno.IDisposable.h>
#include <Uno.Int2.h>
#include <Uno.Object.h>

// Forward declarations for the generated Uno types referenced below.
namespace g{namespace Fuse{namespace Controls{namespace Native{struct NativeViewRenderer;}}}}
namespace g{namespace Fuse{namespace Controls{namespace Native{struct ViewHandle;}}}}
namespace g{namespace Java{struct Object;}}
namespace g{namespace Uno{struct Float2;}}
namespace g{namespace Uno{struct Float4x4;}}

namespace g{
namespace Fuse{
namespace Controls{
namespace Native{

// public sealed extern class NativeViewRenderer :17
// {
// Generated type descriptor: records the interfaces implemented by NativeViewRenderer.
struct NativeViewRenderer_type : uType
{
    ::g::Uno::IDisposable interface0;
    ::g::Fuse::Controls::Native::IViewHandleRenderer interface1;
};

NativeViewRenderer_type* NativeViewRenderer_typeof();

// Generated C ABI entry points mirroring the Uno class members.
void NativeViewRenderer__ctor__fn(NativeViewRenderer* __this);
void NativeViewRenderer__AllocPixelBuffer_fn(int32_t* w, int32_t* h, ::g::Java::Object** __retval);
void NativeViewRenderer__Dispose_fn(NativeViewRenderer* __this);
void NativeViewRenderer__Draw_fn(NativeViewRenderer* __this, ::g::Fuse::Controls::Native::ViewHandle* viewHandle, ::g::Uno::Float4x4* localToClipTransform, ::g::Uno::Float2* position, ::g::Uno::Float2* size, float* density);
void NativeViewRenderer__FreePixelBuffer_fn(::g::Java::Object* bitmap);
void NativeViewRenderer__Invalidate_fn(NativeViewRenderer* __this);
void NativeViewRenderer__New1_fn(NativeViewRenderer** __retval);
void NativeViewRenderer__ReleaseResources_fn(NativeViewRenderer* __this);
void NativeViewRenderer__Upload_fn(::g::Java::Object* viewHandle, ::g::Java::Object* pixelBuffer, bool* reuse, int32_t* w, int32_t* h);

// Generated object layout for the Uno NativeViewRenderer class.
struct NativeViewRenderer : uObject
{
    uStrong<uObject*> _pixelBuffer;  // strong reference to the Java-side pixel buffer
    uint32_t _textureHandle;
    bool _valid;                     // whether the cached texture is up to date
    ::g::Uno::Int2 _prevSize;
    void ctor_();
    void Dispose();
    void Draw(::g::Fuse::Controls::Native::ViewHandle* viewHandle, ::g::Uno::Float4x4 localToClipTransform, ::g::Uno::Float2 position, ::g::Uno::Float2 size, float density);
    void Invalidate();
    void ReleaseResources();
    static ::g::Java::Object* AllocPixelBuffer(int32_t w, int32_t h);
    static void FreePixelBuffer(::g::Java::Object* bitmap);
    static NativeViewRenderer* New1();
    static void Upload(::g::Java::Object* viewHandle, ::g::Java::Object* pixelBuffer, bool reuse, int32_t w, int32_t h);
};
// }

}}}} // ::g::Fuse::Controls::Native
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
import uuid

import jsonschema
from testtools.matchers import Equals

from trove.backup import models
from trove.backup import state
from trove.backup.service import BackupController
from trove.common import apischema
from trove.common import context
from trove.common import wsgi
from trove.tests.unittests import trove_testtools
from trove.tests.unittests.util import util


class TestBackupController(trove_testtools.TestCase):
    """Tests for BackupController: JSON-schema validation of the backup
    'create' request body, and the index (list) endpoint.
    """

    def setUp(self):
        super(TestBackupController, self).setUp()
        # A well-formed instance id and a string that must fail the
        # apischema uuid pattern.
        self.uuid = "d6338c9c-3cc8-4313-b98f-13cc0684cf15"
        self.invalid_uuid = "ead-edsa-e23-sdf-23"
        self.controller = BackupController()
        # Fresh project id per test so tearDown can clean up only our rows.
        self.context = context.TroveContext(project_id=str(uuid.uuid4()))
        util.init_db()

    def tearDown(self):
        """Remove any DBBackup rows created under this test's project id."""
        super(TestBackupController, self).tearDown()
        backups = models.DBBackup.find_all(tenant_id=self.context.project_id)
        for backup in backups:
            backup.delete()

    def test_validate_create_complete(self):
        """A body with a valid instance uuid and name passes the schema."""
        body = {"backup": {"instance": self.uuid,
                           "name": "testback-backup"}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertTrue(validator.is_valid(body))

    def test_validate_create_with_blankname(self):
        """A whitespace-only name is rejected by the name pattern."""
        body = {"backup": {"instance": self.uuid,
                           "name": ' '}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertFalse(validator.is_valid(body))
        errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
        self.assertEqual(1, len(errors))
        # The name pattern requires at least one alphanumeric character.
        self.assertIn("' ' does not match '^.*[0-9a-zA-Z]+.*$'",
                      errors[0].message)

    def test_validate_create_with_invalidname(self):
        """A name made only of punctuation is rejected by the name pattern."""
        body = {"backup": {"instance": self.uuid,
                           "name": '$#@&?'}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertFalse(validator.is_valid(body))
        errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
        self.assertEqual(1, len(errors))
        self.assertIn("'$#@&?' does not match '^.*[0-9a-zA-Z]+.*$'",
                      errors[0].message)

    def test_validate_create_invalid_uuid(self):
        """A malformed instance id fails the apischema uuid pattern."""
        body = {"backup": {"instance": self.invalid_uuid,
                           "name": "testback-backup"}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertFalse(validator.is_valid(body))
        errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
        self.assertThat(errors[0].message,
                        Equals("'%s' does not match '%s'" %
                               (self.invalid_uuid, apischema.uuid['pattern'])))

    def test_validate_create_incremental(self):
        """An incremental backup (parent_id set to a valid uuid) passes."""
        body = {"backup": {"instance": self.uuid,
                           "name": "testback-backup",
                           "parent_id": self.uuid}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertTrue(validator.is_valid(body))

    def test_invalid_parent_id(self):
        """A malformed parent_id fails the apischema uuid pattern."""
        body = {"backup": {"instance": self.uuid,
                           "name": "testback-backup",
                           "parent_id": self.invalid_uuid}}
        schema = self.controller.get_schema('create', body)
        validator = jsonschema.Draft4Validator(schema)
        self.assertFalse(validator.is_valid(body))
        errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
        self.assertThat(errors[0].message,
                        Equals("'%s' does not match '%s'" %
                               (self.invalid_uuid, apischema.uuid['pattern'])))

    def test_list_by_project(self):
        """index() filtered by project_id returns the project's backups and
        an empty list for an unknown project.
        """
        # Request carrying our project id as the ?project_id= filter.
        req = mock.MagicMock(GET={'project_id': self.context.project_id},
                             environ={wsgi.CONTEXT_KEY: self.context},
                             url='http://localhost')
        instance_id = str(uuid.uuid4())
        backup_name = str(uuid.uuid4())
        location = 'https://object-storage.com/tenant/database_backups/backup'
        models.DBBackup.create(tenant_id=self.context.project_id,
                               name=backup_name,
                               state=state.BackupState.NEW,
                               instance_id=instance_id,
                               deleted=False,
                               size=2.0,
                               location=location)

        res = self.controller.index(req, 'fake_tenant_id')

        self.assertEqual(200, res.status)
        backups = res.data(None)['backups']
        self.assertGreaterEqual(len(backups), 1)
        # Locate the backup we just created by its (random, unique) name.
        our_backup = None
        for backup in backups:
            if backup['name'] == backup_name:
                our_backup = backup
                break
        self.assertIsNotNone(our_backup)
        expected = {
            'name': backup_name,
            'locationRef': location,
            'instance_id': instance_id,
            'size': 2.0,
            'status': 'NEW',
        }
        # Subset check: the response may carry extra keys (id, timestamps).
        self.assertTrue(
            set(expected.items()).issubset(set(our_backup.items()))
        )

        # Get backups of unknown project
        req = mock.MagicMock(GET={'project_id': str(uuid.uuid4())},
                             environ={wsgi.CONTEXT_KEY: self.context},
                             url='http://localhost')
        res = self.controller.index(req, 'fake_tenant_id')
        self.assertEqual(200, res.status)
        backups = res.data(None)['backups']
        self.assertEqual(0, len(backups))
<gh_stars>0 // ---------------------------------------------------------------------------- // Copyright (c) Microsoft Corporation. All rights reserved. // ---------------------------------------------------------------------------- // tslint:disable:no-unused-expression max-func-body-length promise-function-async max-line-length insecure-random // tslint:disable:object-literal-key-quotes no-function-expression no-non-null-assertion align no-http-string import * as assert from "assert"; import { randomBytes } from "crypto"; import { ISuiteCallbackContext, ITestCallbackContext } from "mocha"; import { Uri } from "vscode"; import { DefinitionKind, DeploymentTemplate, Histogram, INamedDefinition, IncorrectArgumentsCountIssue, IParameterDefinition, IVariableDefinition, Json, Language, ReferenceInVariableDefinitionsVisitor, ReferenceList, TemplateScope, UnrecognizedUserFunctionIssue, UnrecognizedUserNamespaceIssue } from "../extension.bundle"; import { IDeploymentTemplate, sources, testDiagnostics } from "./support/diagnostics"; import { parseTemplate } from "./support/parseTemplate"; import { stringify } from "./support/stringify"; import { testWithLanguageServer } from "./support/testWithLanguageServer"; import { DISABLE_SLOW_TESTS } from "./testConstants"; const IssueKind = Language.IssueKind; const tleSyntax = IssueKind.tleSyntax; const fakeId = Uri.file("https://fake-id"); suite("DeploymentTemplate", () => { function findReferences(dt: DeploymentTemplate, definitionKind: DefinitionKind, definitionName: string, scope: TemplateScope): ReferenceList { // tslint:disable-next-line: no-unnecessary-initializer let definition: INamedDefinition | undefined; // tslint:disable-next-line: switch-default switch (definitionKind) { case DefinitionKind.BuiltinFunction: break; case DefinitionKind.Namespace: break; case DefinitionKind.Parameter: definition = scope.getParameterDefinition(definitionName); break; case DefinitionKind.UserFunction: break; case DefinitionKind.Variable: 
definition = scope.getVariableDefinition(definitionName); break; default: assert.fail("Test scenario NYI"); } if (!definition) { return new ReferenceList(definitionKind, []); } return dt.findReferencesToDefinition(definition!); } suite("constructor(string)", () => { test("Null stringValue", () => { // tslint:disable-next-line:no-any assert.throws(() => { new DeploymentTemplate(<any>undefined, fakeId); }); }); test("Undefined stringValue", () => { // tslint:disable-next-line:no-any assert.throws(() => { new DeploymentTemplate(<any>undefined, fakeId); }); }); test("Empty stringValue", () => { const dt = new DeploymentTemplate("", fakeId); assert.deepStrictEqual("", dt.documentText); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); assert.deepStrictEqual([], dt.topLevelScope.parameterDefinitions); }); test("Non-JSON stringValue", () => { const dt = new DeploymentTemplate("I'm not a JSON file", fakeId); assert.deepStrictEqual("I'm not a JSON file", dt.documentText); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); assert.deepStrictEqual([], dt.topLevelScope.parameterDefinitions); }); test("JSON stringValue with number parameters definition", () => { const dt = new DeploymentTemplate("{ 'parameters': 21 }", fakeId); assert.deepStrictEqual("{ 'parameters': 21 }", dt.documentText); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); assert.deepStrictEqual([], dt.topLevelScope.parameterDefinitions); }); test("JSON stringValue with empty object parameters definition", () => { const dt = new DeploymentTemplate("{ 'parameters': {} }", fakeId); assert.deepStrictEqual("{ 'parameters': {} }", dt.documentText); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); assert.deepStrictEqual([], dt.topLevelScope.parameterDefinitions); }); test("JSON stringValue with one parameter definition", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'num': { 'type': 'number' } } }", fakeId); assert.deepStrictEqual("{ 'parameters': { 'num': { 
'type': 'number' } } }", dt.documentText); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "num"); assert.deepStrictEqual(pd0.description, undefined); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 27)); }); test("JSON stringValue with one parameter definition with undefined description", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'num': { 'type': 'number', 'metadata': { 'description': null } } } }", fakeId); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "num"); assert.deepStrictEqual(pd0.description, undefined); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 64)); }); test("JSON stringValue with one parameter definition with empty description", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'num': { 'type': 'number', 'metadata': { 'description': '' } } } }", fakeId); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "num"); assert.deepStrictEqual(pd0.description, ""); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 62)); }); test("JSON stringValue with one parameter definition 
with non-empty description", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'num': { 'type': 'number', 'metadata': { 'description': 'num description' } } } }", fakeId); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "num"); assert.deepStrictEqual(pd0.description, "num description"); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 77)); }); test("JSON stringValue with number variable definitions", () => { const dt = new DeploymentTemplate("{ 'variables': 12 }", fakeId); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); assert.deepStrictEqual("{ 'variables': 12 }", dt.documentText); assert.deepStrictEqual([], dt.topLevelScope.variableDefinitions); }); test("JSON stringValue with one variable definition", () => { const dt: DeploymentTemplate = new DeploymentTemplate("{ 'variables': { 'a': 'A' } }", fakeId); assert.deepStrictEqual(dt.documentId, fakeId); assert.deepStrictEqual(dt.documentText, "{ 'variables': { 'a': 'A' } }"); assert.deepStrictEqual(dt.topLevelScope.variableDefinitions.length, 1); assert.deepStrictEqual(dt.topLevelScope.variableDefinitions[0].nameValue.toString(), "a"); const variableDefinition: Json.StringValue | undefined = Json.asStringValue(dt.topLevelScope.variableDefinitions[0].value); if (!variableDefinition) { throw new Error("failed"); } assert.deepStrictEqual(variableDefinition.span, new Language.Span(22, 3)); assert.deepStrictEqual(variableDefinition.toString(), "A"); }); test("JSON stringValue with two variable definitions", () => { const dt = new DeploymentTemplate("{ 'variables': { 'a': 'A', 'b': 2 } }", fakeId); assert.deepStrictEqual(fakeId.fsPath, dt.documentId.fsPath); 
assert.deepStrictEqual("{ 'variables': { 'a': 'A', 'b': 2 } }", dt.documentText); assert.deepStrictEqual(dt.topLevelScope.variableDefinitions.length, 2); assert.deepStrictEqual(dt.topLevelScope.variableDefinitions[0].nameValue.toString(), "a"); const a: Json.StringValue | undefined = Json.asStringValue(dt.topLevelScope.variableDefinitions[0].value); if (!a) { throw new Error("failed"); } assert.deepStrictEqual(a.span, new Language.Span(22, 3)); assert.deepStrictEqual(a.toString(), "A"); assert.deepStrictEqual(dt.topLevelScope.variableDefinitions[1].nameValue.toString(), "b"); const b: Json.NumberValue | undefined = Json.asNumberValue(dt.topLevelScope.variableDefinitions[1].value); if (!b) { throw new Error("failed"); } assert.deepStrictEqual(b.span, new Language.Span(32, 1)); }); }); suite("errors", () => { test("with empty deployment template", () => { const dt = new DeploymentTemplate("", fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, []); }); }); test("with empty object deployment template", () => { const dt = new DeploymentTemplate("{}", fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, []); }); }); test("with one property deployment template", () => { const dt = new DeploymentTemplate("{ 'name': 'value' }", fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, []); }); }); test("with one TLE parse error deployment template", () => { const dt = new DeploymentTemplate("{ 'name': '[concat()' }", fakeId); const expectedErrors = [ new Language.Issue(new Language.Span(20, 1), "Expected a right square bracket (']').", tleSyntax) ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one undefined parameter error deployment template", () => { const dt = new DeploymentTemplate("{ 'name': '[parameters(\"test\")]' }", 
fakeId); const expectedErrors = [ new Language.Issue(new Language.Span(23, 6), "Undefined parameter reference: \"test\"", IssueKind.undefinedParam) ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one undefined variable error deployment template", () => { const dt = new DeploymentTemplate("{ 'name': '[variables(\"test\")]' }", fakeId); const expectedErrors = [ new Language.Issue(new Language.Span(22, 6), "Undefined variable reference: \"test\"", IssueKind.undefinedVar) ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one unrecognized user namespace error deployment template", () => { const dt = new DeploymentTemplate("{ \"name\": \"[namespace.blah('test')]\" }", fakeId); const expectedErrors = [ new UnrecognizedUserNamespaceIssue(new Language.Span(12, 9), "namespace") ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one unrecognized user function error deployment template", () => { const dt = new DeploymentTemplate( stringify({ "name": "[contoso.blah('prefix')]", "functions": [ { "namespace": "contoso", "members": { "uniqueName": { "parameters": [ { "name": "namePrefix", "type": "string" } ], "output": { "type": "string", "value": "[concat('a')]" } } } } ] }), fakeId); const expectedErrors = [ new UnrecognizedUserFunctionIssue(new Language.Span(22, 4), "contoso", "blah") ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one user function referenced in deployment template", () => { const dt = new DeploymentTemplate( `{ "name": "[contoso.uniqueName('prefix')]", "functions": [ { "namespace": "contoso", "members": { "uniqueName": { "parameters": [ { "name": "namePrefix", "type": "string" } ] } } } ] }`, fakeId); const 
expectedErrors: string[] = [ ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("with one user function where function name matches a built-in function name", async () => { await parseTemplate( // tslint:disable-next-line:no-any <IDeploymentTemplate><any>{ "name": "[contoso.reference()]", // This is not a call to the built-in "reference" function "functions": [ { "namespace": "contoso", "members": { "reference": { } } } ] }, []); }); test("with one unrecognized user function where function name matches a built-in function name", () => { const dt = new DeploymentTemplate( stringify({ "name": "[contoso.reference()]", "functions": [ { "namespace": "contoso", "members": { "uniqueName": { "parameters": [ { "name": "whatever", "type": "string" } ] } } } ] }), fakeId); const expectedErrors = [ new UnrecognizedUserFunctionIssue(new Language.Span(22, 9), "contoso", "reference") ]; return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual(errors, expectedErrors); }); }); test("can't reference variables from within user function", async () => { const dt = new DeploymentTemplate( stringify( { "name": "hello", "variables": { "nope": "nope" }, "functions": [ { "namespace": "contoso", "members": { "foo": { "output": { "type": "string", "value": "[concat(variables('nope'))]" } } } } ] }), fakeId); const expectedErrors = [ new Language.Issue(new Language.Span(243, 6), "User functions cannot reference variables", IssueKind.varInUdf) ]; const errors: Language.Issue[] = await dt.getErrors(undefined); assert.deepStrictEqual(errors, expectedErrors); }); test("with reference() call in variable definition", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(24, 9), "reference() cannot be 
invoked inside of a variable definition.", IssueKind.referenceInVar)] ); }); }); test("Calling user function with name 'reference' okay in variables", async () => { const template = // tslint:disable-next-line:no-any <IDeploymentTemplate><any>{ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", "functions": [ { "namespace": "udf", "members": { "reference": { "output": { "value": true, "type": "BOOL" } } } } ], "resources": [ ], "variables": { "v1": "[udf.reference()]" }, "outputs": { "v1Output": { "type": "bool", "value": "[variables('v1')]" } } }; await parseTemplate(template, []); }); test("with reference() call inside a different expression in a variable definition", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[concat(reference('test'))]" } }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(31, 9), "reference() cannot be invoked inside of a variable definition.", IssueKind.referenceInVar)]); }); }); test("with unnamed property access on variable reference", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": {} }, "z": "[variables('a').]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(50, 1), "Expected a literal value.", tleSyntax)]); }); }); test("with property access on variable reference without variable name", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": {} }, "z": "[variables().b]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new IncorrectArgumentsCountIssue(new Language.Span(35, 11), "The function 'variables' takes 1 argument.", "variables", 0, 1, 1)]); }); }); test("with property access on string variable reference", () => { const dt = new DeploymentTemplate(`{ 
"variables": { "a": "A" }, "z": "[variables('a').b]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(51, 1), `Property "b" is not a defined property of "variables('a')".`, IssueKind.undefinedVarProp)]); }); }); test("with undefined variable reference child property", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": {} }, "z": "[variables('a').b]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(50, 1), `Property "b" is not a defined property of "variables('a')".`, IssueKind.undefinedVarProp)]); }); }); test("with undefined variable reference grandchild property", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": { "b": {} } }, "z": "[variables('a').b.c]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(61, 1), `Property "c" is not a defined property of "variables('a').b".`, IssueKind.undefinedVarProp)]); }); }); test("with undefined variable reference child and grandchild properties", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": { "d": {} } }, "z": "[variables('a').b.c]" }`, fakeId); return dt.getErrors(undefined).then((errors: Language.Issue[]) => { assert.deepStrictEqual( errors, [new Language.Issue(new Language.Span(59, 1), `Property "b" is not a defined property of "variables('a')".`, IssueKind.undefinedVarProp)]); }); }); }); suite("warnings", () => { test("with unused parameter", () => { const dt = new DeploymentTemplate(`{ "parameters": { "a": {} } }`, fakeId); assert.deepStrictEqual( dt.getWarnings(), [new Language.Issue(new Language.Span(18, 3), "The parameter 'a' is never used.", IssueKind.unusedParam)]); }); test("with no unused parameters", async () => { const dt = new DeploymentTemplate(`{ 
"parameters": { "a": {} }, "b": "[parameters('a')] }`, fakeId); assert.deepStrictEqual(dt.getWarnings(), []); assert.deepStrictEqual(dt.getWarnings(), []); }); test("with unused variable", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "A" } }`, fakeId); assert.deepStrictEqual( dt.getWarnings(), [new Language.Issue(new Language.Span(17, 3), "The variable 'a' is never used.", IssueKind.unusedVar)]); }); test("with no unused variables", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "A" }, "b": "[variables('a')] }`, fakeId); assert.deepStrictEqual(dt.getWarnings(), []); assert.deepStrictEqual(dt.getWarnings(), []); }); }); suite("get functionCounts()", () => { test("with empty deployment template", () => { const dt = new DeploymentTemplate("", fakeId); const expectedHistogram = new Histogram(); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); }); test("with empty object deployment template", () => { const dt = new DeploymentTemplate("{}", fakeId); const expectedHistogram = new Histogram(); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); }); test("with one property object deployment template", () => { const dt = new DeploymentTemplate("{ 'name': 'value' }", fakeId); const expectedHistogram = new Histogram(); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); }); test("with one TLE function used multiple times in deployment template", () => { const dt = new DeploymentTemplate("{ 'variables': { 'name': '[concat()]', 'name2': '[concat(1, 2)]', 'name3': '[concat(2, 3)]' } }", fakeId); const expectedHistogram = new Histogram(); expectedHistogram.add("concat"); expectedHistogram.add("concat"); expectedHistogram.add("concat"); expectedHistogram.add("concat(0)"); 
expectedHistogram.add("concat(2)"); expectedHistogram.add("concat(2)"); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); }); test("with two TLE functions in different TLEs deployment template", () => { const dt = new DeploymentTemplate(`{ "name": "[concat()]", "height": "[add()]" }`, fakeId); const expectedHistogram = new Histogram(); expectedHistogram.add("concat"); expectedHistogram.add("concat(0)"); expectedHistogram.add("add"); expectedHistogram.add("add(0)"); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); assert.deepStrictEqual(expectedHistogram, dt.getFunctionCounts()); }); test("with the same string repeated in multiple places (each use should get counted once, even though the strings are the exact same and may be cached)", () => { const dt = new DeploymentTemplate("{ 'name': '[concat()]', 'height': '[concat()]', 'width': \"[concat()]\" }", fakeId); assert.deepStrictEqual(3, dt.getFunctionCounts().getCount("concat(0)")); assert.deepStrictEqual(3, dt.getFunctionCounts().getCount("concat")); }); }); suite("get jsonParseResult()", () => { test("with empty deployment template", () => { const dt = new DeploymentTemplate("", fakeId); assert(dt.jsonParseResult); assert.equal(0, dt.jsonParseResult.tokenCount); }); test("with empty object deployment template", () => { const dt = new DeploymentTemplate("{}", fakeId); assert(dt.jsonParseResult); assert.equal(2, dt.jsonParseResult.tokenCount); }); test("With comments", () => { const dt = new DeploymentTemplate( `// Look, Ma { // No hands! 
/* This is not the right schema "$schema": "http://schema.ohwell.azure.com/schemas/2015-01-01/wrongTemplate.json#", */ "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", "parameters": { "storageAccountName": { "type": "string" } } }`, fakeId ); assert.equal(dt.schemaUri, "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"); assert.notEqual(dt.topLevelScope.getParameterDefinition("storageAccountName"), null); }); }); suite("get parameterDefinitions()", () => { test("with no parameters property", () => { const dt = new DeploymentTemplate("{}", fakeId); assert.deepStrictEqual(dt.topLevelScope.parameterDefinitions, []); }); test("with undefined parameters property", () => { const dt = new DeploymentTemplate("{ 'parameters': undefined }", fakeId); assert.deepStrictEqual(dt.topLevelScope.parameterDefinitions, []); }); test("with string parameters property", () => { const dt = new DeploymentTemplate("{ 'parameters': 'hello' }", fakeId); assert.deepStrictEqual(dt.topLevelScope.parameterDefinitions, []); }); test("with number parameters property", () => { const dt = new DeploymentTemplate("{ 'parameters': 1 }", fakeId); assert.deepStrictEqual(dt.topLevelScope.parameterDefinitions, []); }); test("with empty object parameters property", () => { const dt = new DeploymentTemplate("{ 'parameters': {} }", fakeId); assert.deepStrictEqual(dt.topLevelScope.parameterDefinitions, []); }); test("with empty object parameter", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'a': {} } }", fakeId); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "a"); assert.deepStrictEqual(pd0.description, undefined); 
assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 7)); }); test("with parameter with metadata but no description", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'a': { 'metadata': {} } } }", fakeId); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "a"); assert.deepStrictEqual(pd0.description, undefined); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 23)); }); test("with parameter with metadata and description", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'a': { 'metadata': { 'description': 'b' } } } }", fakeId); const parameterDefinitions: IParameterDefinition[] = dt.topLevelScope.parameterDefinitions; assert(parameterDefinitions); assert.deepStrictEqual(parameterDefinitions.length, 1); const pd0: IParameterDefinition = parameterDefinitions[0]; assert(pd0); assert.deepStrictEqual(pd0.nameValue.toString(), "a"); assert.deepStrictEqual(pd0.description, "b"); assert.deepStrictEqual(pd0.fullSpan, new Language.Span(18, 43)); }); }); suite("getParameterDefinition(string)", () => { test("with undefined", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.getParameterDefinition(<any>undefined); }); }); test("with undefined", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.getParameterDefinition(<any>undefined); }); }); test("with empty", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 
'integer' } } }", fakeId); assert.throws(() => { dt.topLevelScope.getParameterDefinition(""); }); }); test("with no parameters definition", () => { const dt = new DeploymentTemplate("{}", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getParameterDefinition("spam")); }); test("with unquoted non-match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getParameterDefinition("spam")); }); test("with one-sided-quote non-match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getParameterDefinition("'spam")); }); test("with quoted non-match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getParameterDefinition("'spam'")); }); test("with unquoted match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const apples: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("apples"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); assert.deepStrictEqual(apples.description, undefined); assert.deepStrictEqual(apples.fullSpan, new Language.Span(18, 30)); assert.deepStrictEqual(apples.nameValue.span, new Language.Span(18, 8), "Wrong name.span"); assert.deepStrictEqual(apples.nameValue.unquotedSpan, new Language.Span(19, 6), "Wrong name.unquotedSpan"); }); test("with one-sided-quote match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const apples: IParameterDefinition | 
undefined = dt.topLevelScope.getParameterDefinition("'apples"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); assert.deepStrictEqual(apples.description, undefined); assert.deepStrictEqual(apples.fullSpan, new Language.Span(18, 30)); }); test("with quoted match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const apples: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'apples'"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); assert.deepStrictEqual(apples.description, undefined); assert.deepStrictEqual(apples.fullSpan, new Language.Span(18, 30)); }); test("with case insensitive match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const apples: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'APPLES'"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); assert.deepStrictEqual(apples.description, undefined); assert.deepStrictEqual(apples.fullSpan, new Language.Span(18, 30)); }); test("with multiple case insensitive matches", () => { const dt = new DeploymentTemplate( stringify( { 'parameters': { 'apples': { 'type': 'string' }, 'APPLES': { 'type': 'integer' }, 'Apples': { 'type': 'securestring' } } }), fakeId); // Should always match the last one defined when multiple have the same name const APPLES: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'APPLES'"); if (!APPLES) { throw new Error("failed"); } assert.deepStrictEqual(APPLES.nameValue.toString(), "Apples"); const apples: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'APPles'"); if (!apples) { throw new Error("failed"); } 
assert.deepStrictEqual(apples.nameValue.toString(), "Apples"); }); // CONSIDER: Does JavaScript support this? It's low priority // test("with case insensitive match, Unicode", () => { // // Should always match the last one defined when multiple have the same name // const dt = new DeploymentTemplate("{ 'parameters': { 'Strasse': { 'type': 'string' }, 'Straße': { 'type': 'integer' } } }",fakeId); // const strasse: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'Strasse'"); // if (!strasse) { throw new Error("failed"); } // assert.deepStrictEqual(strasse.nameValue.toString(), "Straße"); // const straße: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'Straße'"); // if (!straße) { throw new Error("failed"); } // assert.deepStrictEqual(straße.nameValue.toString(), "Straße"); // const straße2: IParameterDefinition | undefined = dt.topLevelScope.getParameterDefinition("'STRASSE'"); // if (!straße2) { throw new Error("failed"); } // assert.deepStrictEqual(straße2.nameValue.toString(), "Straße"); // }); }); suite("findParameterDefinitionsWithPrefix(string)", () => { test("with undefined", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.findParameterDefinitionsWithPrefix(<any>undefined); }); }); test("with undefined", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.findParameterDefinitionsWithPrefix(<any>undefined); }); }); test("with empty", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const matches: IParameterDefinition[] = dt.topLevelScope.findParameterDefinitionsWithPrefix(""); 
assert(matches); assert.deepStrictEqual(matches.length, 2); const match0: IParameterDefinition = matches[0]; assert(match0); assert.deepStrictEqual(match0.nameValue.toString(), "apples"); assert.deepStrictEqual(match0.description, undefined); assert.deepStrictEqual(match0.fullSpan, new Language.Span(18, 30)); const match1: IParameterDefinition = matches[1]; assert(match1); assert.deepStrictEqual(match1.nameValue.toString(), "bananas"); assert.deepStrictEqual(match1.description, undefined); assert.deepStrictEqual(match1.fullSpan, new Language.Span(50, 32)); }); test("with prefix of one of the parameters", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const matches: IParameterDefinition[] = dt.topLevelScope.findParameterDefinitionsWithPrefix("ap"); assert(matches); assert.deepStrictEqual(matches.length, 1); const match0: IParameterDefinition = matches[0]; assert(match0); assert.deepStrictEqual(match0.nameValue.toString(), "apples"); assert.deepStrictEqual(match0.description, undefined); assert.deepStrictEqual(match0.fullSpan, new Language.Span(18, 30)); }); test("with prefix of none of the parameters", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); assert.deepStrictEqual(dt.topLevelScope.findParameterDefinitionsWithPrefix("ca"), []); }); test("with case insensitive match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'bananas': { 'type': 'integer' } } }", fakeId); const matches: IParameterDefinition[] = dt.topLevelScope.findParameterDefinitionsWithPrefix("APP"); assert(matches); assert.deepStrictEqual(matches.length, 1); const match0: IParameterDefinition = matches[0]; assert(match0); assert.deepStrictEqual(match0.nameValue.toString(), "apples"); assert.deepStrictEqual(match0.description, undefined); 
assert.deepStrictEqual(match0.fullSpan, new Language.Span(18, 30)); }); test("with case sensitive and insensitive match", () => { const dt = new DeploymentTemplate("{ 'parameters': { 'apples': { 'type': 'string' }, 'APPLES': { 'type': 'integer' } } }", fakeId); const matches: IParameterDefinition[] = dt.topLevelScope.findParameterDefinitionsWithPrefix("APP"); assert(matches); assert.deepStrictEqual(matches.length, 2); const match0: IParameterDefinition = matches[0]; assert(match0); assert.deepStrictEqual(match0.nameValue.toString(), "apples"); assert.deepStrictEqual(match0.description, undefined); assert.deepStrictEqual(match0.fullSpan, new Language.Span(18, 30)); const match1: IParameterDefinition = matches[1]; assert(match1); assert.deepStrictEqual(match1.nameValue.toString(), "APPLES"); assert.deepStrictEqual(match1.description, undefined); assert.deepStrictEqual(match1.fullSpan, new Language.Span(50, 31)); }); }); suite("getVariableDefinition(string)", () => { test("with undefined", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.getVariableDefinition(<any>undefined); }); }); test("with undefined", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.getVariableDefinition(<any>undefined); }); }); test("with empty", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); assert.throws(() => { dt.topLevelScope.getVariableDefinition(""); }); }); test("with no variables definition", () => { const dt = new DeploymentTemplate("{}", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getVariableDefinition("spam")); }); test("with unquoted non-match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } 
}", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getVariableDefinition("spam")); }); test("with one-sided-quote non-match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getVariableDefinition("'spam")); }); test("with quoted non-match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); assert.deepStrictEqual(undefined, dt.topLevelScope.getVariableDefinition("'spam'")); }); test("with unquoted match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); const apples: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("apples"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const value: Json.Value | undefined = Json.asStringValue(apples.value); if (!value) { throw new Error("failed"); } assert.deepStrictEqual(value.span, new Language.Span(27, 5)); assert.deepStrictEqual(value.toString(), "yum"); }); test("with one-sided-quote match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); const apples: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("'apples"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const value: Json.StringValue | undefined = Json.asStringValue(apples.value); if (!value) { throw new Error("failed"); } assert.deepStrictEqual(value.span, new Language.Span(27, 5)); assert.deepStrictEqual(value.toString(), "yum"); }); test("with quoted match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); const apples: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("'apples'"); if (!apples) { throw new 
Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const value: Json.StringValue | undefined = Json.asStringValue(apples.value); if (!value) { throw new Error("failed"); } assert.deepStrictEqual(value.span, new Language.Span(27, 5)); assert.deepStrictEqual(value.toString(), "yum"); }); test("with case insensitive match", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'bananas': 'good' } }", fakeId); const apples: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("'APPLES"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const value: Json.StringValue | undefined = Json.asStringValue(apples.value); if (!value) { throw new Error("failed"); } assert.deepStrictEqual(value.span, new Language.Span(27, 5)); assert.deepStrictEqual(value.toString(), "yum"); }); test("with multiple case insensitive matches", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'yum', 'APPLES': 'good' } }", fakeId); // Should always find the last definition, because that's what Azure does const APPLES: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("'APPLES'"); if (!APPLES) { throw new Error("failed"); } assert.deepStrictEqual(APPLES.nameValue.toString(), "APPLES"); const applesValue: Json.StringValue | undefined = Json.asStringValue(APPLES.value); if (!applesValue) { throw new Error("failed"); } assert.deepStrictEqual(applesValue.toString(), "good"); const apples: IVariableDefinition | undefined = dt.topLevelScope.getVariableDefinition("'APPles'"); if (!apples) { throw new Error("failed"); } assert.deepStrictEqual(apples.nameValue.toString(), "APPLES"); const value: Json.StringValue | undefined = Json.asStringValue(apples.value); if (!value) { throw new Error("failed"); } assert.deepStrictEqual(value.toString(), "good"); }); }); // end suite getVariableDefinition 
suite("findVariableDefinitionsWithPrefix(string)", () => { test("with undefined", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'APPLES', 'bananas': 88 } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.findVariableDefinitionsWithPrefix(<any>undefined); }); }); test("with undefined", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'APPLES', 'bananas': 88 } }", fakeId); // tslint:disable-next-line:no-any assert.throws(() => { dt.topLevelScope.findVariableDefinitionsWithPrefix(<any>undefined); }); }); test("with empty", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'APPLES', 'bananas': 88 } }", fakeId); const definitions: IVariableDefinition[] = dt.topLevelScope.findVariableDefinitionsWithPrefix(""); assert.deepStrictEqual(definitions.length, 2); const apples: IVariableDefinition = definitions[0]; assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const applesValue: Json.StringValue | undefined = Json.asStringValue(apples.value); if (!applesValue) { throw new Error("failed"); } assert.deepStrictEqual(applesValue.span, new Language.Span(27, 8)); assert.deepStrictEqual(applesValue.toString(), "APPLES"); const bananas: IVariableDefinition = definitions[1]; assert.deepStrictEqual(bananas.nameValue.toString(), "bananas"); const bananasValue: Json.NumberValue | undefined = Json.asNumberValue(bananas.value); assert.deepStrictEqual(bananasValue!.span, new Language.Span(48, 2)); }); test("with prefix of one of the variables", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'APPLES', 'bananas': 88 } }", fakeId); const definitions: IVariableDefinition[] = dt.topLevelScope.findVariableDefinitionsWithPrefix("ap"); assert.deepStrictEqual(definitions.length, 1); const apples: IVariableDefinition = definitions[0]; assert.deepStrictEqual(apples.nameValue.toString(), "apples"); const applesValue: Json.StringValue | undefined = 
Json.asStringValue(apples.value); if (!applesValue) { throw new Error("failed"); } assert.deepStrictEqual(applesValue.span, new Language.Span(27, 8)); assert.deepStrictEqual(applesValue.toString(), "APPLES"); }); test("with prefix of none of the variables", () => { const dt = new DeploymentTemplate("{ 'variables': { 'apples': 'APPLES', 'bananas': 88 } }", fakeId); assert.deepStrictEqual([], dt.topLevelScope.findVariableDefinitionsWithPrefix("ca")); }); }); suite("getContextFromDocumentLineAndColumnIndexes(number, number)", () => { test("with empty deployment template", () => { const dt = new DeploymentTemplate("", fakeId); const context = dt.getContextFromDocumentLineAndColumnIndexes(0, 0, undefined); assert(context); assert.equal(0, context.documentLineIndex); assert.equal(0, context.documentColumnIndex); assert.equal(0, context.documentCharacterIndex); }); }); suite("findReferences(Reference.Type, string)", () => { test("with parameter type and no matching parameter definition", () => { const dt = new DeploymentTemplate(`{ "parameters": { "pName": {} } }`, fakeId); const list: ReferenceList = findReferences(dt, DefinitionKind.Parameter, "dontMatchMe", dt.topLevelScope); assert(list); assert.deepStrictEqual(list.kind, DefinitionKind.Parameter); assert.deepStrictEqual(list.references, []); }); test("with parameter type and matching parameter definition", () => { const dt = new DeploymentTemplate(`{ "parameters": { "pName": {} } }`, fakeId); const list: ReferenceList = findReferences(dt, DefinitionKind.Parameter, "pName", dt.topLevelScope); assert(list); assert.deepStrictEqual(list.kind, DefinitionKind.Parameter); assert.deepStrictEqual(list.references.map(r => r.span), [new Language.Span(19, 5)]); }); test("with variable type and no matching variable definition", () => { const dt = new DeploymentTemplate(`{ "variables": { "vName": {} } }`, fakeId); const list: ReferenceList = findReferences(dt, DefinitionKind.Variable, "dontMatchMe", dt.topLevelScope); 
assert(list); assert.deepStrictEqual(list.kind, DefinitionKind.Variable); assert.deepStrictEqual(list.references.map(r => r.span), []); }); test("with variable type and matching variable definition", () => { const dt = new DeploymentTemplate(`{ "variables": { "vName": {} } }`, fakeId); const list: ReferenceList = findReferences(dt, DefinitionKind.Variable, "vName", dt.topLevelScope); assert(list); assert.deepStrictEqual(list.kind, DefinitionKind.Variable); assert.deepStrictEqual(list.references.map(r => r.span), [new Language.Span(18, 5)]); }); }); // findReferences suite("ReferenceInVariableDefinitionJSONVisitor", () => { suite("constructor(DeploymentTemplate)", () => { test("with undefined", () => { // tslint:disable-next-line:no-any assert.throws(() => { new ReferenceInVariableDefinitionsVisitor(<any>undefined); }); }); test("with undefined", () => { // tslint:disable-next-line:no-any assert.throws(() => { new ReferenceInVariableDefinitionsVisitor(<any>undefined); }); }); test("with deploymentTemplate", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); assert.deepStrictEqual(visitor.referenceSpans, []); }); testWithLanguageServer("expecting error: reference in variable definition", async function (this: ITestCallbackContext): Promise<void> { await testDiagnostics( { "variables": { "a": "[reference('test')]" }, }, { includeSources: [sources.expressions] }, [ "Error: reference() cannot be invoked inside of a variable definition. (arm-template (expressions))", "Warning: The variable 'a' is never used. 
(arm-template (expressions))" ]); }); testWithLanguageServer("expecting error: reference in variable definition inside user function", async function (this: ITestCallbackContext): Promise<void> { await testDiagnostics( { "variables": { "a": "[reference('test')]" }, }, { includeSources: [sources.expressions] }, [ "Error: reference() cannot be invoked inside of a variable definition. (arm-template (expressions))", "Warning: The variable 'a' is never used. (arm-template (expressions))" ]); }); }); suite("visitStringValue(Json.StringValue)", () => { test("with undefined", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); // tslint:disable-next-line:no-any assert.throws(() => { visitor.visitStringValue(<any>undefined); }); }); test("with undefined", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); // tslint:disable-next-line:no-any assert.throws(() => { visitor.visitStringValue(<any>undefined); }); }); test("with non-TLE string", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); const variables: Json.StringValue = Json.asObjectValue(dt.jsonParseResult.value)!.properties[0].nameValue; visitor.visitStringValue(variables); assert.deepStrictEqual(visitor.referenceSpans, []); }); test("with TLE string with reference() call", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[reference('test')]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); const dtObject: Json.ObjectValue | undefined = Json.asObjectValue(dt.jsonParseResult.value); const variablesObject: Json.ObjectValue | undefined = Json.asObjectValue(dtObject!.getPropertyValue("variables")); const tle: Json.StringValue | undefined = 
Json.asStringValue(variablesObject!.getPropertyValue("a")); visitor.visitStringValue(tle!); assert.deepStrictEqual(visitor.referenceSpans, [new Language.Span(24, 9)]); }); test("with TLE string with reference() call inside concat() call", () => { const dt = new DeploymentTemplate(`{ "variables": { "a": "[concat(reference('test'))]" } }`, fakeId); const visitor = new ReferenceInVariableDefinitionsVisitor(dt); const dtObject: Json.ObjectValue | undefined = Json.asObjectValue(dt.jsonParseResult.value); const variablesObject: Json.ObjectValue | undefined = Json.asObjectValue(dtObject!.getPropertyValue("variables")); const tle: Json.StringValue | undefined = Json.asStringValue(variablesObject!.getPropertyValue("a")); visitor.visitStringValue(tle!); assert.deepStrictEqual(visitor.referenceSpans, [new Language.Span(31, 9)]); }); }); }); suite("Incomplete JSON shouldn't cause crash", function (this: ISuiteCallbackContext): void { this.timeout(60000); async function exercisePositionContextAtEveryPointInTheDoc(json: string): Promise<void> { await exercisePositionContextAtRandomPointsInTheDoc(json, json.length + 1); // length+1 so we include past the last character as a position } async function exercisePositionContextAtRandomPointsInTheDoc(json: string, numberOfIndicesToTest: number): Promise<void> { if (numberOfIndicesToTest < 1) { // Take it as a probability of doing a single sample if (Math.random() > numberOfIndicesToTest) { return; } } for (let i = 0; i < numberOfIndicesToTest; ++i) { let index = i; if (numberOfIndicesToTest <= json.length) { index = Math.floor(Math.random() * (json.length + 1)); // length+1 so we include past the last character as a position } // console.log(`Testing index ${index}`); try { // Just make sure nothing throws let dt = new DeploymentTemplate(json, fakeId); let pc = dt.getContextFromDocumentCharacterIndex(index, undefined); pc.getReferences(); pc.getSignatureHelp(); pc.tleInfo; pc.getReferenceSiteInfo(true); pc.getHoverInfo(); 
pc.getCompletionItems(); } catch (err) { throw new Error(`exercisePositionContextAtRandomPointsInTheDoc: Threw at index ${i}:\n${json.slice(i)}<***HERE***>${json.slice(i)}`); } } } const template: string = `{ "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", "parameters": { "location": { "type": "string" }, "networkInterfaceName": { "type": "string" }, }, "variables": { "vnetId": "[resourceId(resourceGroup().name,'Microsoft.Network/virtualNetworks', parameters('virtualNetworkName'))]", }, "resources": [ { "name": "[parameters('networkInterfaceName')]", "type": "Microsoft.Network/networkInterfaces", "apiVersion": "2018-10-01", "location": "[parameters('location')]", "dependsOn": [ "[concat('Microsoft.Network/networkSecurityGroups/', parameters('networkSecurityGroupName'))]", "[concat('Microsoft.Network/virtualNetworks/', parameters('virtualNetworkName'))]", "[concat('Microsoft.Network/publicIpAddresses/', parameters('publicIpAddressName'))]" ], "properties": { "$test-commandToExecute": "[concat('cd /hub*/docker-compose; sudo docker-compose down -t 60; sudo -s source /set_hub_url.sh ', reference(parameters('publicIpName')).dnsSettings.fqdn, '; sudo docker volume rm ''dockercompose_cert-volume''; sudo docker-compose up')]", "ipConfigurations": [ { "name": "ipconfig1", "properties": { "subnet": { "id": "[variables('subnetRef')]" }, "privateIPAllocationMethod": "Dynamic", "publicIpAddress": { "id": "[resourceId(resourceGroup().name, 'Microsoft.Network/publicIpAddresses', parameters('publicIpAddressName'))]" } } } ] }, "tags": {} } ], "outputs": { "adminUsername": { "type": "string", "value": "[parameters('adminUsername')]" } } } `; test("https://github.com/Microsoft/vscode-azurearmtools/issues/193", async () => { // Just make sure nothing throws let modifiedTemplate = template.replace('"type": "string"', '"type": string'); let dt = await parseTemplate(modifiedTemplate); findReferences(dt, 
DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); }); test("Unended string", async () => { const json = "{ \""; let dt = await parseTemplate(json); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); }); test("No top-level object", async () => { const json = "\"hello\""; let dt = await parseTemplate(json); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); }); test("Malformed property name", async () => { const json = ` { "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", : { "nsgId": "something", "vnetId": "[resourceId(resourceGrou2p().name,'Microsoft.Network/virtualNetworks', parameters('virtualNetworkName'))]", "subnetRef": "[concat(variables('vne2tId'), '/subnets/', parameters('subnetName'))]" } }`; let dt = await parseTemplate(json); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); }); test("Malformed property", async () => { const json = ` { "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", /*missing prop name and colon*/ { "nsgId": "something", "vnetId": "[resourceId(resourceGrou2p().name,'Microsoft.Network/virtualNetworks', parameters('virtualNetworkName'))]", "subnetRef": "[concat(variables('vne2tId'), '/subnets/', parameters('subnetName'))]" } }`; let dt = await parseTemplate(json); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, 
"resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); }); test("typing character by character", async function (this: ITestCallbackContext): Promise<void> { if (DISABLE_SLOW_TESTS) { this.skip(); return; } // Just make sure nothing throws for (let i = 0; i < template.length; ++i) { let partialTemplate = template.slice(0, i); let dt = await parseTemplate(partialTemplate); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); await exercisePositionContextAtRandomPointsInTheDoc(template, 0.1); } }); test("typing backwards character by character", async function (this: ITestCallbackContext): Promise<void> { if (DISABLE_SLOW_TESTS) { this.skip(); return; } // Just make sure nothing throws for (let i = 0; i < template.length; ++i) { let partialTemplate = template.slice(i); let dt = await parseTemplate(partialTemplate); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); await exercisePositionContextAtRandomPointsInTheDoc(template, 0.1); } }); test("try parsing the document with a single character deleted (repeat through the whole document)", async function (this: ITestCallbackContext): Promise<void> { if (DISABLE_SLOW_TESTS) { this.skip(); return; } // Just make sure nothing throws for (let i = 0; i < template.length; ++i) { // Remove the single character at position i let partialTemplate = template.slice(0, i) + template.slice(i + 1); let dt = await parseTemplate(partialTemplate); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); await exercisePositionContextAtRandomPointsInTheDoc(template, 0.1); } }); test("exercise PositionContext at every point in the full json", async 
function (this: ITestCallbackContext): Promise<void> { if (DISABLE_SLOW_TESTS) { this.skip(); return; } // Just make sure nothing throws await exercisePositionContextAtEveryPointInTheDoc(template); }); test("Random modifications", async function (this: ITestCallbackContext): Promise<void> { if (DISABLE_SLOW_TESTS) { this.skip(); return; } // Just make sure nothing throws let modifiedTemplate: string = template; for (let i = 0; i < 1000; ++i) { if (modifiedTemplate.length > 0 && Math.random() < 0.5) { // Delete some characters let position = Math.random() * (modifiedTemplate.length - 1); let length = Math.random() * Math.max(5, modifiedTemplate.length); modifiedTemplate = modifiedTemplate.slice(position, position + length); } else { // Insert some characters let position = Math.random() * modifiedTemplate.length; let length = Math.random() * 5; let s = randomBytes(length).toString(); modifiedTemplate = modifiedTemplate.slice(0, position) + s + modifiedTemplate.slice(position); } let dt = await parseTemplate(modifiedTemplate); findReferences(dt, DefinitionKind.Parameter, "adminUsername", dt.topLevelScope); findReferences(dt, DefinitionKind.Variable, "resourceGroup", dt.topLevelScope); dt.getFunctionCounts(); await exercisePositionContextAtRandomPointsInTheDoc(template, 0.1); } }); }); //Incomplete JSON shouldn't cause crash suite("getMultilineStringCount", () => { test("TLE strings", async () => { const dt = await parseTemplate(`{ "abc": "[abc def]", "xyz": "[xyz qrs]" }`); assert.equal(dt.getMultilineStringCount(), 2); }); test("JSON strings", async () => { const dt = await parseTemplate(`{ "abc": "abc def" }`); assert.equal(dt.getMultilineStringCount(), 1); }); test("don't count escaped \\n, \\r", async () => { const dt = await parseTemplate(`{ "abc": "abc\\r\\ndef" }`); assert.equal(dt.getMultilineStringCount(), 0); }); }); suite("getMaxLineLength", () => { test("getMaxLineLength", async () => { const dt = await parseTemplate(`{ //345678 //345678901234567890 //345 
}`); const maxLineLength = dt.getMaxLineLength(); // Max line length isn't quite exact - it can also includes line break characters assert(maxLineLength >= 20 && maxLineLength <= 20 + 2); }); }); suite("getCommentsCount()", () => { test("no comments", async () => { // tslint:disable-next-line:no-any const dt = await parseTemplate(<any>{ "$schema": "foo", "contentVersion": "1.2.3 /*not a comment*/", "whoever": "1.2.3 //not a comment" }); assert.equal(dt.getCommentCount(), 0); }); test("block comments", async () => { // tslint:disable-next-line:no-any const dt = await parseTemplate(`{ "$schema": "foo", /* This is a comment */ "contentVersion": "1.2.3", "whoever": "1.2.3" /* This is a comment */ }`); assert.equal(dt.getCommentCount(), 2); }); test("single-line comments", async () => { // tslint:disable-next-line:no-any const dt = await parseTemplate(`{ "$schema": "foo", // This is a comment "contentVersion": "1.2.3", // Another comment "whoever": "1.2.3" // This is a comment }`); assert.equal(dt.getCommentCount(), 3); }); }); suite("apiProfile", () => { test("no apiProfile", async () => { const dt = await parseTemplate({ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#" }); assert.equal(dt.apiProfile, undefined); }); test("empty apiProfile", async () => { const dt = await parseTemplate({ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", apiProfile: "" }); assert.equal(dt.apiProfile, ""); }); test("non-string apiProfile", async () => { // tslint:disable-next-line: no-any const dt = await parseTemplate(<IDeploymentTemplate><any>{ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", apiProfile: false }); assert.equal(dt.apiProfile, undefined); }); test("valid apiProfile", async () => { const dt = await parseTemplate({ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "apiProfile": "2018–03-01-hybrid" 
}); assert.equal(dt.apiProfile, "2018–03-01-hybrid"); }); }); });
/*
 * Copyright MIT 1999
 */

/* a shared memory implementation of pipe */

#include <stdlib.h>

#include <xok/sys_ucall.h>
#include <xok/sysinfo.h>

#include <vos/fdtypes.h>
#include <vos/kprintf.h>
#include <vos/ipc.h>
#include <vos/vm.h>
#include <vos/errno.h>
#include <vos/locks.h>
#include <vos/assert.h>
#include <vos/ipcport.h>
#include <vos/fd.h>

/* Debug tracing: flip the 0 to 1 to route dprintf through kprintf. */
#define dprintf if (0) kprintf

/* spipe: a trusted pipe implemented in shared memory.  A conventional yield
 * lock is used for synchronization!  Also, once data is read by one reader,
 * it is removed (consuming, single-cursor semantics).
 */

#define SPIPESZ 2048     /* capacity of the circular buffer, in bytes */
#define SPIPE_CLOSED 0
#define SPIPE_OPEN 1

/* Shared pipe state.  This structure lives in a page shared between the
 * endpoints (spipe_open maps it from an ipcport), so every field below is
 * concurrently visible to all holders and must only be touched under `lock`.
 * buf is a circular buffer: head_ptr chases free_ptr; the pipe is empty when
 * head_ptr == free_ptr. */
typedef struct
{
  yieldlock_t lock;      /* trusted yield lock protecting all fields below */
  char status;           /* status of pipe: SPIPE_CLOSED or SPIPE_OPEN */
  u_short refcnt;        /* reference count used when sharing...
                          * ref cnt for fd is in the fd structure */
  u_short head_ptr;      /* ptr (offset into buf) to first unread char */
  u_short free_ptr;      /* ptr (offset into buf) to first free char */
  u_short bytes;         /* number of bytes currently in buffer */
  char buf[SPIPESZ];     /* the buffer */
} spipe_state_t;


/*
 * define (initialize) a spipe structure on the va given.
 * Caller supplies the shared page; this just resets all bookkeeping.
 */
void
spipe_def(u_int va)
{
  spipe_state_t *s = (spipe_state_t*) va;
  yieldlock_reset(&s->lock);
  s->status = SPIPE_CLOSED;
  s->refcnt = 0;
  s->head_ptr = 0;
  s->free_ptr = 0;
  s->bytes = 0;
}


/*
 * read up to nbyte bytes from the spipe into buffer.
 * Blocks (by yielding the CPU) while the pipe is empty, unless the fd is
 * non-blocking.  Returns the number of bytes read, or an error:
 *   V_BADFD      - not an spipe fd / no state attached
 *   V_NOTCONN    - pipe empty and we hold the only reference (peer gone)
 *   V_WOULDBLOCK - non-blocking fd and pipe empty
 */
static int
spipe_read (S_T(fd_entry) *fd, void *buffer, int nbyte)
{
  int size = 0;
  spipe_state_t *s;

  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);

  s = (spipe_state_t*)fd->state;
  yieldlock_acquire(&s->lock);

  /* pipe empty and we are the only endpoint left: the writer is gone, abort */
  if (s->head_ptr == s->free_ptr && s->refcnt <= 1)
  {
    yieldlock_release(&s->lock);
    RETERR(V_NOTCONN);
  }
  else if (FD_ISNB(fd) && (s->head_ptr == s->free_ptr))
  {
    yieldlock_release(&s->lock);
    RETERR(V_WOULDBLOCK);
  }

wait:
  if (s->head_ptr == s->free_ptr) /* blocking: spin-yield until data arrives */
  {
    dprintf("env %d: spipe: read waiting, we have %d %d\n",
            getpid(),s->free_ptr,s->head_ptr);
    /* re-check for a vanished peer each time around the wait loop */
    if (s->refcnt <= 1)
    {
      yieldlock_release(&s->lock);
      RETERR(V_NOTCONN);
    }
    /* drop both the pipe lock and (if shared) the fd lock before yielding so
     * the writer can make progress, then re-acquire in the same order */
    yieldlock_release(&s->lock);
    if (fds[fd->fd]->pri.shared) SR_UNLOCK(fds[fd->fd]);
    env_yield(-1);
    if (fds[fd->fd]->pri.shared) SR_LOCK(fds[fd->fd]);
    yieldlock_acquire(&s->lock);
    goto wait;
  }

  /* more to read: size = contiguous bytes available without wrapping */
  if (s->head_ptr < s->free_ptr)
    size = s->free_ptr - s->head_ptr;
  else
    size = SPIPESZ - s->head_ptr;

  /* requested size is less than available memory: truncate to nbyte */
  if (size > nbyte)
  {
    size = nbyte;
    memmove(buffer, &s->buf[s->head_ptr], size);
    s->head_ptr += size;
  }
  /* no need to wrap around */
  else if (s->head_ptr < s->free_ptr)
  {
    memmove(buffer, &s->buf[s->head_ptr], size);
    s->head_ptr += size;
  }
  /* need to wrap around, so two reads: tail of buf, then front of buf */
  else
  {
    u_int tsz;
    memmove(buffer, &s->buf[s->head_ptr], size);
    tsz = s->free_ptr;
    if (tsz > nbyte-size) tsz = nbyte-size;
    memmove(&((char*)buffer)[size],&s->buf[0], tsz);
    s->head_ptr = tsz;
    size += tsz;
  }
  s->bytes -= size;
  yieldlock_release(&s->lock);
  return size;
}


/*
 * write nbyte bytes from buffer into the spipe.
 * The write is all-or-nothing: if the buffer cannot hold nbyte more bytes
 * the call fails with V_PIPE rather than writing a partial record.
 * Returns nbyte on success, or an error:
 *   V_BADFD   - not an spipe fd / no state attached
 *   V_INVALID - negative nbyte
 *   V_NOTCONN - pipe already closed
 *   V_PIPE    - not enough free space for nbyte bytes
 */
static int
spipe_write (S_T(fd_entry) *fd, const void *buffer, int nbyte)
{
  spipe_state_t *s;
  u_int size;

  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);
  if (nbyte < 0) RETERR(V_INVALID);
  if (nbyte == 0) return 0;

  s = (spipe_state_t*)fd->state;
  yieldlock_acquire(&s->lock);

  /* already closed */
  if (s->status == SPIPE_CLOSED)
  {
    yieldlock_release(&s->lock);
    RETERR(V_NOTCONN);
  }

  /* if not enough space, return overflow */
  if (s->bytes + nbyte > SPIPESZ)
  {
    yieldlock_release(&s->lock);
    RETERR(V_PIPE);
  }

  /* size = contiguous free bytes available without wrapping */
  if (s->head_ptr > s->free_ptr)
    size = s->head_ptr - s->free_ptr;
  else
    size = SPIPESZ - s->free_ptr;

  /* available space w/o wrapping is greater than bytes needed */
  if (size >= nbyte)
  {
    memmove(&s->buf[s->free_ptr], buffer, nbyte);
    s->free_ptr += nbyte;
    if (s->free_ptr == SPIPESZ) s->free_ptr = 0;
  }
  /* need to wrap: fill to end of buf, remainder at the front */
  else
  {
    memmove(&s->buf[s->free_ptr], buffer, size);
    memmove(&s->buf[0], &((char*)buffer)[size], nbyte-size);
    s->free_ptr = nbyte-size;
  }
  s->bytes += nbyte;
  dprintf("env %d: spipe: after write on %d, free: %d head: %d, ref %d\n",
          getpid(),va2ppn((u_int)s),s->free_ptr,s->head_ptr,s->refcnt);
  yieldlock_release(&s->lock);
  return nbyte;
}


/*
 * close state for the final time (ref cnt expired).
 * Marks the shared pipe CLOSED once no endpoint holds a reference.
 */
static int
spipe_close_final (S_T(fd_entry) *fd)
{
  spipe_state_t *s;

  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);

  s = (spipe_state_t*)fd->state;
  yieldlock_acquire(&s->lock);
  if (s->status == SPIPE_CLOSED)
  {
    yieldlock_release(&s->lock);
    return 0;
  }
  if (s->refcnt == 0)
    s->status = SPIPE_CLOSED;
  dprintf("env %d: spipe: close_final called, pipe status %d\n",
          getpid(),s->status);
  yieldlock_release(&s->lock);
  return 0;
}


/*
 * close fd: drop this endpoint's reference on the shared pipe.
 * The pipe itself is only marked closed by spipe_close_final.
 */
static int
spipe_close (S_T(fd_entry) *fd)
{
  spipe_state_t *s;

  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);

  s = (spipe_state_t*)fd->state;
  yieldlock_acquire(&s->lock);
  if (s->status == SPIPE_OPEN)
    s->refcnt--;
  dprintf("env %d: spipe: close called, refcnt decremented to %d\n",
          getpid(), s->refcnt);
  yieldlock_release(&s->lock);
  return 0;
}


/*
 * open fd: only names of the form "/dev/ipcport<pid>" are accepted.
 * The shared state page is taken from the target environment's ipcport
 * mapping; the pipe is marked OPEN and this endpoint's reference counted.
 */
static int
spipe_open (S_T(fd_entry) *fd, const char *name, int flags, mode_t mode)
{
  spipe_state_t *s;

  if (strncmp(name,"/dev/ipcport",12)==0)
  {
    int pid = atoi(&name[12]);
    if (pid == -1) RETERR(V_BADFD);
    if (ipcports[envidx(pid)].pte == 0) RETERR(V_BADFD);

    fd->state = (void*) ipcports[envidx(pid)].va;
    s = (spipe_state_t*)fd->state;

    yieldlock_acquire(&s->lock);
    if (s->status == SPIPE_CLOSED)
      s->status = SPIPE_OPEN;
    /* compiler barrier: keep the status store ordered before the refcnt
     * increment in the emitted code */
    asm volatile("" ::: "memory");
    s->refcnt++;
    dprintf("env %d: spipe: open: spipe on %d, refcnt incremented to %d\n",
            getpid(), va2ppn((u_int)s), s->refcnt);
    dprintf("env %d: spipe: open: spipe is at 0x%x, %d\n",
            getpid(), (u_int)s, va2ppn((u_int)s));
    yieldlock_release(&s->lock);
  }
  else
    RETERR(V_BADFD);

  return fd->fd;
}


/*
 * verify state: sanity-check that this fd is a usable spipe fd.
 */
static int
spipe_verify(S_T(fd_entry) *fd)
{
  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);
  return 0;
}


/*
 * duplicate fd and state: bump the shared refcnt for the new environment.
 * Fails with V_NOTCONN if the pipe is already closed.
 */
static int
spipe_incref(S_T(fd_entry) *fd, u_int new_envid)
{
  spipe_state_t *s;

  if (fd->type != FD_TYPE_SPIPE) RETERR(V_BADFD);
  if (fd->state == 0L) RETERR(V_BADFD);

  s = (spipe_state_t*)fd->state;
  yieldlock_acquire(&s->lock);
  if (s->status == SPIPE_CLOSED)
  {
    yieldlock_release(&s->lock);
    RETERR(V_NOTCONN);
  }
  s->refcnt++;
  dprintf("env %d: spipe: dup called, spipe on %d, refcnt incremented to %d\n",
          getpid(), va2ppn((u_int)s), s->refcnt);
  yieldlock_release(&s->lock);
  return 0;
}


/* fd operations vector for the spipe fd type */
static fd_op_t const spipe_ops =
{
  spipe_verify,
  spipe_incref,
  spipe_open,
  spipe_read,
  spipe_write,
  NULL, /* readv */
  NULL, /* writev */
  NULL, /* lseek */
  NULL, /* select */
  NULL, /* select_pred */
  NULL, /* ioctl */
  NULL, /* fcntl */
  NULL, /* flock */
  spipe_close,
  spipe_close_final,
  NULL, /* dup */
  NULL, /* fstat */
  NULL, /* socket */
  NULL, /* bind */
  NULL, /* connect */
  NULL, /* accept */
  NULL, /* listen */
  NULL, /* sendto */
  NULL  /* recvfrom */
};

/* call this to initialize spipe fd stuff */
void
spipe_init()
{
  register_fd_ops(FD_TYPE_SPIPE, &spipe_ops);
}
#include <bits/stdc++.h>
using namespace std;
typedef long long LL;

// Numbers the cells of an n x n board so that all cells whose (row + col)
// is even come first, in row-major order, followed by all odd-parity cells.
// For each of q queries (row y, column x) prints that cell's number.
int main() {
    int n, q;
    scanf("%d%d", &n, &q);
    // how many even-parity cells there are: numbering of odd-parity
    // cells starts right after them
    const LL oddOffset = ((LL)n * n + 1) / 2;
    for (int query = 0; query < q; ++query) {
        int y, x;
        scanf("%d%d", &y, &x);
        // row-major index of the cell (1-based), halved to count only
        // the cells of the same parity up to and including this one
        LL rowMajor = (LL)(y - 1) * n + x;
        LL cellNumber = (rowMajor + 1) / 2;
        if ((y + x) % 2 == 1) {
            cellNumber += oddOffset;
        }
        printf("%lld\n", cellNumber);
    }
}
/** * External utility methods used in Ballerina GraphQL listener. */ public final class ListenerUtils { private static final String HTTP_SERVICE = "graphql.http.service"; private static final String GRAPHIQL_SERVICE = "graphql.graphiql.service"; private static final String SAMPLE_URL = "http://localhost:9000/"; public static final String GRAPHIQL_RESOURCE = "graphiql.html"; private static final String REGEX_URL = "${url}"; private static final String FORWARD_SLASH = "/"; private ListenerUtils() {} public static void attachHttpServiceToGraphqlService(BObject graphqlService, BObject httpService) { graphqlService.addNativeData(HTTP_SERVICE, httpService); } public static void attachGraphiqlServiceToGraphqlService(BObject graphqlService, BObject httpService) { graphqlService.addNativeData(GRAPHIQL_SERVICE, httpService); } public static Object getHttpServiceFromGraphqlService(BObject graphqlService) { Object httpService = graphqlService.getNativeData(HTTP_SERVICE); if (httpService instanceof BObject) { return httpService; } return null; } public static Object getGraphiqlServiceFromGraphqlService(BObject graphqlService) { Object graphiqlService = graphqlService.getNativeData(GRAPHIQL_SERVICE); if (graphiqlService instanceof BObject) { return graphiqlService; } return null; } public static Object validateGraphiqlPath(BString path) { String uri = SAMPLE_URL + path; try { new URL(uri).toURI(); return null; } catch (URISyntaxException | MalformedURLException e) { return createError("Invalid path provided for GraphiQL client", ERROR_TYPE); } } public static BString getBasePath(Object serviceName) { if (serviceName instanceof BArray) { List<String> strings = Arrays.stream(((BArray) serviceName).getStringArray()).map( ListenerUtils::unescapeValue).collect(Collectors.toList()); String basePath = String.join(FORWARD_SLASH, strings); return sanitizeBasePath(basePath); } else { String path = ((BString) serviceName).getValue().trim(); if (path.startsWith(FORWARD_SLASH)) { path = 
path.substring(1); } String[] pathSplits = path.split(FORWARD_SLASH); List<String> strings = Arrays.stream(pathSplits).map(ListenerUtils::unescapeValue).collect(Collectors.toList()); String basePath = String.join(FORWARD_SLASH, strings); return sanitizeBasePath(basePath); } } public static String unescapeValue(String segment) { if (!segment.contains("\\")) { return segment.trim(); } return segment.replace("\\", "").trim(); } public static BString sanitizeBasePath(String basePath) { basePath = basePath.replace("//", FORWARD_SLASH); return StringUtils.fromString(basePath.trim()); } public static Object getHtmlContentFromResources(BString url) { InputStream htmlAsStream = ClassLoader.getSystemResourceAsStream(GRAPHIQL_RESOURCE); try { byte[] bytes = htmlAsStream.readAllBytes(); String htmlAsString = new String(bytes, StandardCharsets.UTF_8); htmlAsString = htmlAsString.replace(REGEX_URL, url.getValue()); return StringUtils.fromString(htmlAsString); } catch (IOException e) { return createError("Error occurred while loading the GraphiQL client", ERROR_TYPE); } } }
<gh_stars>0 package com.carbonsensors.dto; import com.carbonsensors.model.projection.SensorMetrics; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; @AllArgsConstructor @NoArgsConstructor @Getter @ApiModel(description = "Sensor Metrics") public class SensorMetricsDto { @ApiModelProperty(notes = "Max CO2 quantity measurement within the last 30 days", example = "200.0") private Double maxLast30Days; @ApiModelProperty(notes = "CO2 quantity measurement average within the last 30 days", example = "15.5") private Double avgLast30Days; public static SensorMetricsDto fromSensorMetrics(SensorMetrics sensorMetrics) { SensorMetricsDto dto = null; if (sensorMetrics != null) { Double maxLastNDays = sensorMetrics.getMaxLastNDays() != null ? sensorMetrics.getMaxLastNDays() : 0d; Double avgLastNDays = sensorMetrics.getAverageLastNDays() != null ? sensorMetrics.getAverageLastNDays() : 0d; dto = new SensorMetricsDto(maxLastNDays, avgLastNDays); } return dto; } }
Having originally been scheduled to celebrate Thanksgiving by taking to the stratosphere, SpaceX's launch was aborted at the last moment today. The Falcon 9 rocket had been scheduled to take off from Florida's Cape Canaveral Air Force Station sometime during a 65-minute launch window starting at 5:39 PM ET. The aborted launch was initially set to occur earlier this week, but SpaceX missed its last launch window due to a technical glitch. "Saw pressure fluctuations on Falcon boost stage liquid oxygen tank," SpaceX CEO and founder Elon Musk wrote on Twitter at the time. "Want to be super careful, so pushing launch to Thurs." A significant milestone for SpaceX Had today's launch gone smoothly, it could've been a significant milestone both for SpaceX and for the commercial space transportation industry more broadly. Instead, engineers will need to figure out what went wrong before scheduling another launch. Fortunately for both SpaceX and the company's client, the payload doesn't appear to have been damaged. SpaceX's Falcon 9 would have blasted the SES-8 communications satellite — owned by Luxembourg-based SES World Skies — into geostationary orbit. It's the first time SpaceX has launched a commercial communications satellite, and will set the stage for the company to perform subsequent launches that currently take place overseas. "This launch is obviously very important to the future of SpaceX," Musk told reporters on Sunday. "We're very appreciative that SES would place a bet on SpaceX here." So far, the upgraded Falcon 9 (known as version 1.1 of the rocket) hasn't boasted a stellar track record. The 224-foot-tall rocket was initially tested with the launch of a Canadian weather satellite earlier this year. The Falcon 9 completed its mission, but not without failing at a key maneuver that'll be necessary for the SES-8 mission. 
But even as SpaceX attempts to break into the commercial satellite business, they're already keeping busy with government projects: the company has a $1.6 billion NASA contract to complete 12 cargo resupply flights. Update: As engines were starting and the launch timer was counting down to zero, the Falcon 9 aborted liftoff and shut down its engines. Engineers haven't yet said why — it's quite likely they don't know — but the live feed's announcers say that they're poring through data retrieved from the rocket. Update 2: SpaceX says the rocket is "safe" and that its flight computer automatically shut down the launch with about a second to go. Because it has a 65-minute launch window, engineers are looking at whether they can simply refuel and try again — otherwise they'll have another launch window available tomorrow around the same time of day. Elon Musk has tweeted that everything "Seems ok on closer inspection." Launch aborted by autosequence due to slower than expected thrust ramp. Seems ok on closer inspection. Cycling countdown. — Elon Musk (@elonmusk) November 28, 2013 Update 3: As of 6:22pm ET, the countdown appears to be back on, with the rocket ready to launch about 20 minutes from now. Update 4: The latest launch attempt was aborted just 4 seconds prior to liftoff. Elon Musk tweeted a few minutes prior that it would "probably [be] a few days before next attempt" if this most recent launch was aborted. Indeed, hosts on the SpaceX livestream have confirmed that they are taking "the safe path," with no further plans to launch today. The rocket will be de-tanked and return to horizontal position, with another launch attempt taking place within a few days. Elon Musk confirmed on twitter that they called manual abort, saying it was "better to be paranoid and wrong."
import java.util.Date; public class EmployeeAppraisal { private Date appraisalDate; private int appraisalScore; public EmployeeAppraisal(Date appraisalDate, int appraisalScore) { this.appraisalDate = appraisalDate; this.appraisalScore = appraisalScore; } public Date getAppraisalDate() { return appraisalDate; } public void setAppraisalDate(Date appraisalDate) { this.appraisalDate = appraisalDate; } public int getAppraisalScore() { return appraisalScore; } public void setAppraisalScore(int appraisalScore) { this.appraisalScore = appraisalScore; } @Override public String toString() { return "EmployeeAppraisal [appraisalDate=" + appraisalDate + ", appraisalScore=" + appraisalScore + "]"; } }
/** * Copyright 2015 IBM Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.bluelistproxy.internal; import java.io.InputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; import java.util.Random; import java.util.logging.Logger; import org.apache.commons.codec.binary.Hex; import com.cloudant.client.api.Database; import com.ibm.json.java.JSONArray; import com.ibm.json.java.JSONObject; /** * Processes all database permissions related requests. * This includes set permissions (create _users database, add _users database entry for user, * update database permissions for user) and remove permissions (update database permissions for user * and remove _users database entry for user). */ public class PermissionsHandler { private static final String CLASS_NAME = PermissionsHandler.class.getName(); private static final Logger logger = Logger.getLogger(CLASS_NAME); /** * Update permissions for user for this database: * - if _users database does not exist create it and the view * - if entry for user does not exist in _users database, add user * - if user does not have admins permissions for database, add the permissions * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the permissions could not be set. 
*/ public static void setPermissions(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "setPermissions"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); // Start with creating the _users database and proceed to updating the permissions createUsersDatabase(userName, databaseName); logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Remove permissions for user for this database: * - clear admins permissions for database * - remove user from _users database * - clear user from user credentials cache * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the permissions could not be removed. */ public static void removePermissions(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "removePermissions"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); // Initialize flags/exceptions for cleanup in case of error BlueListProxyException rootException = null; try { // Remove this user's access to the database removeUserAccess(userName, databaseName); } catch(BlueListProxyException blpe) { logger.fine("An error occurred removing access to database("+databaseName+") for user ("+userName+"); response error = " + blpe.getMessage()); rootException = blpe; } try { // Delete the user from the user's database deleteUserEntry(userName, databaseName); } catch(BlueListProxyException blpe) { logger.fine("An error occurred removing user ("+userName+") from the _users database; response error = " + blpe.getMessage()); if (rootException == null) rootException = blpe; } // Clear the user credentials from the cache KeyPassManager.getInstance().clearCredentials(userName); // If there was an exception, throw the original exception now if (rootException != null) { throw rootException; } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Create the _users database. 
* * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the _users database could not be created. */ public static void createUsersDatabase(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "createUsersDatabase"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); // See if the _users database currently exists boolean dbExists = BlueListProxyUtils.dbExists("_users"); // If the database does not exist, create it and add the view if ( !dbExists ) { // Create the _users database try { logger.fine("Creating database: _users"); KeyPassManager.getInstance().getAdminCloudantClient().createDB("_users"); logger.fine("Created _users database; adding view"); // Create the _users database view createUsersDatabaseView(userName, databaseName); } catch(BlueListProxyException blpe) { logger.severe("Failed to create database (_users); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to create database (_users); error = " + e.getMessage()); throw new BlueListProxyException("Failed to create database (_users)", e); } } // If the database does exist, add/update user else { // Add/update user boolean userDocExists = BlueListProxyUtils.dbDocExists("_users", "org.couchdb.user:" + userName); // If the _users document already exists, update permissions if (userDocExists) { logger.fine("User ("+userName+") _users database info exists; Updating access"); addUserAccess(userName, databaseName); } // If the _users document does not exist, create it else { logger.fine("User ("+userName+") _users database info does not exist; Creating it and adding access"); createUserEntry(userName, databaseName); } } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Create the _users database view. 
* * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the _users database view could not be created. */ public static void createUsersDatabaseView(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "createUsersDatabaseView"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); final String VIEW_NAME = "_design/_imfdata_usersview"; // Create the view try { // Create view with map function Map<String,Object> users = new HashMap<String,Object>(1); users.put("map", "function(doc) {\n emit(doc._id, doc);\n}"); Map<String,Object> views = new HashMap<String,Object>(1); views.put("users", users); Map<String,Object> viewddoc = new HashMap<String,Object>(2); viewddoc.put("_id", VIEW_NAME); viewddoc.put("views", views); logger.fine("Creating _users view: " + VIEW_NAME); Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false); db.save(viewddoc); logger.fine("Created _users database view"); // Create the user entry createUserEntry(userName, databaseName); } catch(BlueListProxyException blpe) { logger.severe("Failed to create users view for database (_users); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to create users view for database (_users); error = " + e.getMessage()); throw new BlueListProxyException("Failed to create users view for database (_users)", e); } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Create _users database entry for specific user. * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the _users database entry could not be created. 
*/ public static void createUserEntry(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "createUserEntry"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); final String DOC_NAME = "org.couchdb.user:" + userName; // Generate password, salt, and encrypted password byte[] randomBytes = new byte[8]; new Random().nextBytes(randomBytes); String password = Hex.encodeHexString(randomBytes); new Random().nextBytes(randomBytes); String salt = Hex.encodeHexString(randomBytes); String encryptedPass_hex; MessageDigest md; try{ encryptedPass_hex = KeyPassManager.getInstance().getCryptoUtil().encrypt(password, salt); md = MessageDigest.getInstance("SHA1"); md.update((password+salt).getBytes()); } catch(NoSuchAlgorithmException nsae) { logger.severe("Exception caught generating password for user ("+userName+"); exception = " + nsae.getMessage()); throw new BlueListProxyException("Exception caught generating password for user ("+userName+")", nsae); } byte [] password_sha = md.digest(); String password_sha_hex = Hex.encodeHexString(password_sha); // Create request body JSONObject body_credentials = new JSONObject(); body_credentials.put("_id", DOC_NAME); body_credentials.put("name", userName); body_credentials.put("password", <PASSWORD>); body_credentials.put("salt", salt); body_credentials.put("password_sha", password_sha_hex); body_credentials.put("roles", new JSONArray()); body_credentials.put("type", "user"); // Create the _users document try { // Create _users document logger.fine("Creating _users document: " + DOC_NAME); Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false); db.save(body_credentials); // Add permissions logger.fine("_users database document for user ("+userName+") created; Adding access."); addUserAccess(userName, databaseName); } catch(BlueListProxyException blpe) { logger.severe("Failed to create database document 
(_users/org.couchdb.user:"+userName+"); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to create database document (_users/org.couchdb.user:"+userName+"); error = " + e.getMessage()); throw new BlueListProxyException("Failed to create database document (_users/org.couchdb.user:"+userName+")", e); } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Update user permissions for database. * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the database permissions could not be updated. */ public static void addUserAccess(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "addUserAccess"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); final String DOC_NAME = "_security"; // Obtain the database security document; it should be there InputStream jsonStream = null; try { // Get security document logger.fine("Retrieving _security document for database " + databaseName); Database db = KeyPassManager.getInstance().getAdminCloudantClient().database(databaseName, false); jsonStream = db.find(DOC_NAME); JSONObject jsonBody = JSONObject.parse(jsonStream); // Determine if the security document for this database already exists boolean existingSecurityDoc = false; if (jsonBody.containsKey("couchdb_auth_only") || jsonBody.containsKey("admins") || jsonBody.containsKey("members")) { existingSecurityDoc = true; } // Update security admins info for this user to give admins access boolean existingMember = false; jsonBody.put("couchdb_auth_only", true); JSONObject admins = (JSONObject)jsonBody.get("admins"); if (admins == null){ admins = new JSONObject(); JSONArray namesArray = new JSONArray(); namesArray.add(userName); admins.put("names", namesArray); jsonBody.put("admins", admins); } else { JSONArray namesArray = (JSONArray)admins.get("names"); if (namesArray != null) { existingMember = 
namesArray.contains(userName); if (existingMember == false) { namesArray.add(userName); } } else { namesArray = new JSONArray(); namesArray.add(userName); admins.put("names", namesArray); } } // If member does not already exist, then update the permissions if (existingMember == false) { JSONObject members = (JSONObject)jsonBody.get("members"); // Update security members info for this user to give admins access if (members == null && existingSecurityDoc == false) { JSONArray namesArray = new JSONArray(); JSONArray rolesArray = new JSONArray(); rolesArray.add("_admin"); members = new JSONObject(); members.put("names", namesArray); members.put("roles", rolesArray); jsonBody.put("members", members); } Object idObj = jsonBody.get("_id"); if (!(idObj instanceof String)) jsonBody.put("_id", DOC_NAME); // Store the updated document logger.fine("Setting permissions for database: " + databaseName); if (existingSecurityDoc) db.update(jsonBody); else db.save(jsonBody); logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") set successfully."); } // User already exists as member else { logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") already exist; nothing more to do"); } } catch(BlueListProxyException blpe) { logger.severe("Failed to set permissions for database ("+databaseName+"); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to set permissions for database ("+databaseName+"); error = " + e.getMessage()); throw new BlueListProxyException("Failed to set database "+databaseName+" permissions", e); } finally { if (jsonStream != null) { try { jsonStream.close(); } catch(Exception e) {} } } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Delete _users database entry for specific user. * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the _users database entry could not be deleted. 
*/ public static void deleteUserEntry(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "deleteUserEntry"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); final String DOC_NAME = "org.couchdb.user:" + userName; // Get user credentials InputStream jsonStream = null; try { // See if _users document exists if ( BlueListProxyUtils.dbDocExists("_users", DOC_NAME) ) { logger.fine("Retrieving _users document: " + DOC_NAME); Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false); jsonStream = db.find(DOC_NAME); JSONObject jsonBody = JSONObject.parse(jsonStream); logger.fine("_users database document for user ("+userName+") retrieved; removing it."); db.remove(jsonBody); logger.fine("_users database entry for user ("+userName+") deleted"); } else { logger.fine("_users database entry for user ("+userName+") does not exist; nothing more to do"); } } catch(BlueListProxyException blpe) { logger.severe("Failed to delete _users database document ("+DOC_NAME+"); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to delete _users database document ("+DOC_NAME+"); error = " + e.getMessage()); throw new BlueListProxyException("Failed to delete _users database document ("+DOC_NAME+")", e); } finally { if (jsonStream != null) { try { jsonStream.close(); } catch(Exception e) {} } } logger.exiting(CLASS_NAME, METHOD_NAME); } /** * Remove user permissions for database. * * @param userName The user name * @param databaseName The database name * @throws BlueListProxyException Thrown if the database permissions could not be removed. 
*/ public static void removeUserAccess(String userName, String databaseName) throws BlueListProxyException { final String METHOD_NAME = "removeUserAccess"; logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName}); final String DOC_NAME = "_security"; // Obtain the database security document; it should be there InputStream jsonStream = null; try { // See if _security document exists if ( BlueListProxyUtils.dbDocExists(databaseName, DOC_NAME) ) { // Get security document logger.fine("Retrieving _security document for database " + databaseName); Database db = KeyPassManager.getInstance().getAdminCloudantClient().database(databaseName, false); jsonStream = db.find(DOC_NAME); JSONObject jsonBody = JSONObject.parse(jsonStream); // Update security admins info for this user to give admins access boolean existingMember = false; JSONObject admins = (JSONObject)jsonBody.get("admins"); if (admins != null){ JSONArray namesArray = (JSONArray)admins.get("names"); existingMember = namesArray.contains(userName); if (existingMember == true) { namesArray.remove(userName); } } // If member exists, then update the permissions if (existingMember == true) { Object idObj = jsonBody.get("_id"); if (!(idObj instanceof String)) jsonBody.put("_id", DOC_NAME); Object revObj = jsonBody.get("_rev"); // Store the updated document logger.fine("Updating _security document for database: " + databaseName); if (revObj instanceof String) db.update(jsonBody); else db.save(jsonBody); logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") updated successfully."); } // User already does not exist as member else { logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") does not exist; nothing more to do"); } } // User already does not exist as member else { logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") do not exist; nothing more to do"); } } catch(BlueListProxyException blpe) { logger.severe("Failed 
to remove permissions for database ("+databaseName+"); error = " + blpe.getMessage()); throw blpe; } catch(Exception e) { logger.severe("Failed to remove permissions for database ("+databaseName+"); error = " + e.getMessage()); throw new BlueListProxyException("Failed to remove database "+databaseName+" permissions", e); } finally { if (jsonStream != null) { try { jsonStream.close(); } catch(Exception e) {} } } logger.exiting(CLASS_NAME, METHOD_NAME); } }
/*
 * SRM 686 Div2: SegmentsAndPoints
 * (header previously said SRM 186; the class name carries the correct number)
 */
public class SRM686SegmentsAndPoints {

    public static void main(String[] args) {
        System.out.println(isPossible(new int[]{1, 2}, new int[]{0, 0}, new int[]{1, 3}));
        System.out.println(isPossible(new int[]{0}, new int[]{2}, new int[]{3}));
        System.out.println(isPossible(new int[]{0, 1, 2}, new int[]{0, 0, 1}, new int[]{1, 2, 1}));
        System.out.println(isPossible(new int[]{0, 1}, new int[]{-1, 0}, new int[]{0, 0}));
        System.out.println(isPossible(
                new int[]{434, 63, 241, 418, -380, -46, 397, -205, -262, -282, 260, -106, -389, -286, 422, -75, 127, 382, 52, -383},
                new int[]{-447, -226, -411, 287, -83, -228, -390, 358, 422, 395, -461, -112, 49, 75, -160, -152, 372, -447, -337, -362},
                new int[]{-102, 348, -70, 466, 168, -61, -389, 469, 433, 471, -75, -41, 52, 236, 299, -48, 383, -353, 346, -217}));
    }

    /**
     * Returns "Possible" when every point p[i] lies inside at least one of the
     * closed segments [l[j], r[j]], and "Impossible" otherwise.
     *
     * FIX: the original enumerated every integer inside every segment into a
     * HashMap (O(sum of segment lengths), and relied on an import not present in
     * this file).  A direct containment check per point is equivalent and runs
     * in O(p.length * l.length) regardless of segment width.
     *
     * @param p points to cover
     * @param l left endpoints of the segments (parallel to r)
     * @param r right endpoints of the segments
     * @return "Possible" or "Impossible"
     */
    public static String isPossible(int[] p, int[] l, int[] r) {
        for (int point : p) {
            boolean covered = false;
            for (int i = 0; i < l.length; i++) {
                // empty segments (r[i] < l[i]) simply never match, as before
                if (l[i] <= point && point <= r[i]) {
                    covered = true;
                    break;
                }
            }
            if (!covered) {
                return "Impossible";
            }
        }
        return "Possible";
    }
}
import os, os.path
import shlex
import sys

from . import debug, info, warning, error, fatal
from .tools import *


def _path_sub(src, dest, subst='"${src}"'):
    """Rewrite dest so that any occurrence of src is replaced by a shell
    variable reference (subst), stripping redundant separators.

    If src does not occur in dest, the whole dest is shell-quoted instead.
    """
    if src in dest:
        b, e = dest.split(src, 1)
        # Drop leading separators from the tail so os.path.join can re-insert one.
        while e.startswith(os.path.sep):
            e = e[1:]
        ps = []
        y = ps.append
        if b:
            y(b)
        y(subst)
        if e:
            y(e)
        return os.path.join(*ps)
    else:
        return shlex.quote(dest)


def _move(src, dest):
    """Emit a bash snippet that mkdir-and-moves the contents of src into dest.

    Returns '' when src and dest resolve to the same path (no-op).
    """
    assert os.path.exists(src)
    if os.path.exists(dest):
        assert os.path.isdir(dest)
    if os.path.relpath(src) == os.path.relpath(dest):
        return ''
    shsrc = shlex.quote(src)
    shdest = _path_sub(src, dest)
    syntax = '''src={shsrc}
dest={shdest}
[[ -d "$dest" ]] || mkdir -p "$dest"
[[ -d "$src" ]] && $MV -t "$dest" "$src"/*.* || file_error "$src"
'''.format(**locals())
    return syntax


def hier_arrange(*args, prefix='', init='', **kwargs):
    """Generate the lines of a bash script that arranges the given directories
    into size-limited volumes.

    args    -- directories to arrange (defaults to '.')
    prefix  -- per-volume destination prefix; may contain a '{}' placeholder
               for the volume number (one is appended if absent)
    init    -- custom script preamble; when given, the default shebang header
               and the trailing cleanup 'find -empty -delete' are omitted
    kwargs  -- forwarded to chunk(); 'do_sort' (default True) controls whether
               the per-file dest from chunk() is honored
    """
    if not args:
        args = ('.',)
    if args == ('.',):
        fargs = ''
    else:
        fargs = ' '.join(shlex.quote(a) for a in args)
    do_sort = kwargs.pop('do_sort', True)
    chunks = chunk(*args, **kwargs)  # returns a list of (size, (src, dest)) with dest=None for no change
    if not chunks:
        # PEP 479: 'raise StopIteration' inside a generator becomes a
        # RuntimeError on Python >= 3.7; a bare return ends iteration cleanly.
        return
    total_size = sum(s for s, _ in chunks)
    if prefix:
        try:
            # If formatting with the volume number changes nothing, the prefix
            # has no placeholder yet, so append one.
            if prefix.format(0) == prefix:
                prefix += '{}'
        except Exception:
            raise ValueError("Poorly-formed prefix string {}".format(prefix))
        if os.path.sep not in prefix:
            prefix += os.path.sep
    if init:
        yield init
    else:
        yield '''#! /bin/bash
set -e

function file_error() { echo "$@" not a directory, ignoring >&2; }
'''
    if sys.platform.startswith('darwin'):
        # macOS: rely on GNU coreutils (gmv/gfind) for -t and -n support.
        yield '''MV="gmv -nt"
FIND=gfind
'''
    #elif sys.platform.startswith('win32'):
    #    ...
    else:
        # 1e6 bytes per MB (was 10E6, which under-reported sizes tenfold).
        yield '''MV="mv -nv"
FIND=find

# {:.1f} MB in {} volumes
'''.format(total_size/1e6, len(chunks))
    yield '''# $FIND {fargs} \( -name .DS_Store -o -iname Thumbs.DB -o -empty \) -delete
# $FIND {fargs} -empty -delete
'''.format(**locals())
    for n, (size, pairs) in enumerate(chunks, start=1):
        if prefix:
            vol_root = prefix.format(n)
            yield '''
### Volume {n}: {size:,} bytes
vol_root={vol_root}
'''.format(**locals())
            for src, dest in pairs:
                # dest is honored only when sorting is requested; otherwise the
                # file keeps its source-relative path under the volume root.
                if dest and do_sort:
                    dest = vol_root+os.path.sep+dest
                else:
                    dest = vol_root+os.path.sep+src
                yield _move(src, dest)
        else:
            for src, dest in pairs:
                if dest:  # dest can be None if re-sorting not needed
                    yield _move(src, dest)
    if not init:
        yield '''
$FIND {fargs} -empty -delete
'''.format(**locals())


def arrange_dirs(*args, fileout='', **kwargs):
    """Run hier_arrange and write the resulting script.

    fileout may be a file-like object (written directly), an integer file
    descriptor or a filename (opened for writing), or empty/falsy to print
    to standard out.
    """
    def _get_lines(*args, **kwargs):
        ha = list(hier_arrange(*args, **kwargs))  # hier_arrange is a generator of syntax lines
        if ha:
            yield from ha
    if hasattr(fileout, 'write'):
        debug("Writing to {}".format(fileout))
        fileout.write(os.linesep.join(_get_lines(*args, **kwargs)))
    elif isinstance(fileout, int):
        with open(fileout, 'w') as fo:
            return arrange_dirs(*args, fileout=fo, **kwargs)
    elif isinstance(fileout, str) and fileout:
        with open(fileout, 'w') as fo:
            return arrange_dirs(*args, fileout=fo, **kwargs)
    else:
        if fileout:
            warning("'{}' invalid, writing to standard out".format(fileout))
        print('\n'.join(_get_lines(*args, **kwargs)) )
<gh_stars>0
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import { TranslateService } from '@ngx-translate/core';

import { IPermissionRole } from '../permissions.interface';
import { IPermissionsTreeNode } from './permissions-tree.interface';
import { ITreeNode } from '../../../../shared/components/flowtree/treenode/treenode.interface';

import { DataService } from '../../../../core/data/data.service';

import { menuConfig } from '../../../menu-config';

/**
 * Loads and persists the GUI-object permission tree for a role, converting
 * between the server's IPermissionsTreeNode shape and the flowtree ITreeNode
 * shape used by the UI.
 */
@Injectable()
export class PermissionsTreeService {
  constructor(
    private dataService: DataService,
    private translateService: TranslateService
  ) {}

  /**
   * Fetches the permission tree for the given role.
   * Side effect: every node whose permission has `value` set is pushed into
   * the caller-supplied `selection` array during conversion (pre-order).
   */
  load(currentRole: IPermissionRole, selection: ITreeNode[]): Observable<ITreeNode[]> {
    return this.dataService.readAll('/roles/{id}/guiobjects', currentRole)
      .map(data => this.convertToTreeNodes(data, selection));
  }

  /**
   * Persists a selection change: `added` nodes are sent with value=true,
   * `removed` nodes with value=false, in a single update call.
   */
  save(currentRole: IPermissionRole, removed: ITreeNode[], added: ITreeNode[]): Observable<any> {
    const data = {
      objects: [
        ...added.map(node => ({ id: node.id, value: true })),
        ...removed.map(node => ({ id: node.id, value: false }))
      ]
    };
    return this.update(currentRole, data);
  }

  /**
   * Returns the leaf nodes (no children) of `nodes` that are absent from
   * `nodes2`, compared by object identity.
   */
  getDiff(nodes: ITreeNode[], nodes2: ITreeNode[]): ITreeNode[] {
    return nodes.filter(node1 => !node1.children && !nodes2.find(node2 => node1 === node2));
  }

  // Low-level PUT of the permission payload for the role.
  private update(currentRole: IPermissionRole, data: object): Observable<any> {
    return this.dataService.update('/roles/{id}/guiobjects', currentRole, data);
  }

  // Converts a list of server nodes, collecting selected ones into `selection`.
  // NOTE: `selection` is mutated here, not returned.
  private convertToTreeNodes(permissions: IPermissionsTreeNode[], selection: ITreeNode[]): ITreeNode[] {
    return permissions
      .map(permission => {
        const node = this.convertToTreeNode(permission, selection);
        if (permission.value) {
          selection.push(node);
        }
        return node;
      });
  }

  // Converts one server node (recursively) to an ITreeNode; labels are taken
  // from menuConfig when available and run through the translate service.
  private convertToTreeNode(permission: IPermissionsTreeNode, selection: ITreeNode[]): ITreeNode {
    const hasChildren = permission.children && permission.children.length > 0;
    const cfg = menuConfig.hasOwnProperty(permission.name) ?
      menuConfig[permission.name] : null;
    return {
      id: permission.id,
      label: this.translateService.instant(cfg && cfg.text ? cfg.text : permission.name) || permission.name,
      expanded: hasChildren,
      children: hasChildren ? this.convertToTreeNodes(permission.children, selection) : undefined,
      data: permission,
      icon: hasChildren ? 'fa fa-folder-o' : '',
      // icon: hasChildren ? 'fa fa-folder-o' : (cfg ? cfg.icon : ''),
      expandedIcon: hasChildren ? 'fa fa-folder-open-o' : ''
    };
  }
}
/** * Class that runs the book creation */ @SpringBootApplication public class Main { public static void main( String[] args ) { SpringApplication.run( Main.class, args ); } }
/**
 * Term-related classes for INSERT JSON support.
 *
 * Bridges between a JSON document (either a literal string or a bind marker)
 * and per-column {@code Term}s usable by the rest of the CQL machinery.
 */
public class Json {
    // Shared Jackson mapper used for all JSON (de)serialization in this class.
    public static final ObjectMapper JSON_OBJECT_MAPPER = new ObjectMapper();

    public static final JsonStringEncoder JSON_STRING_ENCODER = new JsonStringEncoder();

    // Pseudo column identifier used as the receiver for a whole-row JSON bind marker.
    public static final ColumnIdentifier JSON_COLUMN_ID = new ColumnIdentifier("[json]", true);

    /**
     * Decodes an arbitrary JSON string into a generic Java object tree.
     *
     * @throws MarshalException if the string is not valid JSON
     */
    public static Object decodeJson(String json) {
        try {
            return JSON_OBJECT_MAPPER.readValue(json, Object.class);
        } catch (IOException exc) {
            throw new MarshalException("Error decoding JSON string: " + exc.getMessage());
        }
    }

    /** Unprepared form of a JSON value source (literal string or bind marker). */
    public interface Raw {
        public Prepared prepareAndCollectMarkers(CFMetaData metadata, Collection<ColumnDefinition> receivers, VariableSpecifications boundNames);
    }

    /**
     * Represents a literal JSON string in an INSERT JSON statement.
     * For example: INSERT INTO mytable (key, col) JSON '{"key": 0, "col": 0}';
     */
    public static class Literal implements Raw {
        private final String text;

        public Literal(String text) {
            this.text = text;
        }

        public Prepared prepareAndCollectMarkers(CFMetaData metadata, Collection<ColumnDefinition> receivers, VariableSpecifications boundNames) {
            // The literal can be parsed into per-column terms right away.
            return new PreparedLiteral(parseJson(text, receivers));
        }
    }

    /**
     * Represents a marker for a JSON string in an INSERT JSON statement.
     * For example: INSERT INTO mytable (key, col) JSON ?;
     */
    public static class Marker implements Raw {
        protected final int bindIndex;

        public Marker(int bindIndex) {
            this.bindIndex = bindIndex;
        }

        public Prepared prepareAndCollectMarkers(CFMetaData metadata, Collection<ColumnDefinition> receivers, VariableSpecifications boundNames) {
            // Register the [json] pseudo-column as the marker's receiver;
            // actual parsing is deferred until bind time.
            boundNames.add(bindIndex, makeReceiver(metadata));
            return new PreparedMarker(bindIndex, receivers);
        }

        // The marker binds a single UTF-8 string covering the whole row.
        private ColumnSpecification makeReceiver(CFMetaData metadata) {
            return new ColumnSpecification(metadata.ksName, metadata.cfName, JSON_COLUMN_ID, UTF8Type.instance);
        }
    }

    /**
     * A prepared, full set of JSON values.
     */
    public static abstract class Prepared {
        /** Returns the raw term holding this JSON document's value for the given column. */
        public abstract Term.Raw getRawTermForColumn(ColumnDefinition def);
    }

    /**
     * A prepared literal set of JSON values
     */
    private static class PreparedLiteral extends Prepared {
        // Pre-parsed per-column terms, keyed by column identifier.
        private final Map<ColumnIdentifier, Term> columnMap;

        public PreparedLiteral(Map<ColumnIdentifier, Term> columnMap) {
            this.columnMap = columnMap;
        }

        public Term.Raw getRawTermForColumn(ColumnDefinition def) {
            Term value = columnMap.get(def.name);
            // Columns absent from the JSON document are explicit nulls.
            return value == null ? Constants.NULL_LITERAL : new ColumnValue(value);
        }
    }

    /**
     * A prepared bind marker for a set of JSON values
     */
    private static class PreparedMarker extends Prepared {
        private final int bindIndex;
        private final Collection<ColumnDefinition> columns;

        public PreparedMarker(int bindIndex, Collection<ColumnDefinition> columns) {
            this.bindIndex = bindIndex;
            this.columns = columns;
        }

        public RawDelayedColumnValue getRawTermForColumn(ColumnDefinition def) {
            return new RawDelayedColumnValue(this, def);
        }
    }

    /**
     * A Terminal for a single column.
     *
     * Note that this is intrinsically an already prepared term, but this still implements Term.Raw so that we can
     * easily use it to create raw operations.
     */
    private static class ColumnValue extends Term.Raw {
        private final Term term;

        public ColumnValue(Term term) {
            this.term = term;
        }

        @Override
        public Term prepare(String keyspace, ColumnSpecification receiver) throws InvalidRequestException {
            return term;
        }

        @Override
        public TestResult testAssignment(String keyspace, ColumnSpecification receiver) {
            return TestResult.NOT_ASSIGNABLE;
        }

        public String getText() {
            return term.toString();
        }
    }

    /**
     * A Raw term for a single column. Like ColumnValue, this is intrinsically already prepared.
     */
    private static class RawDelayedColumnValue extends Term.Raw {
        private final PreparedMarker marker;
        private final ColumnDefinition column;

        public RawDelayedColumnValue(PreparedMarker prepared, ColumnDefinition column) {
            this.marker = prepared;
            this.column = column;
        }

        @Override
        public Term prepare(String keyspace, ColumnSpecification receiver) throws InvalidRequestException {
            return new DelayedColumnValue(marker, column);
        }

        @Override
        public TestResult testAssignment(String keyspace, ColumnSpecification receiver) {
            return TestResult.WEAKLY_ASSIGNABLE;
        }

        public String getText() {
            return marker.toString();
        }
    }

    /**
     * A NonTerminal for a single column. As with {@code ColumnValue}, this is intrinsically a prepared.
     */
    private static class DelayedColumnValue extends Term.NonTerminal {
        private final PreparedMarker marker;
        private final ColumnDefinition column;

        public DelayedColumnValue(PreparedMarker prepared, ColumnDefinition column) {
            this.marker = prepared;
            this.column = column;
        }

        @Override
        public void collectMarkerSpecification(VariableSpecifications boundNames) {
            // We've already collected what we should (and in practice this method is never called).
        }

        @Override
        public boolean containsBindMarker() {
            return true;
        }

        @Override
        public Terminal bind(QueryOptions options) throws InvalidRequestException {
            // Resolve this column's value out of the bound JSON document.
            Term term = options.getJsonColumnValue(marker.bindIndex, column.name, marker.columns);
            return term == null ? null : term.bind(options);
        }

        @Override
        public Iterable<Function> getFunctions() {
            return Collections.emptyList();
        }
    }

    /**
     * Given a JSON string, return a map of columns to their values for the insert.
     *
     * @throws InvalidRequestException if the string is not a JSON map, mentions a column
     *         that is not in {@code expectedReceivers}, or a value fails type conversion
     */
    public static Map<ColumnIdentifier, Term> parseJson(String jsonString, Collection<ColumnDefinition> expectedReceivers) {
        try {
            Map<String, Object> valueMap = JSON_OBJECT_MAPPER.readValue(jsonString, Map.class);

            if (valueMap == null)
                throw new InvalidRequestException("Got null for INSERT JSON values");

            handleCaseSensitivity(valueMap);

            Map<ColumnIdentifier, Term> columnMap = new HashMap<>(expectedReceivers.size());
            for (ColumnSpecification spec : expectedReceivers) {
                // remove() so that leftover keys can be reported as unrecognized below.
                Object parsedJsonObject = valueMap.remove(spec.name.toString());
                if (parsedJsonObject == null) {
                    columnMap.put(spec.name, null);
                } else {
                    try {
                        columnMap.put(spec.name, spec.type.fromJSONObject(parsedJsonObject));
                    } catch(MarshalException exc) {
                        throw new InvalidRequestException(String.format("Error decoding JSON value for %s: %s", spec.name, exc.getMessage()));
                    }
                }
            }

            if (!valueMap.isEmpty()) {
                throw new InvalidRequestException(String.format(
                        "JSON values map contains unrecognized column: %s", valueMap.keySet().iterator().next()));
            }

            return columnMap;
        } catch (IOException exc) {
            throw new InvalidRequestException(String.format("Could not decode JSON string as a map: %s. (String was: %s)", exc.toString(), jsonString));
        } catch (MarshalException exc) {
            throw new InvalidRequestException(exc.getMessage());
        }
    }

    /**
     * Handles unquoting and case-insensitivity in map keys.
     */
    public static void handleCaseSensitivity(Map<String, Object> valueMap) {
        // Iterate over a snapshot of the keys because the map is mutated in the loop.
        for (String mapKey : new ArrayList<>(valueMap.keySet())) {
            // if it's surrounded by quotes, remove them and preserve the case
            if (mapKey.startsWith("\"") && mapKey.endsWith("\"")) {
                valueMap.put(mapKey.substring(1, mapKey.length() - 1), valueMap.remove(mapKey));
                continue;
            }

            // otherwise, lowercase it if needed
            String lowered = mapKey.toLowerCase(Locale.US);
            if (!mapKey.equals(lowered))
                valueMap.put(lowered, valueMap.remove(mapKey));
        }
    }
}
GOCAD TSurf 1 HEADER { name:ETRA-SBMT-WEST-Waterman_Canyon_fault-CFM5 mesh:false ivolmap:false imap:false parts:false *solid*color:0.196078 0.803922 0.196078 1 border:true *border*width:2 *solid*ambient:0.411765 0.411765 0.411765 1 *solid*specular:0 0.501961 0 1 *border*color:0.662745 0.662745 0.662745 1 *border*bstone:false *solid*transparency:0 } GOCAD_ORIGINAL_COORDINATE_SYSTEM NAME Default AXIS_NAME "X" "Y" "Z" AXIS_UNIT "m" "m" "m" ZPOSITIVE Elevation END_ORIGINAL_COORDINATE_SYSTEM PROPERTIES is_latest is_CFM5 PROP_LEGAL_RANGES **none** **none** **none** **none** NO_DATA_VALUES -99999 -99999 PROPERTY_CLASSES is_latest is_cfm5 PROPERTY_KINDS unknown unknown PROPERTY_SUBCLASSES QUANTITY Float QUANTITY Float ESIZES 1 1 UNITS none none PROPERTY_CLASS_HEADER Z { is_z:on } TFACE PVRTX 1 489732.849609375 3783478.09375 1458 -99999 -99999 PVRTX 2 489522.244140625 3782768.796875 -10427.099609375 -99999 -99999 PVRTX 3 488458.66015625 3783710.609375 -10152.099609375 -99999 -99999 PVRTX 4 489680.19921875 3783300.765625 -1513.27490234375 -99999 -99999 PVRTX 5 489627.546875 3783123.4375 -4484.5498046875 -99999 -99999 PVRTX 6 489574.89453125 3782946.125 -7455.82421875 -99999 -99999 PVRTX 7 489414.302734375 3783536.21875 -1444.52490234375 -99999 -99999 PVRTX 8 489095.75390625 3783594.34375 -4347.0498046875 -99999 -99999 PVRTX 9 488777.20703125 3783652.484375 -7249.57421875 -99999 -99999 PVRTX 10 489267.201171875 3783932.21875 1448 -99999 -99999 PVRTX 11 489065.06640625 3783876.8125 -1452.02490234375 -99999 -99999 PVRTX 12 488862.9296875 3783821.40625 -4352.0498046875 -99999 -99999 PVRTX 13 488660.794921875 3783766.015625 -7252.07421875 -99999 -99999 PVRTX 14 487201.361328125 3784693.796875 -10064.25 -99999 -99999 PVRTX 15 488750.7421875 3784122.609375 -1430.0625 -99999 -99999 PVRTX 16 488234.28125 3784313 -4308.125 -99999 -99999 PVRTX 17 487717.8203125 3784503.40625 -7186.1875 -99999 -99999 PVRTX 18 488669.265625 3784419.90625 1733 -99999 -99999 PVRTX 19 488302.2890625 
3784488.375 -1216.3125 -99999 -99999 PVRTX 20 487935.3125 3784556.84375 -4165.625 -99999 -99999 PVRTX 21 487568.337890625 3784625.328125 -7114.9375 -99999 -99999 PVRTX 22 488403.224609375 3784882.59375 1755 -99999 -99999 PVRTX 23 488102.7578125 3784835.390625 -1199.8125 -99999 -99999 PVRTX 24 487802.29296875 3784788.1875 -4154.625 -99999 -99999 PVRTX 25 487501.828125 3784741 -7109.4375 -99999 -99999 PVRTX 26 487835.46875 3785265.21875 1821 -99999 -99999 PVRTX 27 487676.94140625 3785122.359375 -1150.3125 -99999 -99999 PVRTX 28 487518.4140625 3784979.5 -4121.625 -99999 -99999 PVRTX 29 487359.888671875 3784836.65625 -7092.9375 -99999 -99999 PVRTX 30 487107.259765625 3785623.140625 1547 -99999 -99999 PVRTX 31 487130.78515625 3785390.8125 -1355.8125 -99999 -99999 PVRTX 32 487154.310546875 3785158.46875 -4258.625 -99999 -99999 PVRTX 33 487177.8359375 3784926.125 -7161.4375 -99999 -99999 PVRTX 34 486490.328125 3785689.703125 1460 -99999 -99999 PVRTX 35 486668.0859375 3785440.71875 -1421.0625 -99999 -99999 PVRTX 36 486845.84375 3785191.75 -4302.125 -99999 -99999 PVRTX 37 487023.603515625 3784942.78125 -7183.1875 -99999 -99999 PVRTX 38 485705.953125 3785842.359375 1288 -99999 -99999 PVRTX 39 486079.8046875 3785555.21875 -1550.0625 -99999 -99999 PVRTX 40 486453.65625 3785268.078125 -4388.125 -99999 -99999 PVRTX 41 486827.509765625 3784980.9375 -7226.1875 -99999 -99999 PVRTX 42 485495.34765625 3785133.0625 -10597.099609375 -99999 -99999 PVRTX 43 485653.30078125 3785665.03125 -1683.27490234375 -99999 -99999 PVRTX 44 485600.650390625 3785487.71875 -4654.5498046875 -99999 -99999 PVRTX 45 485548 3785310.390625 -7625.82421875 -99999 -99999 PVRTX 46 483860.74609375 3785391.4375 -10478.4375 -99999 -99999 PVRTX 47 485244.65234375 3785729.625 -1653.609375 -99999 -99999 PVRTX 48 484783.349609375 3785616.90625 -4595.21875 -99999 -99999 PVRTX 49 484322.046875 3785504.171875 -7536.828125 -99999 -99999 PVRTX 50 484725.15234375 3786042.796875 1432 -99999 -99999 PVRTX 51 484509.05078125 
3785879.953125 -1545.609375 -99999 -99999 PVRTX 52 484292.94921875 3785717.125 -4523.21875 -99999 -99999 PVRTX 53 484076.84765625 3785554.28125 -7500.828125 -99999 -99999 PVRTX 54 483537.912109375 3786162.40625 1336 -99999 -99999 PVRTX 55 483618.62109375 3785969.65625 -1617.609375 -99999 -99999 PVRTX 56 483699.328125 3785776.921875 -4571.21875 -99999 -99999 PVRTX 57 483780.037109375 3785584.1875 -7524.828125 -99999 -99999 PVRTX 58 482785.7421875 3786218.15625 1270 -99999 -99999 PVRTX 59 483054.4921875 3786011.46875 -1667.109375 -99999 -99999 PVRTX 60 483323.244140625 3785804.796875 -4604.21875 -99999 -99999 PVRTX 61 483591.99609375 3785598.125 -7541.328125 -99999 -99999 PVRTX 62 482033.61328125 3786306.46875 1252 -99999 -99999 PVRTX 63 482490.396484375 3786077.71875 -1680.609375 -99999 -99999 PVRTX 64 482947.1796875 3785848.953125 -4613.21875 -99999 -99999 PVRTX 65 483403.962890625 3785620.1875 -7545.828125 -99999 -99999 PVRTX 66 481823.0078125 3785597.171875 -10633.099609375 -99999 -99999 PVRTX 67 481980.9609375 3786129.140625 -1719.27490234375 -99999 -99999 PVRTX 68 481928.310546875 3785951.8125 -4690.5498046875 -99999 -99999 PVRTX 69 481875.66015625 3785774.5 -7661.82421875 -99999 -99999 PVRTX 70 481220.03125 3786274.765625 1256 -99999 -99999 PVRTX 71 481370.775390625 3786105.375 -1716.27490234375 -99999 -99999 PVRTX 72 481521.51953125 3785935.96875 -4688.5498046875 -99999 -99999 PVRTX 73 481672.263671875 3785766.5625 -7660.82421875 -99999 -99999 PVRTX 74 480009.5234375 3785580.34375 -10572.099609375 -99999 -99999 PVRTX 75 480917.404296875 3786101.15625 -1701.02490234375 -99999 -99999 PVRTX 76 480614.77734375 3785927.5625 -4658.0498046875 -99999 -99999 PVRTX 77 480312.150390625 3785753.953125 -7615.07421875 -99999 -99999 PVRTX 78 480220.12890625 3786289.640625 1313 -99999 -99999 PVRTX 79 480167.4765625 3786112.3125 -1658.27490234375 -99999 -99999 PVRTX 80 480114.826171875 3785935 -4629.5498046875 -99999 -99999 PVRTX 81 480062.17578125 3785757.671875 
-7600.82421875 -99999 -99999 PVRTX 82 478392.65234375 3785666.75 -10538.099609375 -99999 -99999 PVRTX 83 479763.259765625 3786133.921875 -1649.77490234375 -99999 -99999 PVRTX 84 479306.390625 3785978.1875 -4612.5498046875 -99999 -99999 PVRTX 85 478849.521484375 3785822.46875 -7575.32421875 -99999 -99999 PVRTX 86 478603.2578125 3786376.046875 1347 -99999 -99999 PVRTX 87 478550.60546875 3786198.71875 -1624.27490234375 -99999 -99999 PVRTX 88 478497.955078125 3786021.40625 -4595.5498046875 -99999 -99999 PVRTX 89 478445.3046875 3785844.078125 -7566.82421875 -99999 -99999 PVRTX 90 477494.4921875 3786299.9375 1436 -99999 -99999 PVRTX 91 477719.03125 3786141.640625 -1557.52490234375 -99999 -99999 PVRTX 92 477943.572265625 3785983.34375 -4551.0498046875 -99999 -99999 PVRTX 93 478168.11328125 3785825.046875 -7544.57421875 -99999 -99999 PVRTX 94 476345.5 3786079.5 1052 -99999 -99999 PVRTX 95 476857.2890625 3785976.3125 -1845.52490234375 -99999 -99999 PVRTX 96 477369.076171875 3785873.125 -4743.0498046875 -99999 -99999 PVRTX 97 477880.86328125 3785769.9375 -7640.57421875 -99999 -99999 PVRTX 98 477283.88671875 3785590.640625 -10449.099609375 -99999 -99999 PVRTX 99 476580.09765625 3785957.28125 -1823.27490234375 -99999 -99999 PVRTX 100 476814.6953125 3785835.0625 -4698.5498046875 -99999 -99999 PVRTX 101 477049.2890625 3785712.859375 -7573.82421875 -99999 -99999 PVRTX 102 476134.89453125 3785370.203125 -10833.099609375 -99999 -99999 PVRTX 103 476292.84765625 3785902.171875 -1919.27490234375 -99999 -99999 PVRTX 104 476240.1953125 3785724.84375 -4890.5498046875 -99999 -99999 PVRTX 105 476187.546875 3785547.53125 -7861.82421875 -99999 -99999 PVRTX 106 474924.40234375 3785333.5 -10627.099609375 -99999 -99999 PVRTX 107 475990.2265625 3785893 -1867.77490234375 -99999 -99999 PVRTX 108 475634.953125 3785706.5 -4787.5498046875 -99999 -99999 PVRTX 109 475279.67578125 3785520 -7707.32421875 -99999 -99999 PVRTX 110 475135.0078125 3786042.796875 1258 -99999 -99999 PVRTX 111 475082.35546875 
3785865.46875 -1713.27490234375 -99999 -99999 PVRTX 112 475029.703125 3785688.15625 -4684.5498046875 -99999 -99999 PVRTX 113 474977.0546875 3785510.828125 -7655.82421875 -99999 -99999 PVRTX 114 473876.203125 3786072.625 981 -99999 -99999 PVRTX 115 474138.25390625 3785887.84375 -1921.02490234375 -99999 -99999 PVRTX 116 474400.3046875 3785703.0625 -4823.0498046875 -99999 -99999 PVRTX 117 474662.3515625 3785518.28125 -7725.07421875 -99999 -99999 PVRTX 118 472889.0234375 3786279.328125 969 -99999 -99999 PVRTX 119 473397.8671875 3786042.875 -1930.02490234375 -99999 -99999 PVRTX 120 473906.7109375 3785806.40625 -4829.0498046875 -99999 -99999 PVRTX 121 474415.55859375 3785569.953125 -7728.07421875 -99999 -99999 PVRTX 122 473665.59765625 3785363.328125 -10904.099609375 -99999 -99999 PVRTX 123 473083.16796875 3786050.328125 -1999.27490234375 -99999 -99999 PVRTX 124 473277.3125 3785821.328125 -4967.5498046875 -99999 -99999 PVRTX 125 473471.453125 3785592.328125 -7935.82421875 -99999 -99999 PVRTX 126 472537.54296875 3785592.6875 -10916.125 -99999 -99999 PVRTX 127 472801.15234375 3786107.671875 -2002.28125 -99999 -99999 PVRTX 128 472713.28125 3785936 -4973.5625 -99999 -99999 PVRTX 129 472625.4140625 3785764.34375 -7944.84375 -99999 -99999 PVRTX 130 471853.828125 3786399.515625 972 -99999 -99999 PVRTX 131 472024.7578125 3786197.8125 -2000.03125 -99999 -99999 PVRTX 132 472195.6875 3785996.09375 -4972.0625 -99999 -99999 PVRTX 133 472366.61328125 3785794.390625 -7944.09375 -99999 -99999 PVRTX 134 471182.515625 3785726.84375 -10916.1875 -99999 -99999 PVRTX 135 471686 3786231.34375 -2000.046875 -99999 -99999 PVRTX 136 471518.171875 3786063.1875 -4972.09375 -99999 -99999 PVRTX 137 471350.34375 3785895.015625 -7944.140625 -99999 -99999 PVRTX 138 470667.01171875 3786474.78125 871 -99999 -99999 PVRTX 139 470795.88671875 3786287.796875 -2075.796875 -99999 -99999 PVRTX 140 470924.765625 3786100.8125 -5022.59375 -99999 -99999 PVRTX 141 471053.640625 3785913.828125 -7969.390625 -99999 
-99999 PVRTX 142 469716.66796875 3786520.828125 858 -99999 -99999 PVRTX 143 470083.12890625 3786322.328125 -2085.546875 -99999 -99999 PVRTX 144 470449.59375 3786123.84375 -5029.09375 -99999 -99999 PVRTX 145 470816.0546875 3785925.34375 -7972.640625 -99999 -99999 PVRTX 146 469506.0625 3785811.53125 -11027.099609375 -99999 -99999 PVRTX 147 469664.015625 3786343.5 -2113.27490234375 -99999 -99999 PVRTX 148 469611.3671875 3786166.1875 -5084.5498046875 -99999 -99999 PVRTX 149 469558.71484375 3785988.859375 -8055.82421875 -99999 -99999 PVRTX 150 469087.16796875 3786400.71875 835 -99999 -99999 PVRTX 151 469191.890625 3786253.421875 -2130.52490234375 -99999 -99999 PVRTX 152 469296.6171875 3786106.125 -5096.0498046875 -99999 -99999 PVRTX 153 469401.33984375 3785958.828125 -8061.57421875 -99999 -99999 PVRTX 154 468273.37890625 3786248.796875 806 -99999 -99999 PVRTX 155 468581.55078125 3786139.484375 -2152.27490234375 -99999 -99999 PVRTX 156 468889.71875 3786030.15625 -5110.5498046875 -99999 -99999 PVRTX 157 469197.890625 3785920.84375 -8068.82421875 -99999 -99999 PVRTX 158 468123.640625 3785632.9375 -11073.5625 -99999 -99999 PVRTX 159 468235.9453125 3786094.828125 -2163.890625 -99999 -99999 PVRTX 160 468198.5078125 3785940.875 -5133.78125 -99999 -99999 PVRTX 161 468161.07421875 3785786.90625 -8103.671875 -99999 -99999 PVRTX 162 467320.84765625 3786393.734375 827 -99999 -99999 PVRTX 163 467521.546875 3786203.53125 -2148.140625 -99999 -99999 PVRTX 164 467722.2421875 3786013.34375 -5123.28125 -99999 -99999 PVRTX 165 467922.94140625 3785823.140625 -8098.421875 -99999 -99999 PVRTX 166 466902.7734375 3785740.265625 -11106.125 -99999 -99999 PVRTX 167 467216.328125 3786230.375 -2156.28125 -99999 -99999 PVRTX 168 467111.8125 3786067 -5139.5625 -99999 -99999 PVRTX 169 467007.29296875 3785903.625 -8122.84375 -99999 -99999 PVRTX 170 466414.6796875 3786560.328125 776 -99999 -99999 PVRTX 171 466536.703125 3786355.3125 -2194.53125 -99999 -99999 PVRTX 172 466658.7265625 3786150.296875 
-5165.0625 -99999 -99999 PVRTX 173 466780.75 3785945.28125 -8135.59375 -99999 -99999 TRGL 62 58 63 TRGL 172 169 173 TRGL 54 50 51 TRGL 79 76 80 TRGL 96 92 93 TRGL 138 130 135 TRGL 12 16 15 TRGL 58 59 63 TRGL 80 84 83 TRGL 34 30 31 TRGL 22 23 27 TRGL 95 92 96 TRGL 96 100 95 TRGL 64 68 67 TRGL 3 14 17 TRGL 38 39 43 TRGL 129 126 133 TRGL 163 159 164 TRGL 140 141 145 TRGL 100 101 104 TRGL 47 48 52 TRGL 68 69 73 TRGL 131 127 132 TRGL 56 52 53 TRGL 150 147 151 TRGL 16 17 21 TRGL 10 11 15 TRGL 38 43 47 TRGL 29 14 33 TRGL 91 87 92 TRGL 101 98 105 TRGL 139 140 144 TRGL 39 40 43 TRGL 115 111 112 TRGL 33 14 37 TRGL 42 46 49 TRGL 116 112 113 TRGL 165 158 169 TRGL 143 144 147 TRGL 117 106 121 TRGL 87 83 84 TRGL 95 91 92 TRGL 149 146 153 TRGL 125 126 129 TRGL 31 27 28 TRGL 116 113 117 TRGL 130 118 131 TRGL 101 105 104 TRGL 5 8 4 TRGL 110 94 111 TRGL 109 106 113 TRGL 105 109 104 TRGL 140 137 141 TRGL 102 106 105 TRGL 151 148 152 TRGL 131 136 135 TRGL 105 106 109 TRGL 30 26 27 TRGL 141 134 145 TRGL 164 168 167 TRGL 73 74 77 TRGL 103 108 107 TRGL 55 52 56 TRGL 97 98 101 TRGL 170 167 171 TRGL 58 54 55 TRGL 74 82 85 TRGL 114 115 119 TRGL 24 25 29 TRGL 80 76 77 TRGL 150 142 147 TRGL 58 55 59 TRGL 51 47 52 TRGL 23 24 28 TRGL 41 45 44 TRGL 107 108 112 TRGL 115 112 116 TRGL 20 16 21 TRGL 145 149 148 TRGL 88 89 93 TRGL 59 55 56 TRGL 11 7 8 TRGL 59 56 60 TRGL 13 17 16 TRGL 1 4 7 TRGL 94 99 103 TRGL 40 37 41 TRGL 93 82 97 TRGL 31 28 32 TRGL 71 76 75 TRGL 40 44 43 TRGL 50 38 51 TRGL 153 146 157 TRGL 95 100 99 TRGL 24 21 25 TRGL 158 166 169 TRGL 94 95 99 TRGL 70 71 75 TRGL 12 13 16 TRGL 21 14 25 TRGL 118 123 127 TRGL 6 3 9 TRGL 43 44 47 TRGL 59 60 64 TRGL 65 46 69 TRGL 23 19 20 TRGL 115 116 120 TRGL 64 65 68 TRGL 163 164 167 TRGL 28 24 29 TRGL 40 36 37 TRGL 72 76 71 TRGL 125 129 124 TRGL 86 78 83 TRGL 90 86 91 TRGL 86 87 91 TRGL 22 18 19 TRGL 118 119 123 TRGL 128 129 133 TRGL 140 136 137 TRGL 120 121 124 TRGL 65 69 68 TRGL 73 77 72 TRGL 38 34 35 TRGL 132 137 136 TRGL 138 135 139 TRGL 5 9 8 
TRGL 151 147 148 TRGL 11 12 15 TRGL 14 42 45 TRGL 142 143 147 TRGL 82 98 97 TRGL 164 165 168 TRGL 64 60 65 TRGL 78 79 83 TRGL 86 83 87 TRGL 145 134 149 TRGL 98 102 105 TRGL 81 85 84 TRGL 40 41 44 TRGL 85 82 89 TRGL 32 29 33 TRGL 132 136 131 TRGL 63 64 67 TRGL 152 153 157 TRGL 79 75 76 TRGL 144 148 147 TRGL 122 126 125 TRGL 114 111 115 TRGL 35 32 36 TRGL 15 16 20 TRGL 62 63 67 TRGL 36 33 37 TRGL 152 148 149 TRGL 72 77 76 TRGL 134 146 149 TRGL 78 75 79 TRGL 12 8 9 TRGL 10 1 7 TRGL 37 14 41 TRGL 35 31 32 TRGL 120 116 121 TRGL 78 70 75 TRGL 138 139 143 TRGL 87 84 88 TRGL 45 49 48 TRGL 94 103 107 TRGL 41 14 45 TRGL 25 14 29 TRGL 80 81 84 TRGL 116 117 121 TRGL 157 158 161 TRGL 111 107 112 TRGL 100 104 103 TRGL 156 161 160 TRGL 10 7 11 TRGL 130 131 135 TRGL 143 139 144 TRGL 170 162 167 TRGL 94 91 95 TRGL 165 169 168 TRGL 171 168 172 TRGL 79 80 83 TRGL 62 67 71 TRGL 27 23 28 TRGL 12 9 13 TRGL 30 27 31 TRGL 67 68 72 TRGL 144 145 148 TRGL 162 163 167 TRGL 88 85 89 TRGL 151 152 156 TRGL 53 46 57 TRGL 60 61 65 TRGL 127 128 132 TRGL 104 108 103 TRGL 23 20 24 TRGL 2 3 6 TRGL 38 47 51 TRGL 10 15 19 TRGL 18 10 19 TRGL 156 152 157 TRGL 118 127 131 TRGL 121 106 125 TRGL 94 90 91 TRGL 106 122 125 TRGL 44 48 47 TRGL 36 32 33 TRGL 32 28 29 TRGL 81 74 85 TRGL 46 66 69 TRGL 161 158 165 TRGL 162 154 163 TRGL 144 140 145 TRGL 80 77 81 TRGL 70 62 71 TRGL 154 155 159 TRGL 124 128 123 TRGL 39 35 36 TRGL 108 109 113 TRGL 119 120 123 TRGL 164 160 165 TRGL 171 167 168 TRGL 112 108 113 TRGL 156 160 155 TRGL 39 36 40 TRGL 13 3 17 TRGL 66 74 73 TRGL 56 53 57 TRGL 69 66 73 TRGL 52 48 53 TRGL 99 100 103 TRGL 142 138 143 TRGL 55 51 52 TRGL 123 128 127 TRGL 92 88 93 TRGL 137 134 141 TRGL 121 125 124 TRGL 48 49 53 TRGL 159 160 164 TRGL 150 151 155 TRGL 139 135 136 TRGL 94 107 111 TRGL 154 159 163 TRGL 155 160 159 TRGL 146 158 157 TRGL 57 46 61 TRGL 45 42 49 TRGL 172 168 169 TRGL 169 166 173 TRGL 19 15 20 TRGL 24 20 21 TRGL 4 8 7 TRGL 22 19 23 TRGL 38 35 39 TRGL 118 114 119 TRGL 155 151 156 TRGL 26 22 27 
TRGL 54 51 55 TRGL 124 129 128 TRGL 114 110 111 TRGL 96 101 100 TRGL 160 161 165 TRGL 77 74 81 TRGL 34 31 35 TRGL 157 161 156 TRGL 6 9 5 TRGL 104 109 108 TRGL 133 134 137 TRGL 96 93 97 TRGL 120 124 123 TRGL 60 56 57 TRGL 61 46 65 TRGL 49 46 53 TRGL 154 150 155 TRGL 113 106 117 TRGL 17 14 21 TRGL 132 128 133 TRGL 139 136 140 TRGL 72 68 73 TRGL 89 82 93 TRGL 88 84 85 TRGL 11 8 12 TRGL 152 149 153 TRGL 133 137 132 TRGL 44 45 48 TRGL 60 57 61 TRGL 126 134 133 TRGL 9 3 13 TRGL 71 67 72 TRGL 119 115 120 TRGL 63 59 64 TRGL 87 88 92 TRGL 97 101 96 BSTONE 1 BORDER 174 1 10 END
<filename>mmic_pdb/__init__.py """ mmic_pdb MMIC for extracting and fixing PDB files converted to MMSchema molecules """ # Add imports here from .models import * from .components import * # Handle versioneer from ._version import get_versions versions = get_versions() __version__ = versions["version"] __git_revision__ = versions["full-revisionid"] del get_versions, versions
/** * Wrapper method to update a Blog Post * * @param post {BlogPost} * @param blogHandle * @throws ClientServicesException */ public BlogPost updateBlogPost(BlogPost post, String blogHandle) throws ClientServicesException { if (null == post){ throw new ClientServicesException(null,"null post"); } if(post.getFieldsMap().get(AtomXPath.title)== null) post.setTitle(post.getTitle()); if(post.getFieldsMap().get(AtomXPath.content)== null) post.setContent(post.getContent()); if(!post.getFieldsMap().toString().contains(AtomXPath.tags.toString())) post.setTags(post.getTags()); BlogPostSerializer serializer = new BlogPostSerializer(post); String payload = serializer.updatePayload(); String updatePostUrl = BlogUrls.UPDATE_REMOVE_POST.format(this, BlogUrlParts.blogHandle.get(blogHandle), BlogUrlParts.entryAnchor.get(post.getUid())); Response response = updateData(updatePostUrl, null, payload, null); checkResponseCode(response, HTTPCode.OK); post = getBlogPostFeedHandler().createEntity(response); return post; }
def locate_config(self): for dirpath, dirnames, files in os.walk(os.path.expanduser('~'), topdown=False): if self.config_file in files: self.path_to_config = os.path.join(dirpath, self.config_file) break else: self.path_to_config = '' return self.path_to_config
Following last year’s Lauryn Hill Incident at Electric Forest, the String Cheese Incident have decided to mix it up again this year by playing a 90 minute dubstep set. The band claims they want to expose old school String Cheese Fans to something new that they would really enjoy. Bill Nershi said of the decision, “We’ve been looking for a fresh idea for this year’s Forest. I was strumming on my guitar up in Boulder, CO looking out at the mountains the other day, when suddenly the idea came to me. I ran inside, tossed my guitar, and started making tracks we could use on Abelton Live!” “In recent years, I can attribute a lot of my sound to visionaries like Skrillex, Bassnectar, and Datsik,” he continued. “I’m pleased that they have all agreed to sit in on this truly unique set, along with many more surprise guest DJs!” Nershi was leading the charge on this. He’s always secretly led the charge. The timing was finally right for full… Posted by Jason Hann on Wednesday, April 1, 2015 [Via April Fools]
export * from './accountModels' export * from './cryptoModels' export * from './generalModels' export * from './transactionModels' export * from './chainActionTypeModels' export * from './ethStructures'
#include <iostream> #include <string> #include <cstring> #include <fstream> #include <functional> #include <algorithm> #include <ctime> #include <cmath> #include <vector> #include <queue> #include <map> #include <sstream> #include <unordered_set> #include <unordered_map> #include <sstream> #include <set> #include <cassert> using namespace std; #ifndef ONLINE_JUDGE ifstream in("/home/ffbh/CLionProjects/acm/InOutput/input.txt"); #endif #ifdef ONLINE_JUDGE istream& in = cin; #endif typedef long long LL; #define pii pair<int,int> #define pll pair<LL,LL> #define mp make_pair #define pb push_back #define lson (root<<1) #define rson (root<<1|1) int n; pair<pii,int> d[300010]; void input(){ in>>n; for(int i=0;i<n;++i) { in >> d[i].first.first >> d[i].first.second; d[i].first.second = -d[i].first.second; d[i].second = i+1; } } int main(){ int TEST_CASE = 1; //in >> TEST_CASE; while (TEST_CASE-- > 0){ input(); sort(d,d+n); int mmax = -1; int pj = -1,pi = -1; for(int i=0;i<n;++i){ d[i].first.second = -d[i].first.second; if(d[i].first.second > mmax){ pi = d[i].second; mmax = d[i].first.second; } else{ pj = d[i].second; break; } } if(pj != -1) { swap(pi,pj); cout << pi << " " << pj << endl; } else{ cout<<"-1 -1"<<endl; } } return 0; }
// ForAllNoShrink1 creates a property that requires the check condition to be true for all values // As the name suggests the generated values will not be shrunk if the condition falsiies func ForAllNoShrink1(gen gopter.Gen, check func(interface{}) (interface{}, error)) gopter.Prop { return gopter.SaveProp(func(genParams *gopter.GenParameters) *gopter.PropResult { genResult := gen(genParams) value, ok := genResult.Retrieve() if !ok { return &gopter.PropResult{ Status: gopter.PropUndecided, } } valueFormated := fmt.Sprintf("%+v", value) return convertResult(check(value)).AddArgs(gopter.NewPropArg(genResult, 0, value, valueFormated, value, valueFormated)) }) }
# Interactive-judge script: query positions and binary-search for a boundary.
N = int(input())


def question(i):
    """Query index ``i``; return 0 for 'Male', 1 for 'Female'.

    Any other reply (presumably the judge's verdict / end-of-interaction
    marker) terminates the program -- TODO confirm against the problem's
    interaction protocol.
    """
    print(i)
    s = input()
    if s == 'Male':
        return 0
    elif s == 'Female':
        return 1
    exit()


# A is the expected alternating pattern anchored at position 0's answer.
a = question(0)
A = ([a, 1 - a] * N)[:N]

# Invariant: position lb matches the alternating pattern, rb does not.
# NOTE(review): the loop has no explicit exit; it relies on question()
# calling exit() when the judge ends the interaction.
lb = -1
rb = N
while True:
    mid = (lb + rb) // 2
    if question(mid) == A[mid]:
        lb = mid
    else:
        rb = mid
// Init sets up middleware, static assets, and all HTTP routes on the Echo instance.
func Init(e *echo.Echo) {
	h := &handler.Handler{}
	appConfig := config.GetConfig()

	// Optional auth layer, controlled by configuration.
	if appConfig.Auth.Enabled {
		h.AuthInit(e)
	}
	e.Use(h.IPRestrict)

	// The SPA front-end is served from /go with HTML5 history fallback.
	g := e.Group("/go")
	g.Use(middleware.StaticWithConfig(middleware.StaticConfig{
		Root:  "public",
		HTML5: true,
	}))
	e.GET("/", func(c echo.Context) error {
		return c.Redirect(http.StatusTemporaryRedirect, "/go")
	})

	// Liveness probe.
	e.GET("/health", func(c echo.Context) error {
		return c.JSON(http.StatusOK, map[string]interface{}{
			"UP": true,
		})
	})

	e.GET("/opensearch.xml", h.Opensearch)

	// Short-link resolution and CRUD.
	e.GET("/:key", h.Url)
	e.GET("/*", h.Url)
	e.POST("/:key", h.CreateUrl)
	e.PUT("/:key", h.UpdateUrl)

	// JSON API.
	e.GET("/api/search", h.Search)
	e.GET("/api/search/suggest", h.SearchSuggestions)
	e.GET("/api/popular", h.Popular)
	e.GET("/api/url/:key", h.GetURL)

	// Slack integration is only wired up when configured.
	if appConfig.Slack.SigningSecret != "" {
		e.POST("/api/slack", h.SlackCommand)
	}
	setupTemplates(e)
	if appConfig.Slack.Token != "" {
		s := &slackbot.SlackBot{}
		go s.Init()
	}
}
/**
 * Encodes a Gatekeeper message as a properties object, with all signature requests
 * identified with a unique zero-based index number.
 * <p>
 * Application and message properties are emitted flat under the "application" and
 * "message" prefixes; each signature request is emitted under "request.&lt;index&gt;."
 * with its object metadata nested one level deeper.
 *
 * @return all the properties of the message.
 */
public Properties encodeToProperties() {
    if (log.isDebugEnabled()) {
        log.debug("Encoding GatekeeperMessage to properties");
    }

    Properties encodedProperties = new Properties();

    // The application- and message-level maps share the same flat encoding and
    // differ only in their key prefix, so the loop is factored into a helper.
    encodeStringMap(encodedProperties, "application", applicationProperties);
    encodeStringMap(encodedProperties, "message", messageProperties);

    String prefix = "request";
    SignatureRequest[] requests = getSignatureRequests();
    for (int i = 0; i < requests.length; i++) {
        SignatureRequest request = requests[i];
        String propertyPrefix = prefix + DELIM + i + DELIM;
        encodeProperty(encodedProperties, propertyPrefix + "signatureType", request.getSignatureType());
        encodeProperty(encodedProperties, propertyPrefix + "objectKey", request.getObjectKey());
        encodeProperty(encodedProperties, propertyPrefix + "bucketName", request.getBucketName());
        encodeProperty(encodedProperties, propertyPrefix + "signedUrl", request.getSignedUrl());
        encodeProperty(encodedProperties, propertyPrefix + "declineReason", request.getDeclineReason());

        // Object metadata values may be of any type, so no String cast is
        // applied to them (unlike the application/message property maps).
        propertyPrefix += "metadata" + DELIM;
        Map metadata = request.getObjectMetadata();
        Iterator iter = metadata.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry entry = (Map.Entry) iter.next();
            String metadataName = (String) entry.getKey();
            Object metadataValue = entry.getValue();
            encodeProperty(encodedProperties, propertyPrefix + metadataName, metadataValue);
        }
    }
    return encodedProperties;
}

/**
 * Encodes every entry of a String-to-String map under the given key prefix.
 * Extracted to remove the previously duplicated iteration over the
 * application and message property maps.
 *
 * @param encodedProperties target properties object
 * @param prefix            key prefix (joined with DELIM)
 * @param map               map whose keys and values are Strings
 */
private void encodeStringMap(Properties encodedProperties, String prefix, Map map) {
    Iterator iter = map.entrySet().iterator();
    while (iter.hasNext()) {
        Map.Entry entry = (Map.Entry) iter.next();
        String key = (String) entry.getKey();
        String value = (String) entry.getValue();
        encodeProperty(encodedProperties, prefix + DELIM + key, value);
    }
}
use core::position::{Size, HasSize};
use core::cellbuffer::CellAccessor;

use ui::core::{
    Alignable,
    HorizontalAlign,
    VerticalAlign,
    Widget,
    Frame,
    Painter,
};

/// Display text to widgets
///
/// # Examples
///
/// ```
/// use rustty::ui::core::{VerticalAlign, HorizontalAlign, Widget};
/// use rustty::ui::{Dialog, Label};
///
/// let mut maindlg = Dialog::new(60, 10);
///
/// let mut label = Label::from_str("Hi, this is an example!");
/// label.pack(&maindlg, HorizontalAlign::Middle, VerticalAlign::Middle, (0,0));
///
/// maindlg.add_label(label);
/// maindlg.draw_box();
/// ```
///
pub struct Label {
    frame: Frame,
    // Wrapped lines produced by set_text(); each entry is drawn on its own row.
    text: Vec<String>,
    // Last computed draw position (updated per line in draw()).
    x: usize,
    y: usize,
    // Text alignment within the label, plus (horizontal, vertical) margins.
    t_halign: HorizontalAlign,
    t_valign: VerticalAlign,
    t_margin: (usize, usize)
}

impl Label {
    /// Construct a new Label widget `cols` wide by `rows` high. Initial text is empty
    /// and left aligned
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::Label;
    ///
    /// let mut label = Label::new(60, 10);
    /// ```
    ///
    pub fn new(cols: usize, rows: usize) -> Label {
        Label {
            frame: Frame::new(cols, rows),
            text: Vec::new(),
            x: 0,
            y: 0,
            t_halign: HorizontalAlign::Left,
            t_valign: VerticalAlign::Middle,
            t_margin: (0, 0),
        }
    }

    /// Construct a new label widget from an existing string *s*. *s* can either be a
    /// `&str` or `String` , and a label will be constructed that is the size of the
    /// length of characters in *s*. Text is left aligned by default
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::Label;
    ///
    /// let mut label1 = Label::from_str("This is a label"); // label is size (15x1)
    ///
    /// let s = "Here's another label".to_string();
    /// let mut label2 = Label::from_str(s); // label is size (20x1)
    /// ```
    ///
    pub fn from_str<S: Into<String>>(s: S) -> Label {
        let s = s.into();
        Label {
            frame: Frame::new(s.len(), 1),
            text: vec![s.into()],
            x: 0,
            y: 0,
            t_halign: HorizontalAlign::Left,
            t_valign: VerticalAlign::Middle,
            t_margin: (0, 0),
        }
    }

    /// Construct a new label widget from an existing string slice *s*, without
    /// taking ownership. A label is constructed that is the size of the length
    /// of characters in *s*. Text is left aligned by default
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::Label;
    ///
    /// let mut label = Label::from_str_ref("This is a label"); // label is size (15x1)
    /// ```
    ///
    pub fn from_str_ref(s: &str) -> Label {
        Label {
            frame: Frame::new(s.len(), 1),
            text: vec![s.to_string()],
            x: 0,
            y: 0,
            t_halign: HorizontalAlign::Left,
            t_valign: VerticalAlign::Middle,
            t_margin: (0, 0),
        }
    }

    /// Specify a custom alignment for the text within the widget. Each line
    /// drawn within the label will adhere to the alignments passed for the
    /// text. *note that text alignment is with respect to the *label*
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::core::{HorizontalAlign, VerticalAlign};
    /// use rustty::ui::Label;
    ///
    /// let mut label = Label::new(20, 3);
    /// label.set_text("Centered");
    /// label.align_text(HorizontalAlign::Middle, VerticalAlign::Middle, (0,0));
    /// ```
    ///
    pub fn align_text(&mut self, halign: HorizontalAlign, valign: VerticalAlign,
                      margin: (usize, usize)) {
        self.t_halign = halign;
        self.t_valign = valign;
        self.t_margin = margin;
    }

    /// Set the text of the widget to the passed `&str` or `String`. If the
    /// widget does not have enough room to display the new text, the label
    /// will only show the truncated text. *resize()* must be called to extend
    /// the size of the label.
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::HasSize;
    /// use rustty::ui::core::Widget;
    /// use rustty::ui::Label;
    ///
    /// let mut label1 = Label::new(20, 3);
    /// label1.set_text("Initial text");
    /// ```
    ///
    pub fn set_text<S: Into<String>>(&mut self, new_str: S) {
        let (framex, _) = self.frame.size();
        self.text = Vec::new();
        let mut parse = new_str.into();
        let mut line = String::new();
        // This loop below will accomplish splitting a line of text
        // into lines that adhere to the amount of rows in a label.
        // Each iteration consumes one word and then the run of spaces
        // following it, flushing `line` into self.text whenever the next
        // chunk would overflow the frame width (minus the horizontal margin).
        loop {
            // Look for a word until a whitespace is reached
            if let Some(loc) = parse.find(char::is_whitespace) {
                let line_len = line.len();
                let tmp = parse[..loc].to_owned();
                // If the word can fit on the current line, add it
                if line_len + tmp.len() + self.t_margin.0 < framex {
                    line.push_str(&tmp);
                } else {
                    // Word does not fit: flush the current line (trailing
                    // spaces removed) and start a new one with this word.
                    line = line.trim_right().to_owned();
                    self.text.push(line);
                    line = tmp.to_owned();
                }
                parse = parse[loc..].to_owned();
            } else {
                // If no whitespace detected, there may still be one
                // more word so attempt to add it
                if parse.len() != 0 {
                    let line_len = line.len();
                    if line_len + parse.len() + self.t_margin.0 < framex {
                        line.push_str(&parse);
                        self.text.push(line);
                    } else {
                        self.text.push(line);
                        self.text.push(parse);
                    }
                }
                break;
            }
            // Look for the range of spaces between words
            if let Some(loc) = parse.find(|c: char| c != ' ') {
                let line_len = line.len();
                let tmp = parse[..loc].to_owned();
                // If the next word can fit on the current line, do so
                if line_len + tmp.len() + self.t_margin.0 < framex {
                    line.push_str(&tmp);
                } else {
                    line = line.trim_right().to_owned();
                    self.text.push(line);
                    line = "".to_string();
                }
                parse = parse[loc..].to_owned();
            } else {
                // We don't care if there's spaces at the end, so don't check
                break;
            }
        }
    }
}

impl Widget for Label {
    fn draw(&mut self, parent: &mut CellAccessor) {
        // For every line to be written, align it correctly as defined by the user in
        // align_text, if not this text will be left and middle aligned by default.
        // `i` offsets each successive line one row below the aligned baseline.
        for (i, item) in self.text.iter().enumerate() {
            self.x = self.frame.halign_line(&item, self.t_halign.clone(), self.t_margin.0);
            self.y = self.frame.valign_line(&item, self.t_valign.clone(), self.t_margin.1);
            self.frame.printline(self.x, self.y + i, &item);
        }
        self.frame.draw_into(parent);
    }

    fn pack(&mut self, parent: &HasSize, halign: HorizontalAlign, valign: VerticalAlign,
            margin: (usize, usize)) {
        self.frame.align(parent, halign, valign, margin);
    }

    fn draw_box(&mut self) {
        self.frame.draw_box();
    }

    fn resize(&mut self, new_size: Size) {
        self.frame.resize(new_size);
    }

    fn frame(&self) -> &Frame {
        &self.frame
    }

    fn frame_mut(&mut self) -> &mut Frame {
        &mut self.frame
    }
}
#include<bits/stdc++.h> using namespace std; typedef long long ll; #define mp make_pair #define pb push_back typedef vector<int> vi; typedef pair<int ,int> vii; typedef vector<pair<int,int>> vpii; typedef vector<ll> vll; typedef vector<pair<ll,ll>> vpll; typedef pair<ll,ll> pll; typedef pair<int,int> pii; #define fo(i,n) for(int i=0;i<n;i++) #define all(x) x.begin(),x.end() #define forstl(it,v) for(auto it:v){cout<<it<<" ";} #define sall(x) (sort(all(x))) #define F first #define S second #define MOD 1000000007 #define printdouble(x) cout << fixed << setprecision(20) << x ll _gcd(ll a, ll b){return a==0?b:_gcd(b%a,a);} ll _lcm(ll a,ll b){return a/_gcd(a,b)*b;} #define goog(tno) cout<<"Case #"<<tno<<": "; void solve(); ll n; pair<ll,ll>dp[(1<<21)]; int pows[100001]; bool done = 0; int power10(int n){ if(!done){ pows[0] = 1; for(int i = 1; i <= 100000; i++) pows[i] = (pows[i-1] * 10LL) % MOD; done = 1; } return pows[n]; } int main() { ios::sync_with_stdio(0); cin.tie(NULL); #ifndef ONLINE_JUDGE freopen("in1.txt","r",stdin); freopen("out1.txt","w",stdout); #endif int t=1; //cin>>t; while(t--) { solve(); } } void solve() { const int maxn=2e5+5,INF=0x3f3f3f3f; int a[maxn]; int last[maxn]; int n; while(cin>>n) { memset(last,INF,sizeof(last)); for(int i=0;i<n;i++) { cin>>a[i]; last[a[i]]=i; } sort(last,last+maxn); cout<<a[last[0]]<<endl; } }
<reponame>UUDigitalHumanitieslab/timealign<gh_stars>1-10
from django.views import generic

from .models import Post


class PostList(generic.ListView):
    """Renders the list of all Post objects (Django generic ListView)."""
    model = Post


class PostDetail(generic.DetailView):
    """Renders a single Post, looked up by pk/slug from the URLconf."""
    model = Post
import protobuf, { Root } from 'protobufjs';
import { getAllMethods, mockResponse } from './mock';
import { OptionType } from './interface';

/** Renders a parsed proto JSON namespace as TypeScript source text. */
export declare function printTypescript(json: protobuf.INamespace, options: OptionType): string;
/** Generates TypeScript from an already-resolved protobufjs Root, optionally scoped to a package. */
export declare function parseProtoRoot(root: Root, options: OptionType, packageName?: string): string;
/** Parses raw .proto source text and returns the generated TypeScript. */
export declare function parseProto(source: string, _options?: OptionType): string;
export { getAllMethods, mockResponse };
declare const _default: {
    parseProto: typeof parseProto;
    parseProtoRoot: typeof parseProtoRoot;
    getAllMethods: typeof getAllMethods;
    mockResponse: typeof mockResponse;
};
export default _default;
<reponame>iamGreedy/gltf2 package gltf2 import ( "github.com/iamGreedy/essence/req" "github.com/pkg/errors" "image" "image/draw" "image/jpeg" "image/png" "path" "path/filepath" ) type Image interface { Name() string ExtensionStructure Extras() *Extras Load(useCache bool) (img *image.RGBA, err error) Cache() *image.RGBA ThrowCache() IsCached() bool UserData() interface{} SetUserData(data interface{}) } type URIImage struct { // nullable cache *image.RGBA // URI *URI // name string extensions *Extensions extras *Extras // None spec userData interface{} } func (s *URIImage) GetExtension() *Extensions { return s.extensions } func (s *URIImage) SetExtension(extensions *Extensions) { s.extensions = extensions } func (s *URIImage) UserData() interface{} { return s.userData } func (s *URIImage) SetUserData(data interface{}) { s.userData = data } func (s *URIImage) Name() string { return s.name } func (s *URIImage) Extensions() *Extensions { return s.extensions } func (s *URIImage) Extras() *Extras { return s.extras } func (s *URIImage) Load(useCache bool) (img *image.RGBA, err error) { if s.IsCached() { return s.Cache(), nil } // setup 'img' rdc, err := req.Standard.Request(s.URI.Data()) if err != nil { return nil, err } defer rdc.Close() // image decode temp, _, err := image.Decode(rdc) if err != nil { return nil, err } // image move img = image.NewRGBA(temp.Bounds()) draw.Draw(img, img.Rect, temp, temp.Bounds().Min, draw.Src) // cache if useCache { // setup cache s.cache = img } return img, nil } func (s *URIImage) Cache() *image.RGBA { return s.cache } func (s *URIImage) ThrowCache() { s.cache = nil } func (s *URIImage) IsCached() bool { return s.cache != nil } type BufferImage struct { // nullable cache *image.RGBA // Mime MimeType BufferView *BufferView // name string extensions *Extensions extras *Extras // None spec userData interface{} } func (s *BufferImage) GetExtension() *Extensions { return s.extensions } func (s *BufferImage) SetExtension(extensions 
*Extensions) { s.extensions = extensions } func (s *BufferImage) UserData() interface{} { return s.userData } func (s *BufferImage) SetUserData(data interface{}) { s.userData = data } func (s *BufferImage) Name() string { return s.name } func (s *BufferImage) Extensions() *Extensions { return s.extensions } func (s *BufferImage) Extras() *Extras { return s.extras } func (s *BufferImage) Load(useCache bool) (img *image.RGBA, err error) { rd, err := s.BufferView.LoadReader() if err != nil { return nil, err } var temp image.Image switch s.Mime { case ImagePNG: temp, err = png.Decode(rd) if err != nil { return nil, err } case ImageJPEG: temp, err = jpeg.Decode(rd) if err != nil { return nil, err } } // img = image.NewRGBA(temp.Bounds()) draw.Draw(img, img.Rect, temp, temp.Bounds().Min, draw.Src) // return img, nil } func (s *BufferImage) Cache() *image.RGBA { return s.cache } func (s *BufferImage) ThrowCache() { s.cache = nil } func (s *BufferImage) IsCached() bool { return s.cache != nil } type SpecImage struct { URI *URI `json:"URI"` // exclusive_require(URI, bufferView) MimeType *MimeType `json:"mimeType"` // BufferView *SpecGLTFID `json:"bufferView"` // exclusive_require(URI, bufferView), dependency(MimeType) Name *string `json:"name,omitempty"` Extensions *SpecExtensions `json:"extensions,omitempty"` Extras *Extras `json:"extras,omitempty"` } func (s *SpecImage) SpecExtension() *SpecExtensions { return s.Extensions } func (s *SpecImage) Scheme() string { return SCHEME_IMAGE } func (s *SpecImage) Syntax(strictness Strictness, root Specifier, parent Specifier) error { switch strictness { case LEVEL3: fallthrough case LEVEL2: fallthrough case LEVEL1: if (s.URI != nil && s.BufferView != nil) || (s.URI == nil && s.BufferView == nil) { return errors.Errorf("Image must have one of 'Image.URI' or 'Image.bufferView'") } if s.BufferView != nil && s.MimeType == nil { return errors.Errorf("Image.bufferView dependency(MimeType)") } } return nil } func (s *SpecImage) To(ctx 
*parserContext) interface{} { if s.URI != nil { res := new(URIImage) res.URI = s.URI if res.URI != nil { switch res.URI.Scheme { case "": fallthrough case "file": dir := ctx.Directory() if dir == "" { dir = "." } res.URI.Path = filepath.Join(dir, filepath.FromSlash(path.Clean("/"+res.URI.Path))) } } if s.Name != nil { res.name = *s.Name } res.extras = s.Extras return res } if s.BufferView != nil { res := new(BufferImage) //res.BufferView = s.BufferView res.Mime = *s.MimeType if s.Name != nil { res.name = *s.Name } res.extras = s.Extras return res } panic("Unreachable") } func (s *SpecImage) Link(Root *GLTF, parent interface{}, dst interface{}) error { if bi, ok := dst.(BufferImage); ok { if !inRange(*s.BufferView, len(Root.BufferViews)) { return errors.Errorf("Image.BufferView linking fail") } bi.BufferView = Root.BufferViews[*s.BufferView] } return nil }
// ParseUuid parses a 32 digit hexadecimal number (that might contain hyphens) // representing an UUID. func ParseUuid(input string) (*UUID, error) { var u UUID i := 0 for _, r := range input { switch { case r == '-' && i&1 == 0: continue case r >= '0' && r <= '9' && i < 32: u[i/2] |= byte(r-'0') << uint(4-i&1*4) case r >= 'a' && r <= 'f' && i < 32: u[i/2] |= byte(r-'a'+10) << uint(4-i&1*4) case r >= 'A' && r <= 'F' && i < 32: u[i/2] |= byte(r-'A'+10) << uint(4-i&1*4) default: return nil, fmt.Errorf("invalid UUID: %q", input) } i += 1 } if i != 32 { return nil, fmt.Errorf("invalid UUID: %q", input) } return &u, nil }
<reponame>zorro-del-caribe/config-loader
import {test} from 'zora';
import {load as config} from '../src/index';

// Happy path: load() returns a getter keyed by "<file>.<property path>",
// resolving values from the JS modules in the config directory.
test('load config: should return a getter function', t => {
  const conf = config({directory: './test/config'});
  t.equal(conf('foo.prop'), 'value', 'should return the value for ./test/config/foo.js');
  t.equal(conf('bar.another.thing'), 'yeah', 'should work on nested value');
});

// Default (strict) mode: unknown path segments throw rather than return undefined.
test('throws error if can not find the conf value', t => {
  const conf = config({directory: './test/config'});
  t.throws(() => conf('foo.bar'), 'missing "bar" part in the config');
});

// Returned config objects are frozen: mutation throws under 'use strict'.
test('throw error if modify conf in strict mode', t => {
  'use strict';
  const conf = config({directory: './test/config'});
  t.throws(() => {
    conf('bar.another').thing = 'woot';
  }, TypeError);
});

// With strict:false, unknown paths resolve to undefined instead of throwing.
test('return undefined if invalid path with non strict flag', t => {
  const conf = config({directory: './test/config', strict: false});
  t.eq(conf('foo.bar'), undefined, 'should be undefined');
});
-- Advent-of-Code style toboggan puzzle: '#' marks a tree on the map.
tree :: Char
tree = '#'

main :: IO ()
main = do
  inputs <- lines <$> readFile "input.txt"
  -- Part 1: slope (down 1, right 3).
  let answer1 = solve inputs 1 3
  print answer1
  -- Part 2: product of tree counts over five slopes.
  let result1 = solve inputs 1 1
  let result2 = solve inputs 1 3
  let result3 = solve inputs 1 5
  let result4 = solve inputs 1 7
  let result5 = solve inputs 2 1
  let answer2 = result1 * result2 * result3 * result4 * result5
  print answer2

-- Count trees hit descending the map by (di, ri) per step; the map repeats
-- horizontally, hence the `mod r stepLength`.
-- NOTE(review): the inner `solve` in the where-clause deliberately shadows
-- the outer one; the recursive calls refer to the inner 3-argument version.
solve :: [String] -> Int -> Int -> Int
solve inputs di ri = solve 0 0 0
  where
    stepLength = length $ head inputs
    bottomCoord = length inputs
    step d r = (inputs!!d!!mod r stepLength) == tree
    solve d r c
      | d >= bottomCoord = c
      | otherwise = solve (d + di) (r + ri) (if step d r then c + 1 else c)
<gh_stars>1-10
package com.dangxy.androidpractice.custom;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.support.v7.widget.AppCompatTextView;
import android.text.Layout.Alignment;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.AttributeSet;

/**
 * A TextView that draws its text centered via a StaticLayout built with
 * ALIGN_CENTER, sized to the view's current width.
 *
 * @author dangxy99
 * @date 2018/3/6
 */
public class CenterTextView extends AppCompatTextView {

    // Rebuilt whenever the view size changes; lazily created in onDraw as a
    // safety net (see below).
    private StaticLayout myStaticLayout;
    private TextPaint tp;

    public CenterTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        initView();
    }

    /**
     * (Re)builds the centered StaticLayout from the current text, text size,
     * color, and view width.
     */
    private void initView() {
        tp = new TextPaint(Paint.ANTI_ALIAS_FLAG);
        tp.setTextSize(getTextSize());
        tp.setColor(getCurrentTextColor());
        myStaticLayout = new StaticLayout(getText(), tp, getWidth(),
                Alignment.ALIGN_CENTER, 1.0f, 0.0f, false);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // BUGFIX: guard against drawing before onSizeChanged has run
        // (previously a NullPointerException). Also picks up text set after
        // the last size change on the next draw pass.
        if (myStaticLayout == null) {
            initView();
        }
        myStaticLayout.draw(canvas);
    }
}
// Called when the command is initially scheduled. void AlignTarget::Initialize() { Limelight::setLights(true); intake->setExtended(true); aligner.Reset(); successes = 0; fails = 0; startTime = std::chrono::steady_clock::now(); }
/**
 * This is a wrapper for Template specific information. It's part of
 * the RunData object and can extract the information it needs to do
 * the job directly from the data.getParameters().
 *
 * @author <a href="mailto:[email protected]">Dave Bryson</a>
 * @author <a href="mailto:[email protected]">Jason van Zyl</a>
 * @author <a href="mailto:[email protected]">Henning P. Schmiedehausen</a>
 * @version $Id$
 */
public class TemplateInfo
{
    /** Constants for tempStorage hash map. */
    public static final String NAVIGATION_TEMPLATE = "00navigation_template00";

    /** Constants for tempStorage hash map. */
    public static final String LAYOUT_TEMPLATE = "00layout_template00";

    /** Constants for tempStorage hash map. */
    public static final String SERVICE_NAME = "template_service";

    /* Handle to the RunData object. */
    private RunData data = null;

    /* Place to store information about templates. */
    private Map<String, Object> tempStorage = null;

    /**
     * Constructor
     *
     * @param data A Turbine RunData object.
     */
    public TemplateInfo(RunData data)
    {
        this.data = data;
        tempStorage = new HashMap<>(10);
    }

    /**
     * Get the value of navigationTemplate.
     *
     * @return A String with the value of navigationTemplate.
     */
    public String getNavigationTemplate()
    {
        return getString(TemplateInfo.NAVIGATION_TEMPLATE);
    }

    /**
     * Set the value of navigationTemplate.
     *
     * @param v Value to assign to navigationTemplate.
     */
    public void setNavigationTemplate(String v)
    {
        setTemp(TemplateInfo.NAVIGATION_TEMPLATE, v);
    }

    /**
     * Get the value of screen for the RunData parameters. This
     * information comes from PathInfo or a QueryString.
     *
     * @return A String with the value of screen.
     */
    public String getScreenTemplate()
    {
        return data.getParameters().getString(URIConstants.CGI_TEMPLATE_PARAM, null);
    }

    /**
     * Set the value of screen. This is really just a method to hide
     * using the RunData Parameter.
     *
     * @param v Value to assign to screen.
     */
    public void setScreenTemplate(String v)
    {
        data.getParameters().setString(URIConstants.CGI_TEMPLATE_PARAM, v);

        // We have changed the screen template so
        // we should now update the layout template
        // as well. We will use the template service
        // to help us out.
        try
        {
            TemplateService templateService = (TemplateService)TurbineServices.getInstance().getService(TemplateService.SERVICE_NAME);
            setLayoutTemplate(templateService.getLayoutTemplateName(v));
        }
        catch (Exception e)
        {
            /*
             * do nothing.
             * NOTE(review): failures to resolve the layout template are
             * deliberately swallowed here -- the screen template is still set;
             * confirm whether this should at least be logged.
             */
        }
    }

    /**
     * Get the value of layout.
     *
     * @return A String with the value of layout.
     */
    public String getLayoutTemplate()
    {
        String value = getString(TemplateInfo.LAYOUT_TEMPLATE);
        return value;
    }

    /**
     * Set the value of layout.
     *
     * @param v Value to assign to layout.
     */
    public void setLayoutTemplate(String v)
    {
        setTemp(TemplateInfo.LAYOUT_TEMPLATE, v);
    }

    /**
     * Get the value of Template context. This will be cast to the
     * proper Context by its Service.
     *
     * @param name The name of the template context.
     * @return An Object with the Value of context.
     */
    public Object getTemplateContext(String name)
    {
        return getTemp(name);
    }

    /**
     * Set the value of context.
     *
     * @param name The name of the template context.
     * @param v Value to assign to context.
     */
    public void setTemplateContext(String name, Object v)
    {
        setTemp(name, v);
    }

    /**
     * Get the value of service.
     *
     * @return A String with the value of service.
     */
    public String getService()
    {
        return getString(TemplateInfo.SERVICE_NAME);
    }

    /**
     * Set the value of service.
     *
     * @param v Value to assign to service.
     */
    public void setService(String v)
    {
        setTemp(TemplateInfo.SERVICE_NAME, v);
    }

    /**
     * Get an object from temporary storage.
     *
     * @param name A String with the name of the object.
     * @return An Object.
     */
    public Object getTemp(String name)
    {
        return tempStorage.get(name);
    }

    /**
     * Get an object from temporary storage, or a default value.
     *
     * @param name A String with the name of the object.
     * @param def An Object, the default value.
     * @return An Object.
     */
    public Object getTemp(String name, Object def)
    {
        // NOTE(review): HashMap.get does not throw for absent keys, so this
        // try/catch appears redundant; the null check alone does the work.
        try
        {
            Object val = tempStorage.get(name);
            return (val != null) ? val : def;
        }
        catch (Exception e)
        {
            return def;
        }
    }

    /**
     * Put an object into temporary storage.
     *
     * @param name A String with the name of the object.
     * @param value An Object, the value.
     */
    public void setTemp(String name, Object value)
    {
        tempStorage.put(name, value);
    }

    /**
     * Return a String[] from the temp hash map.
     *
     * @param name A String with the name of the object.
     * @return A String[], or null when the name is absent.
     */
    public String[] getStringArray(String name)
    {
        String[] value = null;
        Object object = getTemp(name, null);
        if (object != null)
        {
            value = (String[]) object;
        }
        return value;
    }

    /**
     * Return a String from the temp hash map.
     *
     * @param name A String with the name of the object.
     * @return A String, or null when the name is absent.
     */
    public String getString(String name)
    {
        String value = null;
        Object object = getTemp(name, null);
        if (object != null)
        {
            value = (String) object;
        }
        return value;
    }

    /**
     * Remove an object from the temporary storage.
     *
     * @param name A String with the name of the object.
     * @return The object that was removed or <code>null</code>
     *         if the name was not a key.
     */
    public Object removeTemp(String name)
    {
        return tempStorage.remove(name);
    }

    /**
     * Returns all the available names in the temporary storage.
     *
     * @return A object array with the keys.
     */
    public Object[] getTempKeys()
    {
        return tempStorage.keySet().toArray();
    }
}
<reponame>alrusov/workers
package workers

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/alrusov/misc"
)

//----------------------------------------------------------------------------------------------------------------------------//

func TestComplexZero(t *testing.T) {
	testComplex(t, 0)
}

func TestComplexOne(t *testing.T) {
	testComplex(t, 1)
}

func TestComplexSmall(t *testing.T) {
	testComplex(t, 10)
}

func TestComplexBig(t *testing.T) {
	testComplex(t, 1000000)
}

// testComplex runs the worker pool over n elements under several
// MaxWorker/MultithreadedThreshold combinations and verifies both the data
// transformation (each element negated) and the thread accounting.
func testComplex(t *testing.T, n int) {
	// Constructor contract: a nil processor must be rejected.
	w, err := New(nil)
	if err == nil {
		t.Fatalf("New(nil): Expected error but not found")
	}
	if w != nil {
		t.Fatalf("New(nil): not null worker returned")
	}

	//-------------------------------//

	check := func(fn string, maxWorker int, multithreadedThreshold int) {
		tw := &testWorker{
			data:      make([]int, n),
			clean:     false,
			withDelay: false,
		}
		for i := 0; i < n; i++ {
			tw.data[i] = i
		}

		w, err := New(
			tw,
			MaxWorker(maxWorker),
			MultithreadedThreshold(multithreadedThreshold),
		)
		if err != nil {
			t.Fatalf("New(...): %s", err.Error())
		}

		err = w.Do()
		if err != nil {
			t.Fatalf("%s: %s", fn, err.Error())
		}

		// Every element must have been negated exactly once.
		for i := 0; i < n; i++ {
			if tw.data[i] != -i {
				t.Fatalf("%s: found %d, expected %d", fn, tw.data[i], -i)
			}
		}

		// Derive the expected thread counts: single-threaded when only one
		// worker is allowed or the threshold exceeds the element count;
		// otherwise capped at min(maxWorker, n).
		st := int32(0)
		threads := maxWorker
		if maxWorker == 1 || multithreadedThreshold > n {
			st = 1
			threads = 1
		}
		if n == 0 {
			st = 0
		} else if n == 1 {
			st = 1
		}
		if threads > n {
			threads = n
		}

		if tw.singleThread != st {
			t.Fatalf("%s: single threads count is %d, expected %d", fn, tw.singleThread, st)
		}
		if tw.started != int32(threads) {
			t.Fatalf("%s: started %d threads, expected %d", fn, tw.started, threads)
		}
		if tw.finished != int32(threads) {
			t.Fatalf("%s: finished %d threads, expected %d", fn, tw.finished, threads)
		}
	}

	//-------------------------------//

	check("Singlethreaded 1", 1, 0)
	check("Singlethreaded 2", 1, n)
	check("Singlethreaded 3", 1, n+1)

	check("Multithreaded 1.1", 5, 0)
	check("Multithreaded 1.2", 5, n)
	check("Multithreaded 1.3", 5, n+1)

	check("Multithreaded 2.1", 100, 0)
	check("Multithreaded 2.2", 100, n)
	check("Multithreaded 2.3", 100, n+1)

	if n < 500 {
		check("Multithreaded 3.1", n, 0)
		check("Multithreaded 3.2", n, n)
		check("Multithreaded 3.3", n, n+1)
	}
}

//----------------------------------------------------------------------------------------------------------------------------//

func BenchmarkComplexSingleClean(b *testing.B) {
	benchmarkComplex(b, b.N, true, true)
}

func BenchmarkComplexMultiClean(b *testing.B) {
	benchmarkComplex(b, b.N, true, false)
}

func BenchmarkComplexSingle(b *testing.B) {
	benchmarkComplex(b, b.N, false, true)
}

func BenchmarkComplexMulti(b *testing.B) {
	benchmarkComplex(b, b.N, false, false)
}

// benchmarkComplex measures Do() over n elements; clean skips the per-element
// work, single forces the single-threaded path via the threshold.
func benchmarkComplex(b *testing.B, n int, clean bool, single bool) {
	tw := &testWorker{
		data:      make([]int, n),
		clean:     clean,
		withDelay: true,
	}
	for i := 0; i < n; i++ {
		tw.data[i] = i
	}

	multithreadedThreshold := 0
	if single {
		multithreadedThreshold = n + 1
	}

	w, err := New(
		tw,
		Flags(FlagFailOnError),
		MultithreadedThreshold(multithreadedThreshold),
	)
	if err != nil {
		b.Fatalf("New: %s", err.Error())
	}

	b.ResetTimer()
	err = w.Do()
	b.StopTimer()

	if err != nil {
		b.Fatalf("Do: %s", err.Error())
	}
}

//----------------------------------------------------------------------------------------------------------------------------//

// testWorker is a processor stub that negates its elements and counts, via
// atomics, how many workers started/finished and whether the single-threaded
// path was used.
type testWorker struct {
	data         []int
	singleThread int32
	started      int32
	finished     int32
	counter      int32
	clean        bool
	withDelay    bool
}

func (w *testWorker) ElementsCount() int {
	return len(w.data)
}

func (w *testWorker) GetElement(idx int) interface{} {
	return w.data[idx]
}

func (w *testWorker) ProcInitFunc(workerID int) {
	// A negative workerID marks the single-threaded execution path.
	if workerID < 0 {
		atomic.AddInt32(&w.singleThread, 1)
	}
	atomic.AddInt32(&w.started, 1)
}

func (w *testWorker) ProcFinishFunc(workerID int) {
	atomic.AddInt32(&w.finished, 1)
}

func (w *testWorker) ProcFunc(idx int, data interface{}) (err error) {
	if !w.clean {
		w.data[idx] = -w.data[idx]
		atomic.AddInt32(&w.counter, 1)
		if w.withDelay {
			// Simulates per-element work for the benchmarks.
			misc.Sleep(100 * time.Microsecond)
		}
	}
	return
}

//----------------------------------------------------------------------------------------------------------------------------//
<filename>patterns_behavioral/chain_of_responsibility_patterns/src/main/java/com/example/chain/demo/builderchain_origin/Test.java
package com.example.chain.demo.builderchain_origin;

public class Test {

    public static void main(String[] args) {
        User user = new User();
        user.id = "1000";
        test(user);
    }

    /**
     * java 引用测试 (Java reference-semantics demo): user1 and user2 alias the
     * same User object, so mutating through one reference is visible through
     * the other -- both prints after the assignment show id "2000".
     */
    public static void test(User user) {
        User user1;
        User user2;
        user1 = user2 = user;
        System.out.println(user1);
        System.out.println(user2);
        System.out.println("=========");
        user2.id = "2000";
        System.out.println(user1);
        System.out.println(user2);
    }

    public static class User {
        // Intentionally a public mutable field to make the aliasing visible.
        public String id;

        @Override
        public String toString() {
            return "User{" +
                    "id='" + id + '\'' +
                    '}';
        }
    }
}
def image_name(self):
    """Return the display name of the image this instance was booted from.

    Returns None implicitly when no instance is attached, so callers must
    handle a missing name.
    """
    if self.instance is None:
        return
    # image_get presumably performs an image-service lookup using the request
    # credentials -- TODO confirm against its definition elsewhere in the project.
    image = image_get(self._request, self.instance.image['id'])
    return image.name
/** * Repositories. */ package pl.softfly.integ.endpoint.repository;
/**
 * Appends the given argument node as the next element of this command's
 * argument chain.
 * <p>
 * Once the command has been registered this becomes a no-op: the underlying
 * builder tree is left untouched and {@code this} is returned unchanged
 * (NOTE(review): assumes registration freezes the builder -- confirm).
 *
 * @param ezArgument argument node to chain after this command
 * @return this command, for fluent chaining
 */
public EzCommand then(EzArgument ezArgument) {
    // Guard: silently ignore structural changes after registration.
    if (registered) return this;
    literalArgumentBuilder.then(ezArgument.requiredArgumentBuilder);
    return this;
}
Commuters wait to board the Paris Metro (Photo via YouTube) This article originally appeared on VICE France According to a recent report, every single woman who's used Parisian public transport has been sexually harassed in some way or another. The report is quite detailed in its definitions of assault but for some reason it doesn't list frotters. A "frotter," for those privileged enough not to know, is someone who derives sexual pleasure by rubbing their bits and pieces up against unsuspecting people in crowded public spaces. Concerned, I headed over to French health and sex forum Doctissimo, where I found a chat room called "Transport Fantasies." It was full of frotter testimonies—the kind of stories that'll make you think twice before taking the underground ever again. Ten minutes of scrolling through the pages made me understand why Japan has introduced women-only carriages on some of their trains—an idea that is also currently being lobbied for by some UK politicians. Curious to find out what this was all about, I reached out to some of the people in the forum. After some rather colorful exchanges—most notably one with a cross-dresser who liked to be photographed without underwear on the train—I was contacted by Maxine, who for obvious reasons didn't feel like telling me his real name. The 38-year-old computer engineer is part of a subway frotting community that he says has been active for roughly 20 years. I gave him a call to ask just what the hell he was up to. VICE: Hey Maxine, so you are into fantasizing about women on trains. What's the deal with that? Maxine: Basically, proximity on public transport is something that hugely excites me and has done for years. I get a lot of heat for it on different forums, to be honest. People calling me things like "pervert," "sick," or "disturbed"—you hear those words a lot. But I just want to clarify: I am not a sex offender.
I've never made a salacious remark to a girl or touched her breasts or anything like that. I'm not what you'd call a predator. If you say so. Can you tell me how this fantasy came about? Until I was 19, I lived in this tiny little village. When I moved to Paris, I discovered the underground and it was really interesting. It was exotic to me: the faces, the hair, the legs, the bra straps. A lot of people see commuting as a chore but I love that physical proximity, the humidity and the way I can see women's bodies right up close, as if I have a zoom lens or something. It all started with this one businesswoman in a skirt suit—I suppose what you'd call a MILF these days—who was forced to push her breasts up against me on the train because it was so packed. I was a kid and it had a huge effect on me. Ever since that day, I've spent a lot of my spare time on the metro. The winter is no good because of things like flu and heavy jackets—I can't get quite as close to the girls as I'd like to. The way it works is that I spot a pretty girl and try to sit down next to her. Then I touch up against them very lightly and begin to fantasize about some different scenarios. Below, watch our documentary 'China's Elite Female Bodyguards': Are you aware of the fact that guys like you are a real nuisance to women? You shouldn't look at everything as so black and white. Some girls probably get annoyed because they aren't feeling that sexy at the particular time of day. Right after they've finished work for example. They are probably taken by surprise. But some others actually enjoy the attention. I don't rub myself on them like a dog or anything. I go soft and establish a sort of physical closeness, which may—or may not—result in something more. When I was single, several girls smiled at me in the subway. Sometimes you could see they were interested. Other times, people just give me evils. I'm pretty sure that most people don't even notice me touching up against them. 
I'm more of a "gentleman" frotter. Does your partner know about your frottage? No, not at all. I don't think she'd get it. After the media storm about subway frotters, I've heard her discuss the issue angrily with her girlfriends. But you know, I'm very active on these internet forums and talk a lot with women who have the same kind of fantasies. A lot of guys might cheat on their partners but I've found a way to remain faithful while still being able to live out my sexual fantasy. Are you aware that what you do is basically sexual assault? Like I've said, there is a distinction. I've never insulted a woman, shown my penis, or tried to stick my hand up their skirt. I condemn all of those things. As far as I know, proximity in public transport isn't a crime—even if it sometimes results in uncontrolled erections. Look at nightclubs. It's not strange to rub up against a stranger while you're dancing, you know? Guys who do that aren't considered perverts. Why are things on a train different? It's a fantasy, just like doctor waiting room scenarios. Photo via Wikimedia Commons. None of these people are frotters. Well, apart from the fact no one gets felt up without their consent in those scenarios. I've always acted correctly. Just have a look online, you'll see I'm not the only one interested in this. There's so many kinky videos that start with two strangers talking on a train. It's become a cliché. No one has ever protested, called for censorship or had to apologize for sexual assault. Go on YouPorn, there's a category called "fantasy." Just try and type subway in there and you'll find pages upon pages of videos that start with stroking on a subway train. Right.
import { ResetPasswordPage } from "eri"; import { createCognitoUser } from "../../cognito"; import { ERRORS, TEST_IDS } from "../../constants"; export default function ResetPassword() { return ( <ResetPasswordPage data-test-id={TEST_IDS.resetPasswordPage} onSubmit={async ({ code, email, password, setSubmitError }) => new Promise((resolve, reject) => { createCognitoUser(email).confirmPassword(code, password, { onSuccess: () => resolve(), // eslint-disable-next-line @typescript-eslint/no-explicit-any onFailure: (e: any) => { switch (e.code) { case "CodeMismatchException": setSubmitError( "Incorrect verification code, check the data you have entered and try again" ); break; case "ExpiredCodeException": setSubmitError( "This verification code has expired, please request another one and try again" ); break; case "NetworkError": setSubmitError(ERRORS.network); break; default: setSubmitError( "Something went wrong, check the data you have entered and try again" ); } reject(JSON.stringify(e)); }, }); }) } /> ); }
Steam for Linux, the digital distribution platform developed by Valve, has been patched again, this time bringing a fix for a bug that corrupted textures on Intel machines. Steam for Linux is still in the Beta stage of development and Valve is making a lot of changes to the client. Highlights of the new Steam for Linux update: • An audio hang in games on systems with XDG_RUNTIME_DIR set (Kubuntu 12.10) has been fixed; • A crash which occurred on startup and which was caused by a corrupt ClientRegistry.blob file has been fixed; • A bug w/overlay corrupting textures on Intel GPU machines has been repaired. A complete list of changes can be found in the official announcement. Keep in mind that this is Beta software, so problems are bound to appear. Download Steam for Linux installer 1.0.0.22 right now from Softpedia.
// PutFileSplitWriter writes multiple files to PFS by splitting up the data
// that is written to it, using the given delimiter and the targetFileDatums /
// targetFileBytes limits to decide where one file ends and the next begins.
// When overwrite is true, writing starts from offset 0 instead of appending.
// NOTE: PutFileSplitWriter returns an io.WriteCloser; you must call Close on it when
// you are done writing.
func (c APIClient) PutFileSplitWriter(repoName string, commitID string, path string, delimiter pfs.Delimiter, targetFileDatums int64, targetFileBytes int64, overwrite bool) (io.WriteCloser, error) {
	var overwriteIndex *pfs.OverwriteIndex
	if overwrite {
		// Zero-valued index: overwrite from the beginning of the file.
		overwriteIndex = &pfs.OverwriteIndex{0}
	}
	return c.newPutFileWriteCloser(repoName, commitID, path, delimiter, targetFileDatums, targetFileBytes, overwriteIndex)
}
// The 'archiver' dependency includes an API called 'Unarchive' to extract archive files. That API uses the
// archive file extension to determine the archive type, and a local file path to extract the archive to.
// We therefore need to use the file name as it was in Artifactory, and not the file name which was downloaded.
// To achieve this, we added a new implementation of the 'Unarchive' func and use it instead of the default one.
func Unarchive(localArchivePath, originArchiveName, destinationPath string) error {
	// Pick the unarchiver by the ORIGINAL (remote) file name's extension.
	uaIface, err := byExtension(originArchiveName)
	if err != nil {
		return err
	}
	u, ok := uaIface.(archiver.Unarchiver)
	if !ok {
		return errorutils.CheckError(errors.New("format specified by source filename is not an archive format: " + originArchiveName))
	}
	// Extract the locally downloaded file, whatever it happens to be named.
	return u.Unarchive(localArchivePath, destinationPath)
}
<filename>udpconn_test.go<gh_stars>0 package main import ( "errors" "net" "os" "syscall" "time" ) type UDPConnPacket struct { data []byte oob []byte flags int addr *net.UDPAddr err error } type UDPConnMock struct { network string laddr *net.UDPAddr closed bool written map[string][][]byte toRead []*UDPConnPacket readDeadline time.Time } func (u *UDPConnMock) Close() error { if u.closed { return errors.New("connection already closed") } u.closed = true return nil } func (u *UDPConnMock) File() (f *os.File, err error) { return nil, nil } func (u *UDPConnMock) LocalAddr() net.Addr { return u.laddr } func (u *UDPConnMock) Read(b []byte) (int, error) { n, _, err := u.ReadFromUDP(b) return n, err } func (u *UDPConnMock) ReadFrom(b []byte) (int, net.Addr, error) { return u.ReadFromUDP(b) } func (u *UDPConnMock) ReadFromUDP(b []byte) (int, *net.UDPAddr, error) { n, _, _, addr, err := u.ReadMsgUDP(b, nil) return n, addr, err } func (u *UDPConnMock) ReadMsgUDP(b, oob []byte) (n, oobn, flags int, addr *net.UDPAddr, err error) { if u.toRead == nil || len(u.toRead) == 0 { return 0, 0, 0, nil, newTimeout() } packet := u.toRead[0] u.toRead = u.toRead[1:] for i := 0; i < len(packet.data); i++ { b[i] = packet.data[i] } if packet.oob != nil { for i := 0; i < len(packet.oob); i++ { oob[i] = packet.oob[i] } } return len(packet.data), len(packet.oob), packet.flags, packet.addr, packet.err } func (u *UDPConnMock) RemoteAddr() net.Addr { return nil } func (u *UDPConnMock) SetDeadline(t time.Time) error { return nil } func (u *UDPConnMock) SetReadBuffer(bytes int) error { return nil } func (u *UDPConnMock) SetReadDeadline(t time.Time) error { u.readDeadline = t return nil } func (u *UDPConnMock) SetWriteBuffer(bytes int) error { return nil } func (u *UDPConnMock) SetWriteDeadline(t time.Time) error { return nil } func (u *UDPConnMock) SyscallConn() (syscall.RawConn, error) { return nil, nil } func (u *UDPConnMock) Write(b []byte) (int, error) { return 0, nil } func (u *UDPConnMock) 
WriteMsgUDP(b, oob []byte, addr *net.UDPAddr) (n, oobn int, err error) { return 0, 0, nil } func (u *UDPConnMock) WriteTo(b []byte, addr net.Addr) (int, error) { return 0, nil } func (u *UDPConnMock) WriteToUDP(b []byte, addr *net.UDPAddr) (int, error) { if u.written == nil { u.written = map[string][][]byte{} } endpoint := addr.String() if _, found := u.written[endpoint]; !found { u.written[endpoint] = make([][]byte, 0, 1) } u.written[endpoint] = append(u.written[endpoint], b) return len(b), nil } type timeoutError struct{} func (timeoutError) Error() string { return "i/o timeout" } func (timeoutError) Timeout() bool { return true } func (timeoutError) Temporary() bool { return false } func newTimeout() timeoutError { return timeoutError{} }
Copyright by WATE - All rights reserved WATE 6 On Your Side Staff - KNOXVILLE (WATE) - The man thought to be motioning children to his vehicle in West Knox County has been located and determined not to be a threat, said deputies. On Wednesday, the Knox County Sheriff's Office issued the description of the vehicle the man was last seen driving: a four door, fire engine red sedan. He was reported motioning children inside his vehicle near Blue Grass Elementary School on Sunday and Tuesday around 5 p.m. Deputies described him as a 60-year-old white man with gray hair. He was wearing wire rimmed glasses. On Friday morning, deputies said the man was found. Officials have not released his name. Deputies said the man lives in the area of the reported incidents and was waving to the children. A witness came forth with information saying they saw the man waving to one of the children, according to deputies.
def _sync_without_cleanup(
    resource_df: DataFrame, sync_db: sqlalchemy.engine.base.Engine
) -> DataFrame:
    """Synchronize course records into the sync database, skipping cleanup.

    Thin wrapper around sync_to_db_without_cleanup that pins the identity
    column ("id") and resource name for courses.

    Args:
        resource_df: DataFrame of course rows to synchronize.
        sync_db: SQLAlchemy engine for the sync database.

    Returns:
        The DataFrame returned by sync_to_db_without_cleanup
        (presumably the synchronized rows -- TODO confirm against its definition).
    """
    return sync_to_db_without_cleanup(
        resource_df=resource_df,
        identity_columns=["id"],
        resource_name=COURSES_RESOURCE_NAME,
        sync_db=sync_db,
    )
/* Returns true when PHI is a loop close phi node: a PHI whose result is a
   scalar SSA register and which has exactly one argument (the single edge
   leaving the loop).  */

static bool
scalar_close_phi_node_p (gimple phi)
{
  /* Reject anything that is not a PHI over a scalar register
     (e.g. virtual operands or non-PHI statements).  */
  if (gimple_code (phi) != GIMPLE_PHI
      || !is_gimple_reg (gimple_phi_result (phi)))
    return false;

  /* Loop close phi nodes have a single argument.  */
  return (gimple_phi_num_args (phi) == 1);
}
/** * Created by Victor Oliveira on 06/12/18. * github: victorlopejg */ public class ListPresenterFake implements ListContract.Presenter { private ListContract.View mListView; ListPresenterFake(ListContract.View mListView) { this.mListView = mListView; } @Override public void listRoom() { Room room = new Room(); List<Room> list = new ArrayList<>(); for (int i = 0; i < Room.ROOMS.length; i++) { room.setId(i + 1); room.setName(Room.ROOMS[i]); room.setStatus(0); list.add(room); } mListView.showList(list); } @Override public void updateStatus(Room room, boolean status) { } @Override public void openDetails(Room room) { } }
Sun Belt Conference to air games on American Sports Network NEW ORLEANS - The Sun Belt Conference will air football and basketball games throughout the 2016-17 season on the American Sports Network. The announcement was made Wednesday. While games on ASN will be broadcast on “over-the-air” and cable affiliates, they will also be available without blackout restrictions on ESPN3 and the WatchESPN app. The partnership, made possible through a sublicense agreement with ESPN, allows for a minimum of eight football games and a minimum of 10 basketball games from the Sun Belt to be aired through ASN. The sublicense agreement reached by ESPN and ASN allows the Sinclair Broadcast Group-owned network to produce and distribute Sun Belt Conference games via Sinclair affiliate television stations and other over the air and cable stations nationwide. Sinclair Broadcast Group is the largest owner of television stations in the United States, currently owning or operating 173 television stations in 81 markets nationwide and having affiliations with all the major networks. Wednesday's announcement also included word that the following Sun Belt football games will be part of the Sun Belt and ASN venture. Friday, September 2 Ball State at Georgia State - 6 p.m. Saturday, September 3 Boise State at UL Lafayette - 11 a.m. Saturday, September 10 Old Dominion at Appalachian State - 2:30 p.m. Georgia Southern at South Alabama - 6 p.m. Saturday, September 17 South Alabama at UL Lafayette - TBA All times are central and subject to change. Additional games will be released at later dates by ASN and the Sun Belt. About the American Sports Network American Sports Network is a sports programming division of Sinclair Networks Group, LLC, a division of Sinclair Broadcast Group, Inc.
ASN produces broadcasts of sporting events, including NCAA Division I multiple sports, NCAA Division II football, local high school sports under the “Thursday Night Lights” and “Friday Night Rivals” brands, Major League Soccer’s Real Salt Lake City and D.C. United franchises, and the college football Arizona Bowl. The company, which launched in 2014, produced more than 375 events last year and is now a dedicated 24/7 broadcast network in 22 markets. For more information, visit ASN’s website at www.americansportsnet.com. About the Sun Belt Conference The Sun Belt Conference’s “Together We Rise” motto isn’t just hyperbole. It’s proven fact on many different levels, not the least being its place in the hierarchy of collegiate athletics. The league’s embracing of innovations and advancements, its constant efforts to adapt in the ever-changing world of college athletics, and its continuing process of evolving to better serve its membership has been well documented since the day the conference was founded in 1976. But one thing hasn’t changed: Since that founding, the Sun Belt Conference has always been a league of opportunity. Athletic and academic programs who have shown progressive thinking and the desire to improve have always found a home in the Sun Belt. For student-athletes, Sun Belt institutions are leaders in providing career services and the tools to assist in the pursuit of personal goals. Sun Belt members all employ full-time academic advisors and have on average more than 20 tutors for academic support. During the 2015-16 academic year, Sun Belt member institutions awarded over $50 million in scholarships to more than 3,100 student-athletes, while also providing life experiences such as team building, domestic and foreign travel, community service, mentoring, and recognition through honors and awards. 
The Sun Belt Conference’s membership consists of 11 football members – Appalachian State, Arkansas State, Georgia Southern, Georgia State, Idaho, UL Lafayette, UL Monroe, New Mexico State, South Alabama, Texas State and Troy. Little Rock and UT Arlington compete in all-sports as non-football members while Idaho and New Mexico State are football only members. Coastal Carolina joined the Sun Belt Conference in all sports but football in 2016. The CCU football program will begin Sun Belt play in 2017. For more information on the Sun Belt Conference, visit www.sunbeltsports.org
/**
 * Copyright 2005-2015 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krad.datadictionary.validation.processor;

import org.junit.Before;
import org.kuali.rice.core.api.uif.DataType;
import org.kuali.rice.krad.datadictionary.AttributeDefinition;
import org.kuali.rice.krad.datadictionary.BusinessObjectEntry;
import org.kuali.rice.krad.datadictionary.validation.AttributeValueReader;
import org.kuali.rice.krad.datadictionary.validation.DictionaryObjectAttributeValueReader;
import org.kuali.rice.krad.datadictionary.validation.ErrorLevel;
import org.kuali.rice.krad.datadictionary.validation.constraint.CaseConstraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.Constraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.MustOccurConstraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.PrerequisiteConstraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.ValidCharactersConstraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.WhenConstraint;
import org.kuali.rice.krad.datadictionary.validation.result.ConstraintValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.DictionaryValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.ProcessorResult;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Shared base fixture for constraint-processor tests. Builds an "address"
 * BusinessObjectEntry whose attributes carry the constraints under test, and
 * offers process()/processRaw() helpers that run the processor supplied by
 * the concrete subclass via {@link #newProcessor()}.
 *
 * @author <NAME> (<EMAIL>)
 */
public abstract class BaseConstraintProcessorTest<P extends ConstraintProcessor> {

    // Attribute definitions for the mock address object built in setUp().
    protected AttributeDefinition street1Definition;
    protected AttributeDefinition street2Definition;
    protected AttributeDefinition stateDefinition;
    protected AttributeDefinition postalCodeDefinition;
    protected AttributeDefinition countryDefinition;

    // Dictionary entry that aggregates the attributes and their constraints.
    protected BusinessObjectEntry addressEntry;
    // Shared result accumulator inspected by subclasses after processing.
    protected DictionaryValidationResult dictionaryValidationResult;

    // Processor under test, created by the subclass.
    protected P processor;

    // Constraints exposed for direct use in subclass tests.
    protected CaseConstraint countryIsUSACaseConstraint;
    protected MustOccurConstraint topLevelConstraint;

    @SuppressWarnings("boxing")
    @Before
    public void setUp() throws Exception {
        processor = newProcessor();
        dictionaryValidationResult = new DictionaryValidationResult();
        dictionaryValidationResult.setErrorLevel(ErrorLevel.NOCONSTRAINT);

        addressEntry = new BusinessObjectEntry();

        // --- Must-occur constraints: "postalCode OR (city AND state)" ---
        List<MustOccurConstraint> mustOccurConstraints = new ArrayList<MustOccurConstraint>();

        PrerequisiteConstraint postalCodeConstraint = new PrerequisiteConstraint();
        postalCodeConstraint.setPropertyName("postalCode");

        PrerequisiteConstraint cityConstraint = new PrerequisiteConstraint();
        cityConstraint.setPropertyName("city");

        PrerequisiteConstraint stateConstraint = new PrerequisiteConstraint();
        stateConstraint.setPropertyName("state");

        List<PrerequisiteConstraint> cityStateDependencyConstraints = new ArrayList<PrerequisiteConstraint>();
        cityStateDependencyConstraints.add(cityConstraint);
        cityStateDependencyConstraints.add(stateConstraint);

        // min == max == 2: both city and state must be present.
        MustOccurConstraint cityStateConstraint = new MustOccurConstraint();
        cityStateConstraint.setMin(2);
        cityStateConstraint.setMax(2);
        cityStateConstraint.setPrerequisiteConstraints(cityStateDependencyConstraints);

        // This basically means that at least one of the two child constraints must be satisfied...
        // either the postal code must be entered or _both_ the city and state
        topLevelConstraint = new MustOccurConstraint();
        topLevelConstraint.setMax(2);
        topLevelConstraint.setMin(1);
        topLevelConstraint.setPrerequisiteConstraints(Collections.singletonList(postalCodeConstraint));
        topLevelConstraint.setMustOccurConstraints(Collections.singletonList(cityStateConstraint));

        mustOccurConstraints.add(topLevelConstraint);

        addressEntry.setMustOccurConstraints(mustOccurConstraints);

        // --- Case constraint: when country is "USA", state becomes required ---
        List<WhenConstraint> whenConstraints = new ArrayList<WhenConstraint>();

        PrerequisiteConstraint prerequisiteConstraint = new PrerequisiteConstraint();
        prerequisiteConstraint.setPropertyName("state");

        WhenConstraint whenConstraint1 = new WhenConstraint();
        whenConstraint1.setValue("USA");
        whenConstraint1.setConstraint(prerequisiteConstraint);

        whenConstraints.add(whenConstraint1);

        countryIsUSACaseConstraint = new CaseConstraint();
        countryIsUSACaseConstraint.setCaseSensitive(false);
        // countryIsUSACaseConstraint.setFieldPath("country");
        countryIsUSACaseConstraint.setWhenConstraint(whenConstraints);

        // --- Attribute definitions with per-field constraints ---
        List<AttributeDefinition> attributes = new ArrayList<AttributeDefinition>();

        // street1 must look like "123 Some Ave".
        ValidCharactersConstraint street1ValidCharactersConstraint = new ValidCharactersConstraint();
        street1ValidCharactersConstraint.setValue("regex:\\d{3}\\s+\\w+\\s+Ave");

        street1Definition = new AttributeDefinition();
        street1Definition.setName("street1");
        street1Definition.setValidCharactersConstraint(street1ValidCharactersConstraint);
        attributes.add(street1Definition);

        street2Definition = new AttributeDefinition();
        street2Definition.setName("street2");
        attributes.add(street2Definition);

        AttributeDefinition cityDefinition = new AttributeDefinition();
        cityDefinition.setName("city");
        attributes.add(cityDefinition);

        // Non-regex value: state may only contain the literal characters A-D.
        ValidCharactersConstraint stateValidCharactersConstraint = new ValidCharactersConstraint();
        stateValidCharactersConstraint.setValue("ABCD");

        stateDefinition = new AttributeDefinition();
        stateDefinition.setName("state");
        stateDefinition.setValidCharactersConstraint(stateValidCharactersConstraint);
        attributes.add(stateDefinition);

        // postalCode must be a LONG strictly above 1000 and at most 99999.
        postalCodeDefinition = new AttributeDefinition();
        postalCodeDefinition.setName("postalCode");
        postalCodeDefinition.setExclusiveMin("1000");
        postalCodeDefinition.setInclusiveMax("99999");
        postalCodeDefinition.setDataType(DataType.LONG);
        attributes.add(postalCodeDefinition);

        countryDefinition = new AttributeDefinition();
        countryDefinition.setName("country");
        countryDefinition.setCaseConstraint(countryIsUSACaseConstraint);
        attributes.add(countryDefinition);

        addressEntry.setAttributes(attributes);
    }

    /**
     * Runs the processor against the named attribute of the given object and
     * returns only the first constraint validation result.
     */
    protected ConstraintValidationResult process(Object object, String attributeName, Constraint constraint) {
        return processRaw(object, attributeName, constraint).getFirstConstraintValidationResult();
    }

    /**
     * Runs the processor against the named attribute of the given object and
     * returns the full ProcessorResult. When attributeName is null the whole
     * object is passed as the value instead of a single attribute.
     */
    protected ProcessorResult processRaw(Object object, String attributeName, Constraint constraint) {
        AttributeValueReader attributeValueReader = new DictionaryObjectAttributeValueReader(object,
                "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry);
        attributeValueReader.setAttributeName(attributeName);

        Object value = attributeName != null ? attributeValueReader.getValue() : object;
        return processor.process(dictionaryValidationResult, value, constraint, attributeValueReader);
    }

    /** Concrete subclasses supply the processor instance under test. */
    protected abstract P newProcessor();
}
/**
 * Prints an empty row within a single slot: a '|' border on each side with
 * (mCardWidth - 2) spaces between them. No trailing newline is emitted.
 */
private void printEmpty() {
    System.out.print("|");
    int innerWidth = mCardWidth - 2;
    for (int col = 0; col < innerWidth; ++col) {
        System.out.print(" ");
    }
    System.out.print("|");
}
def logarithmic_transformation2D(array_2D):
    """Apply a normalized logarithmic transform to a 2-D array.

    Scales log(1 + |x|) so that the element whose raw value equals the
    array's maximum maps to 1.0 (scale factor 1 / log(1 + |max|)).
    """
    magnitudes = numpy.abs(array_2D)
    peak = numpy.abs(numpy.amax(array_2D))
    scale = 1 / numpy.log(1 + peak)
    return scale * numpy.log(1 + magnitudes)
package com.cadiducho.telegrambotapi;

import lombok.Getter;
import lombok.Setter;

/**
 * Represents a photo to be sent.
 */
@Getter
@Setter
public class InputMediaPhoto extends InputMedia {

    /**
     * Builds a photo media item, fixing the media type discriminator to "photo".
     *
     * @param media file to send (file_id, URL, or attach:// reference -- TODO confirm against InputMedia)
     * @param caption optional caption for the photo
     * @param parseMode optional formatting mode for the caption
     * @param disableContentTypeDetection optional flag forwarded to the parent type
     */
    public InputMediaPhoto(String media, String caption, ParseMode parseMode, Boolean disableContentTypeDetection) {
        super("photo", media, caption, parseMode, disableContentTypeDetection);
    }
}
/**
 * The abstract class provides a set of methods to write outputs into the specified archive file.
 */
public abstract class AbstractStaticWeaveOutputHandler{
    // Target archive stream; subclasses are expected to assign it.
    protected JarOutputStream outputStreamHolder=null;

    /**
     * Create a directory in the target directory, or insert a directory entry into the output stream.
     * @param dirPath directory path to create
     * @throws IOException
     */
    abstract public void addDirEntry(String dirPath)throws IOException;

    /**
     * Write entry bytes into the target; this is usually called when a class has been transformed.
     * @param targetEntry archive entry to write
     * @param entryBytes transformed class bytes
     * @throws IOException
     */
    abstract public void addEntry(JarEntry targetEntry,byte[] entryBytes)throws IOException;

    /**
     * Write an entry into the target; this method usually copies the original class into the target.
     * @param jis stream positioned at the entry's contents
     * @param entry archive entry to copy
     * @throws IOException
     */
    abstract public void addEntry(InputStream jis,JarEntry entry) throws IOException,URISyntaxException;

    /**
     * Close the output stream if one was opened; safe to call when no stream exists.
     * @throws IOException
     */
    public void closeOutputStream() throws IOException {
        if(outputStreamHolder!=null){
            outputStreamHolder.close();
        }
    }

    /**
     * Get the output stream instance.
     * @return the archive output stream, or null if none has been opened
     */
    public JarOutputStream getOutputStream(){
        return this.outputStreamHolder;
    }

    // This is part of the ugly workaround for a design flaw
    // in the JDK zip API: the entry will not be written into the target zip file
    // properly unless it goes through this method. Copies the stream in 8 KB chunks.
    protected void readwriteStreams(InputStream in, OutputStream out) throws IOException {
        int numRead;
        byte[] buffer = new byte[8*1024];
        while ((numRead = in.read(buffer,0,buffer.length)) != -1) {
            out.write(buffer,0,numRead);
        }
    }
}
def is_extend_triggered(self, threshold=0.7, limit=None):
    """Decide whether a storage extension should be triggered.

    Returns a (triggered, utilization) pair. Extension is triggered only when
    utilization has reached the threshold AND the total size has not already
    hit the optional limit (a falsy limit disables the size cap).
    """
    utilization = self.check_utilization()
    if utilization < threshold:
        # Still under the utilization threshold: nothing to do yet.
        return False, utilization
    capped = bool(limit) and self.zdb_total_size >= limit
    if capped:
        # Size cap reached: refuse to extend further.
        return False, utilization
    return True, utilization
import axios from 'axios' import React, { useContext } from 'react' import { useMutation } from 'react-query' import { Redirect } from 'react-router-dom' import UserForm from '../components/UserForm' import { AppContext } from '../store/app-context' const postUser = async (newUser) => await (await axios.post('https://60f834089cdca000174552b0.mockapi.io/api/vi/users', newUser)).data function CreateUser() { const [flashMessage, setFlashMessage] = useContext(AppContext) const mutation = useMutation<any, any>((newUser) => postUser(newUser), { onSuccess: (data) => { setFlashMessage( `New User Created - Id: ${data.id} Name: ${data.first_name} ${data.last_name}` ) }, }) const { isLoading, isError, error, isSuccess } = mutation const onSubmit = async (data) => { mutation.mutate(data) } if (isSuccess) { return <Redirect to="/" /> } console.log('error', error); return ( <div> <h2>New User</h2> {isError && <div>An error occurred: {error.message}</div>} {isLoading && <div>Loading...</div>} <UserForm submitText="Create" submitAction={onSubmit} /> </div> ) } export default CreateUser
/**
 * A utility class to help with building FriendFilterPredicate objects.
 */
public class FriendFilterPredicateBuilder {

    // Keyword sets start out empty and are populated via the with* methods.
    private final HashSet<FriendName> nameKeywords = new HashSet<FriendName>();
    private final HashSet<LogName> logTitleKeywords = new HashSet<LogName>();
    private final HashSet<Tag> tagKeywords = new HashSet<Tag>();

    /**
     * Creates a {@code FriendFilterPredicateBuilder} with the default details.
     */
    public FriendFilterPredicateBuilder() {
        // Nothing to do: the keyword sets are initialized at declaration.
    }

    /**
     * Adds a name keyword to the {@code FriendFilterPredicate} that we are building.
     */
    public FriendFilterPredicateBuilder withNameSubstring(String nameSubstring) {
        nameKeywords.add(new FriendName(nameSubstring));
        return this;
    }

    /**
     * Adds a log title keyword to the {@code FriendFilterPredicate} that we are building.
     */
    public FriendFilterPredicateBuilder withLogTitleSubstring(String logTitleSubstring) {
        logTitleKeywords.add(new LogName(logTitleSubstring));
        return this;
    }

    /**
     * Adds a tag keyword to the {@code FriendFilterPredicate} that we are building.
     */
    public FriendFilterPredicateBuilder withTagSubstring(String tagSubstring) {
        tagKeywords.add(new Tag(tagSubstring));
        return this;
    }

    /**
     * Builds and returns the FriendFilterPredicate with the data in the builder.
     */
    public FriendFilterPredicate build() {
        return new FriendFilterPredicate(nameKeywords, logTitleKeywords, tagKeywords);
    }
}
package jp.pepper_atelier_akihabara.qisdk_wrapper.action;

import com.aldebaran.qi.Consumer;
import com.aldebaran.qi.Function;
import com.aldebaran.qi.Future;
import com.aldebaran.qi.sdk.builder.GoToBuilder;
import com.aldebaran.qi.sdk.builder.LookAtBuilder;
import com.aldebaran.qi.sdk.builder.TransformBuilder;
import com.aldebaran.qi.sdk.object.actuation.Frame;
import com.aldebaran.qi.sdk.object.actuation.FreeFrame;
import com.aldebaran.qi.sdk.object.actuation.GoTo;
import com.aldebaran.qi.sdk.object.actuation.LookAt;
import com.aldebaran.qi.sdk.object.actuation.LookAtMovementPolicy;
import com.aldebaran.qi.sdk.object.geometry.Transform;
import com.aldebaran.qi.sdk.object.geometry.Vector3;

import jp.pepper_atelier_akihabara.qisdk_wrapper.QLAction;
import jp.pepper_atelier_akihabara.qisdk_wrapper.QLPepper;
import jp.pepper_atelier_akihabara.qisdk_wrapper.value.QLHuman;
import jp.pepper_atelier_akihabara.qisdk_wrapper.value.QLFrame;

/**
 * Action that makes Pepper look at a point, specified as an offset from a base
 * frame (either a {@link QLHuman} or a {@link QLFrame}). By default the robot
 * may turn both its head and its base; see {@link #setHeadOnlyMovement()}.
 */
public class QLLookAt extends QLFrameAction {
    // Default: allow both head and base to move when turning toward the target.
    private LookAtMovementPolicy movementPolicy = LookAtMovementPolicy.HEAD_AND_BASE;

    public QLLookAt(QLPepper qlPepper) {
        super(qlPepper);
        // LookAt runs until cancelled, so this action is always cancelled rather than completing on its own.
        isAlwaysCanceled = true;
        actionTypeList.add(ActionType.Move);
        actionTypeList.add(ActionType.Animate);
    }

    /**
     * Sets the look-at target relative to a human, with a default height of 1.2 m.
     * @param qlHuman base position the offset is measured from
     * @param locationX target coordinate: meters in front of the base point (negative = behind)
     * @param locationY target coordinate: meters to the right of the base point (negative = left)
     * @return this instance, for chaining
     */
    public QLLookAt setDestination(QLHuman qlHuman, double locationX, double locationY){
        return setDestination(qlHuman, locationX, locationY, 1.2);
    }

    /**
     * Sets the look-at target relative to a frame, with a default height of 1.2 m.
     * @param qlFrame base position the offset is measured from
     * @param locationX target coordinate: meters in front of the base point (negative = behind)
     * @param locationY target coordinate: meters to the right of the base point (negative = left)
     * @return this instance, for chaining
     */
    public QLLookAt setDestination(QLFrame qlFrame, double locationX, double locationY){
        return setDestination(qlFrame, locationX, locationY, 1.2);
    }

    /**
     * Sets the look-at target relative to a human.
     * @param qlHuman base position the offset is measured from
     * @param locationX target coordinate: meters in front of the base point (negative = behind)
     * @param locationY target coordinate: meters to the right of the base point (negative = left)
     * @param locationZ target coordinate: meters above the base point
     * @return this instance, for chaining
     */
    public QLLookAt setDestination(QLHuman qlHuman, double locationX, double locationY, double locationZ){
        this.qlHuman = qlHuman;
        this.qlFrame = null; // the two base kinds are mutually exclusive
        this.transform = TransformBuilder.create().fromTranslation(new Vector3(locationX, locationY, locationZ));
        return this;
    }

    /**
     * Sets the look-at target relative to a frame.
     * @param qlFrame base position the offset is measured from
     * @param locationX target coordinate: meters in front of the base point (negative = behind)
     * @param locationY target coordinate: meters to the right of the base point (negative = left)
     * @param locationZ target coordinate: meters above the base point
     * @return this instance, for chaining
     */
    public QLLookAt setDestination(QLFrame qlFrame, double locationX, double locationY, double locationZ){
        this.qlFrame = qlFrame;
        this.qlHuman = null; // the two base kinds are mutually exclusive
        this.transform = TransformBuilder.create().fromTranslation(new Vector3(locationX, locationY, locationZ));
        return this;
    }

    /**
     * Configures the action to turn toward the target with the head only
     * (the base stays put, so the Move action type no longer applies).
     * @return this instance, for chaining
     */
    public QLLookAt setHeadOnlyMovement(){
        this.actionTypeList.remove(ActionType.Move);
        this.movementPolicy = LookAtMovementPolicy.HEAD_ONLY;
        return this;
    }

    @Override
    protected Future<Void> execute() {
        Future<Void> futureVoid = null;
        // makeFrame() (from QLFrameAction) resolves the base + offset into targetFreeFrame.
        futureVoid = makeFrame();
        if(futureVoid != null){
            futureVoid = runLookAt(futureVoid);
        }
        return futureVoid;
    }

    // Chains: resolved FreeFrame -> Frame -> LookAt action -> run it with the configured policy.
    private Future<Void> runLookAt(Future<Void> futureVoid){
        return futureVoid.andThenCompose(new Function<Void, Future<Frame>>() {
            @Override
            public Future<Frame> execute(Void aVoid) throws Throwable {
                return targetFreeFrame.async().frame();
            }
        }).andThenCompose(new Function<Frame, Future<LookAt>>() {
            @Override
            public Future<LookAt> execute(Frame frame) throws Throwable {
                return LookAtBuilder.with(qiContext).withFrame(frame).buildAsync();
            }
        }).andThenCompose(new Function<LookAt, Future<Void>>() {
            @Override
            public Future<Void> execute(LookAt lookAt) throws Throwable {
                lookAt.setPolicy(movementPolicy);
                // Marked successful once the LookAt starts; the run itself only ends on cancellation.
                isSuccess = true;
                return lookAt.async().run();
            }
        }).thenConsume(new Consumer<Future<Void>>() {
            @Override
            public void consume(Future<Void> future) throws Throwable {
                // nop — cancellation/errors are handled by the QLAction framework
            }
        });
    }
}
<gh_stars>0
package units

import (
	"context"
	"fmt"
	"time"

	"github.com/evergreen-ci/evergreen"
	"github.com/evergreen-ci/evergreen/model/event"
	"github.com/evergreen-ci/evergreen/model/host"
	"github.com/evergreen-ci/utility"
	"github.com/mongodb/amboy"
	"github.com/mongodb/amboy/dependency"
	"github.com/mongodb/amboy/job"
	"github.com/mongodb/amboy/registry"
	"github.com/mongodb/grip"
	"github.com/mongodb/grip/message"
	"github.com/mongodb/jasper/options"
	"github.com/pkg/errors"
)

const (
	userDataDoneJobName = "user-data-done"
)

// userDataDoneJob polls a user-data-bootstrapped host for its provisioning
// "done" marker file and, once found, marks the host as provisioned.
type userDataDoneJob struct {
	HostID   string `bson:"host_id" json:"host_id" yaml:"host_id"`
	job.Base `bson:"base" json:"base" yaml:"base"`

	// Cached at runtime by populateIfUnset; not serialized with the job.
	env      evergreen.Environment
	settings *evergreen.Settings
	host     *host.Host
}

func init() {
	registry.AddJobType(userDataDoneJobName, func() amboy.Job {
		return makeUserDataDoneJob()
	})
}

// makeUserDataDoneJob constructs the bare job with its type metadata; callers
// populate the host and scheduling details via NewUserDataDoneJob.
func makeUserDataDoneJob() *userDataDoneJob {
	j := &userDataDoneJob{
		Base: job.Base{
			JobType: amboy.JobType{
				Name:    userDataDoneJobName,
				Version: 0,
			},
		},
	}
	j.SetDependency(dependency.NewAlways())
	return j
}

// NewUserDataDoneJob creates a job that checks if the host is done provisioning
// with user data (if bootstrapped with user data). This check only applies to
// spawn hosts, since hosts running agents check into the server to verify their
// liveliness.
func NewUserDataDoneJob(env evergreen.Environment, hostID string, ts time.Time) amboy.Job {
	j := makeUserDataDoneJob()
	j.HostID = hostID
	j.env = env
	j.SetPriority(1)
	j.SetID(fmt.Sprintf("%s.%s.%s", userDataDoneJobName, j.HostID, ts.Format(TSFormat)))
	// Scope on the host ID so only one of these jobs runs per host at a time.
	j.SetScopes([]string{fmt.Sprintf("%s.%s", userDataDoneJobName, hostID)})
	j.SetShouldApplyScopesOnEnqueue(true)
	// Poll up to 50 times at 20-second intervals for the done file to appear.
	j.UpdateRetryInfo(amboy.JobRetryOptions{
		Retryable:   utility.TruePtr(),
		MaxAttempts: utility.ToIntPtr(50),
		WaitUntil:   utility.ToTimeDurationPtr(20 * time.Second),
	})
	return j
}

func (j *userDataDoneJob) Run(ctx context.Context) {
	defer j.MarkComplete()
	// Log a provision-failed event only once retries are exhausted (or the job
	// will not retry), so transient poll failures are not recorded as failures.
	defer func() {
		if j.HasErrors() && (!j.RetryInfo().ShouldRetry() || j.RetryInfo().GetRemainingAttempts() == 0) {
			event.LogHostProvisionFailed(j.HostID, j.Error().Error())
		}
	}()

	if err := j.populateIfUnset(); err != nil {
		j.AddRetryableError(err)
		return
	}

	// Only hosts still in the starting state are waiting on user data; retry
	// later rather than erroring if the host has not reached that state.
	if j.host.Status != evergreen.HostStarting {
		j.UpdateRetryInfo(amboy.JobRetryOptions{
			NeedsRetry: utility.TruePtr(),
		})
		return
	}

	// The user data script touches this file as its last step; `ls` failing
	// means provisioning has not finished yet, so retry.
	path := j.host.UserDataProvisioningDoneFile()
	if output, err := j.host.RunJasperProcess(ctx, j.env, &options.Create{
		Args: []string{
			j.host.Distro.ShellBinary(),
			"-l", "-c",
			fmt.Sprintf("ls %s", path)}}); err != nil {
		grip.Debug(message.WrapError(err, message.Fields{
			"message": "host was checked but is not yet ready",
			"output":  output,
			"host_id": j.host.Id,
			"distro":  j.host.Distro.Id,
			"job":     j.ID(),
		}))
		j.AddRetryableError(err)
		return
	}

	if j.host.IsVirtualWorkstation {
		// A workstation is unusable without its volume: on failure, mark the
		// host provision-failed and enqueue termination instead of retrying.
		if err := attachVolume(ctx, j.env, j.host); err != nil {
			grip.Error(message.WrapError(err, message.Fields{
				"message": "can't attach volume",
				"host_id": j.host.Id,
				"distro":  j.host.Distro.Id,
				"job":     j.ID(),
			}))
			j.AddError(err)
			j.AddError(j.host.SetStatus(evergreen.HostProvisionFailed, evergreen.User, "decommissioning host after failing to mount volume"))

			terminateJob := NewHostTerminationJob(j.env, j.host, true, "failed to mount volume")
			terminateJob.SetPriority(100)

			j.AddError(amboy.EnqueueUniqueJob(ctx, j.env.RemoteQueue(), terminateJob))
			return
		}
		// Icecream config is best-effort: log but do not fail the job.
		if err := writeIcecreamConfig(ctx, j.env, j.host); err != nil {
			grip.Error(message.WrapError(err, message.Fields{
				"message": "can't write icecream config file",
				"host_id": j.host.Id,
				"distro":  j.host.Distro.Id,
				"job":     j.ID(),
			}))
		}
	}

	if j.host.ProvisionOptions != nil && j.host.ProvisionOptions.SetupScript != "" {
		// Run the spawn host setup script in a separate job to avoid forcing
		// this job to wait for task data to be loaded.
		j.AddError(j.env.RemoteQueue().Put(ctx, NewHostSetupScriptJob(j.env, j.host)))
	}

	j.finishJob()
}

// finishJob marks the host as having completed user data provisioning,
// retrying on failure since the check itself already succeeded.
func (j *userDataDoneJob) finishJob() {
	if err := j.host.SetUserDataHostProvisioned(); err != nil {
		grip.Error(message.WrapError(err, message.Fields{
			"message": "could not mark host that has finished running user data as done provisioning",
			"host_id": j.host.Id,
			"distro":  j.host.Distro.Id,
			"job":     j.ID(),
		}))
		j.AddRetryableError(err)
		return
	}
}

// populateIfUnset lazily resolves the host document, environment, and settings
// for a job that was deserialized from the queue with only HostID set.
func (j *userDataDoneJob) populateIfUnset() error {
	if j.host == nil {
		h, err := host.FindOneId(j.HostID)
		if err != nil {
			return errors.Wrapf(err, "could not find host %s for job %s", j.HostID, j.ID())
		}
		if h == nil {
			return errors.Errorf("could not find host %s for job %s", j.HostID, j.ID())
		}
		j.host = h
	}

	if j.env == nil {
		j.env = evergreen.GetEnvironment()
	}
	if j.settings == nil {
		j.settings = j.env.Settings()
	}

	return nil
}
def init_board(self):
    """Return the initial board state: a dict mapping cell indices 0-8 to 0 (empty)."""
    # Dict comprehension replaces the manual loop; keys and values are identical.
    return {i: 0 for i in range(9)}
class Board:
    """
    Holds all the Square instances and the BWE (Black/White/Empty) state matrix.
    """
    def __init__(self, squares, BWEmatrix=None, leah='noob coder'):
        """
        :param squares: list of Square instances making up the board
        :param BWEmatrix: optional initial list of square states; a fresh list
            is created when omitted
        :param leah: easter-egg attribute, kept for backward compatibility
        """
        # Squares
        self.squares = squares
        # BWE Matrix. The original default of `BWEmatrix=[]` was a shared
        # mutable default: every Board created without an explicit argument
        # shared (and appended to) the SAME list. Use None as sentinel instead.
        self.BWEmatrix = [] if BWEmatrix is None else BWEmatrix
        # Noob
        self.leah = leah

    def draw(self, image):
        """
        Draws the board and classifies the squares (draws the square state on the image).
        """
        for square in self.squares:
            square.draw(image)
            square.classify(image)

    def assignBWE(self):
        """
        Assigns initial setup states to squares and initialises the BWE matrix.

        Each consecutive group of 8 squares gets the pattern
        W, W, E, E, E, E, B, B (two white, four empty, two black).
        Returns the populated BWE matrix (a flat list of 64 state strings).
        """
        for i in range(8):
            self.squares[8 * i + 0].state = 'W'
            self.squares[8 * i + 1].state = 'W'
            self.squares[8 * i + 2].state = 'E'
            self.squares[8 * i + 3].state = 'E'
            self.squares[8 * i + 4].state = 'E'
            self.squares[8 * i + 5].state = 'E'
            self.squares[8 * i + 6].state = 'B'
            self.squares[8 * i + 7].state = 'B'

        for square in self.squares:
            self.BWEmatrix.append(square.state)

        return self.BWEmatrix

    def _transfer_state(self, emptied, target):
        """Mark `emptied` as 'E' and move its previous state onto `target`,
        keeping the BWE matrix in sync with both squares."""
        old = emptied.state
        emptied.state = 'E'
        self.BWEmatrix[emptied.index] = emptied.state
        target.state = old
        self.BWEmatrix[target.index] = target.state

    def updateBWE(self, matches, current):
        """
        Updates the BWE by looking at the two squares that have changed and
        determining which one is now empty. This relies on the distance in RGB
        space returned by classify(): the square whose colour is closer to its
        empty-square colour must now be empty, and its old state is assigned to
        the other changed square.
        """
        # Distances to each square's empty colour (lower = more likely empty).
        distance_one = matches[0].classify(current)
        distance_two = matches[1].classify(current)

        if distance_one < distance_two:
            self._transfer_state(matches[0], matches[1])
        else:
            self._transfer_state(matches[1], matches[0])

    def getBWE(self):
        """
        Converts BWE from a list of strings to a rotated 8x8 numpy array
        (E=0, W=1, B=2). Unknown state strings raise KeyError (the original
        silently reused the previous cell's value — a latent bug).
        """
        mapping = {'E': 0, 'W': 1, 'B': 2}
        bwe = np.zeros((8, 8), dtype=np.int8)
        counter = 0
        for i in range(8):
            for j in range(8):
                bwe[i][j] = mapping[self.BWEmatrix[counter]]
                counter += 1
        # Rotation in return statement
        return np.rot90(bwe, k=1)

    def whichSquares(self, points):
        """
        Returns the squares which a list of points lie within. This is needed
        to filter out changes in the compared images that have nothing to do
        with the game, e.g. an arm. A square containing several of the points
        appears once per contained point (original behaviour preserved).
        """
        matches = []
        for square in self.squares:
            for point in points:
                # pointPolygonTest >= 0 means on or inside the square's contour.
                dist = cv2.pointPolygonTest(square.contours, point, False)
                if dist >= 0:
                    matches.append(square)
        return matches