content
stringlengths
10
4.9M
class BaseTraditionalLearner:
    """
    Base wrapper class for Traditional Learners.

    Args:
        task_type (str):
            Type of the current task. Currently only tabular tasks are supported.
            For more info on the tasks available in AutoPyTorch, see
            `autoPyTorch/constants.py`
        output_type (str):
            Type of output. The string depends on the output of sklearn's
            type_of_target. `see
            <https://scikit-learn.org/stable/modules/generated/sklearn.utils.multiclass.type_of_target.html>`
        optimize_metric (Optional[str]):
            Name of the metric to optimise; when None the default metric for
            the task is used.
        logger_port (int) (default=logging.handlers.DEFAULT_TCP_LOGGING_PORT):
            Port on which the named logger listens.
        random_state (Optional[np.random.RandomState]):
            Source of randomness; a fixed seed of 1 is used when not given.
        name (str, default=''):
            Name of the learner, when not specified, uses the name of the class
    """

    def __init__(self,
                 task_type: str,
                 output_type: str,
                 optimize_metric: Optional[str] = None,
                 logger_port: int = logging.handlers.DEFAULT_TCP_LOGGING_PORT,
                 random_state: Optional[np.random.RandomState] = None,
                 name: Optional[str] = None):

        # The fitted estimator; CatBoost does not implement sklearn's
        # BaseEstimator interface, hence the Union. Set by _prepare_model/_fit.
        self.model: Optional[Union[CatBoost, BaseEstimator]] = None

        self.name = name if name is not None else self.__class__.__name__
        self.logger_port = logger_port
        self.logger = get_named_client_logger(
            name=self.name,
            host='localhost',
            port=logger_port,
        )

        if random_state is None:
            self.random_state = check_random_state(1)
        else:
            self.random_state = check_random_state(random_state)

        # Hyperparameters loaded from the learner's JSON config file.
        self.config = self.get_config()

        # Boolean mask of all-NaN columns, set lazily on the first call to
        # _preprocess and reused for validation/test data.
        self.all_nan: Optional[np.ndarray] = None
        self.num_classes: Optional[int] = None

        self.is_classification = STRING_TO_TASK_TYPES[task_type] not in REGRESSION_TASKS

        self.metric = get_metrics(dataset_properties={'task_type': task_type,
                                                      'output_type': output_type},
                                  names=[optimize_metric] if optimize_metric is not None else None)[0]

    def get_config(self) -> Dict[str, Union[int, str, float, bool]]:
        """
        Load the parameters for the classifier model from
        ../estimator_configs/modelname.json.

        Returns:
            Dict[str, Union[int, str, float, bool]]:
                The loaded configuration, with the strings "True"/"False"
                converted to real booleans (JSON configs store them as text).
        """
        dirname = os.path.dirname(os.path.abspath(__file__))
        config_path = os.path.join(dirname, "../estimator_configs", self.name + ".json")
        with open(config_path, "r") as f:
            config: Dict[str, Union[int, str, float, bool]] = json.load(f)
        for k, v in config.items():
            if v == "True":
                config[k] = True
            if v == "False":
                config[k] = False
        return config

    def _preprocess(self,
                    X: np.ndarray
                    ) -> np.ndarray:
        """
        Preprocess the input set, currently imputes the nan columns.
        Can be used to add more preprocessing functionality

        Args:
            X (np.ndarray):
                input data

        Returns:
            (np.ndarray):
                Output data
        """
        # The mask is computed once (on the training data) and then reused,
        # so validation/test sets drop exactly the same columns.
        if self.all_nan is None:
            self.all_nan = np.all(pd.isnull(X), axis=0)
        X = X[:, ~self.all_nan]
        # Remaining NaNs are replaced in place (copy=False) with zeros.
        X = np.nan_to_num(X, copy=False)
        return X

    @abstractmethod
    def _prepare_model(self,
                       X_train: np.ndarray,
                       y_train: np.ndarray
                       ) -> None:
        """
        Abstract method to prepare model. Depending on the learner, this
        function will initialise the underlying estimator and the objects
        needed to do that

        Args:
            X_train (np.ndarray):
                Input training data
            y_train (np.ndarray):
                Target training data

        Returns:
            None
        """
        raise NotImplementedError

    @abstractmethod
    def _fit(self,
             X_train: np.ndarray,
             y_train: np.ndarray,
             X_val: np.ndarray,
             y_val: np.ndarray) -> None:
        """
        Method that fits the underlying estimator

        Args:
            X_train (np.ndarray):
                Input training data
            y_train (np.ndarray):
                Target training data
            X_val (np.ndarray):
                Input validation data
            y_val (np.ndarray):
                Output validation data

        Returns:
            None
        """
        raise NotImplementedError

    def fit(self,
            X_train: np.ndarray,
            y_train: np.ndarray,
            X_val: np.ndarray,
            y_val: np.ndarray) -> Dict[str, Any]:
        """
        Fit the model (possible using the validation set for early stopping)
        and return the results on the training and validation set.

        Args:
            X_train (np.ndarray):
                Input training data
            y_train (np.ndarray):
                Target training data
            X_val (np.ndarray):
                Input validation data
            y_val (np.ndarray):
                Output validation data

        Returns:
            Dict[str, Any]:
                Dictionary containing the results. see _get_results()
        """
        X_train = self._preprocess(X_train)
        X_val = self._preprocess(X_val)

        self._prepare_model(X_train, y_train)

        self._fit(X_train, y_train, X_val, y_val)

        results = self._get_results(X_train, y_train, X_val, y_val)

        return results

    def score(self, X_test: np.ndarray, y_test: Union[np.ndarray, List]) -> float:
        """
        Score the model performance on a test set.

        Args:
            X_test (np.ndarray):
                Input data
            y_test (Union[np.ndarray, List]):
                Target data

        Returns:
            float: score on the selected metric
        """
        # For classification the metric is fed class probabilities rather
        # than hard labels.
        y_pred = self.predict(X_test, predict_proba=self.is_classification)
        return self.metric(y_test, y_pred)

    def predict(self, X_test: np.ndarray,
                predict_proba: bool = False,
                preprocess: bool = True) -> np.ndarray:
        """
        predict the model performance on a test set.

        Args:
            X_test (np.ndarray):
                Input data
            predict_proba (bool, default=False):
                if task is a classification task, predict the class
                probabilities
            preprocess (bool, default=True):
                Whether to preprocess data or not

        Returns:
            np.ndarray: predictions (class probabilities when predict_proba
            is True)
        """
        assert self.model is not None, "No model found. Can't " \
                                       "predict before fitting. " \
                                       "Call fit before predicting"
        if preprocess:
            X_test = self._preprocess(X_test)
        if predict_proba:
            if not self.is_classification:
                raise ValueError("Can't predict probabilities for a regressor")
            return self.model.predict_proba(X_test)
        y_pred = self.model.predict(X_test)
        return y_pred

    def _get_results(self,
                     X_train: np.ndarray,
                     y_train: np.ndarray,
                     X_val: np.ndarray,
                     y_val: np.ndarray) -> Dict[str, Any]:
        """
        Gather results of the training. The following results are calculated:
            1. val_preds: validation predictions
            2. train_preds: training predictions
            3. val_score: score on validation set
            4. train_score: score on the training set

        Args:
            X_train (np.ndarray):
                Input training data
            y_train (np.ndarray):
                Target training data
            X_val (np.ndarray):
                Input validation data
            y_val (np.ndarray):
                Output validation data

        Returns:
            Dict[str, Any]:
                Dictionary containing the results
        """
        # NOTE(review): the data is expected to be already preprocessed here
        # (fit passes preprocess=False), so callers must not hand in raw data.
        pred_train = self.predict(X_train, predict_proba=self.is_classification, preprocess=False)
        pred_val = self.predict(X_val, predict_proba=self.is_classification, preprocess=False)

        results = dict()

        results["val_preds"] = pred_val.tolist()
        results["labels"] = y_val.tolist()

        results["train_score"] = self.metric(y_train, pred_train)
        results["val_score"] = self.metric(y_val, pred_val)

        return results
def min_digit_len(n: int) -> int:
    """Return the minimum over all factorizations n == a * b of
    max(len(str(a)), len(str(b))).

    Args:
        n: A positive integer.

    Returns:
        The smallest possible digit length of the larger factor.
    """
    # Every divisor pair (a, b) with a * b == n has one member <= sqrt(n),
    # and max(len(a), len(b)) is symmetric, so scanning i up to sqrt(n)
    # covers every pair exactly once — no need to materialise a divisor set.
    best = float("inf")
    for i in range(1, int(n ** 0.5) + 1):
        if n % i == 0:
            best = min(best, max(len(str(i)), len(str(n // i))))
    return best  # n >= 1 always has the divisor 1, so best is an int here


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for testing) does no I/O.
    N = int(input())
    print(min_digit_len(N))
class Telescope:
    """
    Collect useful telescope parameters.

    Args:
        longitude (scalar-like): Earth longitude of the telescope in degrees.
        latitude (scalar-like): Earth latitude of the telescope in degrees.
        elevation (scalar-like): Elevation above sea level in meters.
        fratio (scalar-like): F-ratio (focal length over diameter) of the
            telescope focal plane.
        platescale (scalar-like): Telescope platescale in mm/arcsec.
        throughput (:obj:`float`, :class:`~enyo.etc.efficiency.Efficiency`,
            :class:`~enyo.etc.efficiency.CombinedEfficiency`, optional):
            Throughput from the top of the telescope to the focal plane.
        area (scalar-like, optional): True or effective aperture area in
            square centimeters. Exactly one of ``area`` and ``diameter``
            must be provided.
        diameter (scalar-like, optional): Telescope diameter in meters,
            used to derive the area when ``area`` is not given.
        obstruction (scalar-like, optional): Unitless fraction of the
            aperture lost to the central obstruction; when given, the area
            is scaled by (1 - obstruction).

    Raises:
        ValueError: If both or neither of ``area`` and ``diameter`` are
            provided.
    """

    def __init__(self, longitude, latitude, elevation, fratio, platescale,
                 throughput=1., area=None, diameter=None, obstruction=None):
        self.longitude = longitude
        self.latitude = latitude
        self.elevation = elevation
        self.fratio = fratio
        self.platescale = platescale
        self._throughput = throughput

        # Guard clauses: exactly one of area/diameter must be supplied.
        if area is None and diameter is None:
            raise ValueError('Must provide area or diameter!')
        if area is not None and diameter is not None:
            raise ValueError('Cannot provide area and diameter; provide one or the other.')

        if area is None:
            # Derive the area (cm^2) from the diameter (m).
            self.diameter = diameter
            self.area = numpy.pi*numpy.square(self.diameter*100/2)
        else:
            # Derive the diameter (m) from the area (cm^2).
            self.area = area
            self.diameter = numpy.sqrt(area/numpy.pi)*2/100
            if obstruction is not None:
                warnings.warn('Obstruction and area provided, combining to get effective area.')

        # Scale down to the effective collecting area when an obstruction
        # fraction was given.
        if obstruction is not None:
            self.area *= (1-obstruction)

    @property
    def throughput(self):  #, wave=None):
        """Telescope throughput; raises if it was never defined."""
        if self._throughput is None:
            # TODO: Should this really raise an error...
            raise ValueError('Throughput not defined.')
        return self._throughput
Four giants would be on you, and sometimes it was as much as you could do to see a team-mate’s shirt to pass to. You would stand next to them in the tunnel and see these huge athletes, who were all fast, all consistent, all ever-present. Even after eight years without a trophy, those images are still sharp in the memory when we talk about Arsenal’s struggle to return to the heights they reached with the Invincibles. Of course Arsenal , who face Swansea at 5.30pm on Saturday and Napoli in the Champions League on Tuesday, no longer have the strength of Vieira and Petit, the pace in wide areas of Robert Pires and Freddie Ljungberg and the guile of Dennis Bergkamp, not to mention the firepower of Thierry Henry, Robin van Persie and Ian Wright. After the exodus of the last few years – Cesc Fabregas, Van Persie, Samir Nasri – Arsenal fans await that turning point where strength is being gained instead of lost. They are top of the Premier League and won their first Champions League fixture in Marseille. Has the tide finally turned? From the start I had my reservations. They seemed not to be spending but then came up with one big investment right at the end of the transfer window. From the back: the goalkeeper, Wojciech Szczesny is capable but I would have the keeper of all the other top teams ahead of Arsenal’s. David de Gea (Manchester United), Joe Hart (Manchester City), Petr Cech (Chelsea), Hugo Lloris (Tottenham) and Simon Mignolet (Liverpool) would all get my vote ahead of Szczesny. In defence, Arsenal are decent, but again none of their back four would make a composite Premier League XI. If I look right the way through the Arsenal team, with a view to picking an all-star side from the top six clubs, I would struggle to nominate one Arsenal player. Mesut Özil is the only one who would have a chance. Clearly Arsenal have a fabulous manager, a wonderful stadium and a great fan base. 
You would tend to give Arsène Wenger the benefit of the doubt but the depth of quality is not yet there in this squad. If Aaron Ramsey and Olivier Giroud went back to being the players they were last year Arsenal would start to look fairly average by their standards. Ramsey looks a totally different player this term and will need to sustain that improvement. Theo Walcott is now missing for a few games, and without that depth of quality I wonder how many players they have who are capable of winning trophies. On the other hand, they have points on the board, started well in the cup competitions and are playing with more fluency and confidence. All that looks rosy, but I would not get carried away by these early signs of promise because they still need to strengthen in one or two positions. The Özil deal lifted the spirits of Arsenal fans, and kept the critics quiet, but they still need a couple more of that calibre to be considered title contenders. Giroud has made an excellent start to the season, but, again, he would not make the starting XIs of Manchester United or City. If United have an injury to Wayne Rooney or Robin van Persie they can call on Javier Hernandez or Danny Welbeck. City can choose from Alvaro Negredo, Edin Dzeko and Sergio Agüero. Giroud is a very good player, but no Henry, Wright or Van Persie. I want Arsenal to do well. I love their heritage. As a 13-year-old I was invited there as a potential recruit. Ian Wright looked after me and took me in to see George Graham, the manager. I was starstruck. The distance from home ruled them out but everything about Arsenal impressed me. They always have their badge to fall back on, and will always be attractive to players, for their history, their stadium and location, in London. But I would need to see more evidence of consistent improvement. They have played one or two teams at the right time, notably Sunderland away. The last days of Paolo Di Canio were a cracking time to catch them. 
They have not faced many big tests yet, and when they look at their neighbours, Spurs, who have really caught my eye, they see £100million of new investment. My respect for Wenger stems from the revolutionary effect he has had on the English game. Up until 2004 or so he really made Manchester United stretch. Arsenal were the best team in the country. The Invincibles were sensational. Wenger took sports science and professionalism to a new level, and forced the likes of Sir Alex Ferguson to change their own outlook. That early success still protects Wenger from those who say eight years without a trophy is unacceptable. That barren run is one of the great anomalies because Arsenal have always been well run and wealthy. The confusion starts with their failure to improve the squad over the past few years. The only explanation I can come up with is Wenger’s resistance to inflated transfer fees, which he has always disliked. His calling card was good value signings who turned out to be worth more than they cost. It was as if Wenger could not bring himself to splash out because it was never his way of doing things and he did not agree with it. If you can’t beat them, you just have to join them, even if the prices are inflated. Wenger stood back from that until the Özil transfer, which, I suspect, was to ease the pressure building from Arsenal’s fans, who were angry that no marquee signing had been made earlier in the summer. It was too late to bring two or three in, so they went for one superstar, whatever the cost, to keep everyone content. Özil is a world-class player and has started particularly well. That level of purchase is what the fans want, the club deserve and the team need – and they need to stay at that end of the market if they are to return to the days when Vieira and Petit, Adams and Campbell, made it so hard to keep the ball when all four of them bore down on you.
Mike Huckabee speaks at rally (Screenshot) Fox News host Mike Huckabee on Thursday compared the effort to prevent LGBT people from having equal marriage rights to fighting against Nazi Germany. Speaking at the 2014 March for Marriage in Washington, D.C., Huckabee said that there was “no basis in the law” that gave a single judge the right to strike down same-sex marriage bans, like Sixth Judicial Circuit Judge Chris Piazza did in Arkansas in 2012. “Nothing threatens your personal liberty more than the notion that you would bow your knee to the court system apart from the ultimate rule of the Constitution,” he opined. “And all of the branches of government, all of which are not there to tell you what you cannot do, but to guarantee the freedoms that you are always empowered to have.” “We are under an obligation to obey God and the law, and if necessary, to defy an institution that is out of control,” the former Arkansas governor continued. To make his point, Huckabee quoted from a letter that Martin Luther King’s Jr. wrote while spending eight days in the Birmingham Jail for fighting to end segregation. “One may well ask, ‘How can you advocate breaking some laws and obeying others?'” King had written. “The answer is found in the fact that there are two types of laws: there are just laws, and there are unjust laws. I would agree with St. Augustine that ‘An unjust law is no law at all.'” Huckabee continued reading from King’s letter: “We can never forget that everything Hitler did in Germany was “legal” and everything the Hungarian freedom fighters did in Hungary was ‘illegal.’ It was ‘illegal’ to aid and comfort a Jew in Hitler’s Germany. But I am sure that if I had lived in Germany during that time, I would have aided and comforted my Jewish brothers even though it was illegal.” “I wish I had penned those words,” Huckabee exclaimed. 
“But they were penned by someone who understood freedom, and understood that there was a time to stand up against law when it has become unjust. Those are the words that were penned in 1954 by Martin Luther King Jr. in his letter from the Birmingham Jail.” “I wish he were here today to say in the people in the building this one, Mr. Supreme Court justices, Madam Supreme Court justices, your role is only to interpret the law, to make sure that it somehow meshes with the Constitution, not that it messes with the Constitution!” he shouted. In fact, Coretta Scott King, the widow of Dr. King, has repeatedly suggested that her husband would have backed equal rights for LGBT people. “Freedom from discrimination based on sexual orientation is surely a fundamental human right in any great democracy, as much as freedom from racial, religious, gender, or ethnic discrimination,” she told the National Gay and Lesbian Task Force in 2000. Watch the video below from the National Organization for Marriage, broadcast June 19, 2014.
<reponame>tangfeixiong/go-for-docker // +build !cgo package server import ( "fmt" // "time" "github.com/tangfeixiong/go-to-docker/pb" // "github.com/tangfeixiong/go-to-openstack-bootcamp/kopos/kopit/pkg/gopacketctl" ) func (m *myService) sniffEtherNetworking(req *pb.EthernetSniffingData) (*pb.EthernetSniffingData, error) { resp := new(pb.EthernetSniffingData) resp.StateCode = 999 resp.StateMessage = "Not running with CGO disabled" return resp, fmt.Errorf(resp.StateMessage) // if nil == req || "" == req.Iface { // resp.StateCode = 10 // resp.StateMessage = "Request required" // return resp, fmt.Errorf("Request required") // } // content, err := gopacketctl.PcapdumpOnce(req.Iface, time.Second*3) // if nil != err { // resp.StateCode = 100 // resp.StateMessage = err.Error() // return resp, err // } // resp.StatsAndPackets = content // return resp, err }
// Average returns the average of a slice of numbers. func Average[T Number](values []T) (ret T) { for _, v := range values { ret += v } return ret / T(len(values)) }
// ResizeToNRGBA64 resizes the image, and returns a pointer to an image that
// uses the NRGBA64 format.
//
// Use this if you intend to write the image to a 16-bits-per-sample PNG
// file.
func (fp *FPObject) ResizeToNRGBA64() (*image.NRGBA64, error) {
	dstFPImage, err := fp.resizeMain()
	if err != nil {
		return nil, err
	}
	// Convert the internal floating-point result to the 16-bit NRGBA64 format.
	nrgba64 := fp.convertDst_NRGBA64(dstFPImage)
	return nrgba64, nil
}
// IsTypeInterface reports whether v is an interface. func IsTypeInterface(t reflect.Type) bool { if t == reflect.TypeOf(nil) { return false } return t.Kind() == reflect.Interface }
package chapter4_While_DoWhile_For_Nested_Loops;

import java.util.Scanner;

/*
 * DO WHILE LOOP exercise:
 * Read two numbers from the user and print their sum, repeating for as
 * long as the user answers `true` to the start-over prompt.
 */
public class AddNumbers {

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);

        // Seeded true so the body runs at least once, exactly like the
        // do/while form of this loop.
        boolean keepGoing = true;
        while (keepGoing) {
            System.out.println("Enter the first number");
            double first = scanner.nextDouble();

            System.out.println("Enter the second number");
            double second = scanner.nextDouble();

            System.out.println("The sum is " + (first + second));

            System.out.println("Would you like to start over: [true] or [false]?");
            keepGoing = scanner.nextBoolean();
        }
        scanner.close();
    }
}
#include "Document.h"

#include <SDL2/SDL_log.h>

#include "DocumentElement.h"
#include "HTMLCanvasElement.h"
#include "Body.h"

// JavaScriptCore constructor callback: builds the native Document instance
// and populates the JS-visible properties a minimal DOM consumer expects.
JSC_CONSTRUCTOR(Document::Constructor) {

    Document &document = CreateNativeInstance();

    document.object.setProperty("body", Body::CreateJSObject({}));
    document.object.setProperty("documentElement", DocumentElement::CreateJSObject({}));
    document.object.setProperty("readyState", JSC::Value("complete")); // pretend the DOM is ready.

    return document.object;
}

// JavaScriptCore finalizer callback: releases the native instance when the
// JS object is garbage collected.
JSC_FINALIZER(Document::Finalizer) {
    FreeNativeInstance(object);
}

// document.createElement(tagName): only 'canvas' is backed by a real native
// element; anything else logs a warning and returns an empty default object.
JSC_FUNCTION(Document::createElement) {

    std::string elementName = JSC::Value(argv[0]).toString().getUTF8String();

    if (elementName == "canvas") {
        return HTMLCanvasElement::CreateJSObject({});
    }
    else {
        SDL_LogWarn(0, "Document.createElement('%s') is currently not supported.\n", elementName.c_str());
        return JSC::Object::MakeDefault();
    }
}

// Lazily builds (once) and returns the JSC class definition for Document,
// wiring up the constructor, finalizer and static functions declared above.
JSC::Class &Document::GetClassRef()
{
    if (!_class)
    {
        // Must be static: JSC keeps a pointer to this table for the lifetime
        // of the class definition.
        static JSStaticFunction staticFunctions[] = {
            { "createElement", Document::createElement, kJSPropertyAttributeDontDelete },
            { 0, 0, 0 }
        };

        JSClassDefinition classDefinition = kJSClassDefinitionEmpty;
        classDefinition.className = "Document";
        classDefinition.attributes = kJSClassAttributeNone;
        classDefinition.staticFunctions = staticFunctions;
        classDefinition.callAsConstructor = Document::Constructor;
        classDefinition.finalize = Document::Finalizer;
        _class = JSC::Class(&classDefinition);
    }

    return _class;
}
import { parsePostSummary } from '@lib/notion/utils';
import { PostSummary, Tag } from '@lib/types';
import { Client } from '@notionhq/client/build/src';
import {
  CheckboxPropertyValue,
  Filter,
  MultiSelectFilter,
  MultiSelectProperty,
} from '@notionhq/client/build/src/api-types';

export const notionClient = new Client({
  auth: process.env.NOTION_INTEGRATION_TOKEN,
});

const isProduction = process.env.NODE_ENV === 'production';

/**
 * Query the Notion database for posts.
 *
 * In production only published posts are returned; in development the
 * published filter is omitted so drafts show up too. An optional
 * multi-select filter (e.g. a tag) further narrows the query.
 */
export async function getPublishedPosts(
  multiSelectFilter?: MultiSelectFilter,
): Promise<PostSummary[]> {
  // Build the filter list conditionally instead of pushing a filter and
  // popping it back off in development.
  const filters: Filter[] = [];

  if (isProduction) {
    filters.push({
      property: 'published',
      checkbox: {
        equals: true,
      },
    });
  }

  if (multiSelectFilter) {
    filters.push(multiSelectFilter);
  }

  const { results } = await notionClient.databases.query({
    database_id: process.env.NOTION_PAGE_ID,
    filter: {
      and: filters,
    },
    sorts: [
      {
        property: 'created',
        direction: isProduction ? 'descending' : 'ascending',
      },
    ],
  });

  return results.map((result) => {
    const { id, title, summary, tags } = parsePostSummary(result);
    return { id, title, summary, tags };
  });
}

/**
 * Retrieve a single post summary by page id.
 *
 * @throws Error when the post is unpublished and we are in production.
 */
export async function getPostSummary(postId: string) {
  const response = await notionClient.pages.retrieve({
    page_id: postId,
  });

  const isPublished = (response.properties?.published as CheckboxPropertyValue)
    .checkbox;

  if (!isPublished && isProduction) {
    // Give the error a message so failures are diagnosable upstream.
    throw Error(`Post ${postId} is not published`);
  }

  return parsePostSummary(response);
}

/** Return every tag option configured on the database's `tags` property. */
export async function getCategories(): Promise<(string | undefined)[]> {
  const { properties } = await notionClient.databases.retrieve({
    database_id: process.env.NOTION_PAGE_ID,
  });

  return (properties?.tags as MultiSelectProperty).multi_select.options.map(
    (option) => option.name,
  );
}

/**
 * Count how many published posts carry each tag, sorted by descending count.
 */
export async function getAllPostsWithTag(): Promise<Tag[]> {
  const validTags = (await getPublishedPosts()).flatMap((post) => post.tags);

  // Single pass over the tags instead of a filter per distinct tag (O(n)
  // rather than O(n * distinct)).
  const counts = new Map<string, number>();
  for (const name of validTags) {
    counts.set(name, (counts.get(name) ?? 0) + 1);
  }

  return [...counts.entries()]
    .map(([label, count]) => ({ label, count }))
    .sort((a, b) => b.count - a.count);
}
(Reuters) - Singer-guitarist Glen Campbell, the “Rhinestone Cowboy” who went on a farewell tour to play hits such as “Wichita Lineman” and “Gentle on My Mind” before Alzheimer’s disease robbed him of his talents, died on Tuesday at the age of 81. Campbell died in Nashville at an Alzheimer’s facility surrounded by his family, his publicist Sanford Brokaw said. “It is with the heaviest of hearts that we announce the passing of our beloved husband, father, grandfather, and legendary singer and guitarist, Glen Travis Campbell, at the age of 81, following his long and courageous battle with Alzheimer’s disease,” his family said in a statement posted on the singer’s official website. Campbell announced in June 2011 that he was suffering from Alzheimer’s. The “Gentle on My Mind” singer then embarked on a nationwide farewell tour that ended in November 2012. Campbell began his career as a well-regarded recording session guitarist in Los Angeles before becoming a fixture on the U.S. music charts, radio and television in the 1960s and ‘70s. He won six Grammy Awards and had nine No. 1 songs in a career of more than 50 years. He released a final studio album in June 2017, called “Adios,” that was recorded after the tour wrapped up. The tour was captured in a documentary aired in 2015 by CNN, “Glen Campbell: I’ll Be Me,” that movingly captured him struggling with the disease. His song “I’m Not Going to Miss You” from the movie’s soundtrack won a Grammy Award for best country song. Related Coverage Factbox: Eight facts about veteran country musician Glen Campbell His death on Tuesday brought tributes from country music stars. Singer Brad Paisley thanked Campbell on Twitter “for the artistry, grace & class you brought to country music. You were a shining light in so many ways.” Dolly Parton released a short tribute video and tweeted that he “was one of the greatest voices of all time. 
I will always love you, Glen!” On his last tour, Campbell had three of his eight children playing in his backup band and providing emotional support. He clearly had memory problems on the tour, even forgetting in an interview that he suffered from Alzheimer’s. While he sometimes stumbled over lyrics or had trouble with his guitar work, critics said he still showed flashes of brilliance. “His fingers are still able to float over his guitar neck with a fluid ease and his voice can hit notes that lesser vocalists ... can barely brush against,” a Los Angeles Times critic said of a June 2012 performance. In 2010 Campbell began having occasional trouble recalling lyrics or guitar licks. He was formally diagnosed with Alzheimer’s that year at age 75 and revealed it in a People magazine interview in July 2011. “Glen is still an awesome guitar player and singer,” his wife, Kim Woollen, told People. “But if he flubs a lyric or gets confused onstage, I wouldn’t want people to think, ‘What’s the matter with him? Is he drunk?’” FILE PHOTO: American country music artist Glen Campbell performs during the Country Music Association (CMA) Music Festival in Nashville, Tennessee June 7, 2012. REUTERS/Harrison McClary/File Photo Campbell was one of 12 children of two sharecroppers in southwestern Arkansas. He immediately took to the cheap guitar his father bought him and played in his uncle’s band. After a teenage marriage and divorce, he married again and headed to Los Angeles in the late 1950s. There Campbell became part of the “Wrecking Crew,” a group of highly sought-after session players who recorded with Elvis Presley, Frank Sinatra, Dean Martin, the Monkees and Nat King Cole. Campbell also became a touring member of the Beach Boys, filling in for Brian Wilson, who did not like to go on the road. 
When he set out on his own, Campbell had a breakthrough hit with “Gentle on My Mind” in 1967, quickly followed by the break-up song “By the Time I Get to Phoenix” and “Wichita Lineman,” a song about loneliness that Campbell said was his favorite and one of his many collaborations with songwriter Jimmy Webb. Recordings of “Phoenix” and “Gentle on My Mind” earned Campbell five Grammy Awards and the boy from tiny Delight, Arkansas, became a much-sought superstar. He starred opposite John Wayne in the 1969 film “True Grit” and earned an Oscar nomination for singing the movie’s theme song. That same year he began a three-year run on television with “The Glen Campbell Goodtime Hour” variety show. He opened each show with a wall-to-wall grin and proclaimed in a down-home twang, “Hi, I’m Glen Campbell!” Later hits included “Galveston,” “Dreams of the Everyday Housewife” and the huge pop-crossover song “Rhinestone Cowboy” about trying to make it in the music business. He sang “Rhinestone Cowboy” at the 2012 Grammy ceremony, where he was given a lifetime achievement award. Campbell’s stardom led to excess and personal troubles. Starting in 1980, he had a tumultuous relationship with country singer Tanya Tucker, who was more than 20 years younger, and who described him as “the horniest man I’ve ever known.” They recorded duets and toured together and planned to marry but their 14-month affair ended after physical fights and cocaine use. Slideshow (9 Images) Campbell’s third marriage to Sarah Davis, former wife of country singer Mac Davis, also was marred by drug abuse and drinking but he eventually sobered up with fourth wife, Woollen. Campbell relapsed in 2003 and served 10 days in jail for drunken driving in Arizona - a case that produced an embarrassing mugshot of a scowling, disheveled Campbell that went viral on the Internet. 
After his success began to ebb in the 1990s, Campbell concentrated on performing at his theater in the country music resort town of Branson, Missouri, and spent more time pursuing his passion for golf.
Every year bikepacking bags are moving forward in leaps and bounds; after all, much of this gear isn’t even a decade old. To some people’s dismay, most bikepacking bags are no longer made out of the spare bedroom of a share house on old sewing machines. Instead, a healthy injection of money within the industry is resulting in better materials, cleaner designs, lighter weights and increased functionality. This article looks at the latest in bikepacking design and innovation. These are the ideas that will influence the entire industry in the coming years, if they haven’t already. Don’t forget to check out: The Complete List of Bikepacking Bag Manufacturers. Stabilised Rear Seat Packs When packed well, most high-quality bikepacking bags do a pretty good job of staying still on your seatpost. But do a quick and thoughtless packing job and you’ll experience ‘bag sway’ or ‘tail wag’. A handful of bikepacking bag manufacturers have decided to innovate in ways that can prevent swaying, by using metal structures that increase bag rigidity. Brands like PDW simply employ a lightweight drybag rack, while others like Porcelain Rocket, Specialized and Arkel use rail supports. The Bedrock Coconino is a bit different as it uses shaped metal that is connected to the saddle rails to neutralise swaying. Handlebar Cradles Let’s face it, it’s really annoying having a handlebar roll that butts up against your fingers. It’s also annoying when bar bags slowly move downwards towards your front wheel over bumps, or worse, get in contact. But a few innovators are here to ensure that your handlebar bag stays put with specially-designed drybag cradles. The only downside to cradle systems is that they can sometimes be twice the weight of regular bar bags. Look to Specialized, Salsa and Blackburn for your cradle needs. 
Waterproof Bikepacking Bags It seemed crazy that up until the last year or two, waterproof bikepacking bags were essentially non-existent (with the exception of holsters/harnesses + drybags). Now we’re seeing a handful of brands make product that requires no internal dry bags. I must say, it’s pretty nice to be able to pack gear like sleeping bags into the one water-tight bag. Waterproof bags are available from Apidura, Arkel, Specialized, Bike Bag Dude and more. Cuben Fibre Bikepacking Bags When you mutter the words ‘cuben fiber’, it’s normally enough to melt many lightweight gear nerds into oblivion. Luckily for the lightweight crowd, this tech has made its way into the bikepacking scene. Bike Bag Dude recently weighed the five panels required for a custom frame bag; the weight… 9 grams (0.3oz)! While many brands are working with cuben fiber, there are no off-the-shelf products available. Everything is custom made. Bikepacking Bags For The Front Or Rear Arkel are currently working on their Rollpacker bags which are designed to fit both the front and rear of the bike. That’s going to make these bikepacking bags much more versatile and lendable to friends. In addition, the Rollpacker will have a 25 litre capacity option – that’s huge! Integrated Frame Bags Integrated frame bags are excellent for two reasons: the strapless design doesn’t rub your frame, and they look super clean. At the moment you need a frame with custom mounts, but given bike brands like Specialized and Salsa now make bags, I’m sure we’ll soon see mainstream bikes ready for integrated bags. Modular Frame Bags Sometimes you want two bidons for their quick, easy access. Other times it’s all about maximising storage. Whether you want a full or half frame bag, J.Paks can make a custom modular system that velcros together. That’s pretty neat! Bikepacking Bags for Dropper Seatposts For those bikepacking on trails where it gets a bit hairy, there will soon be some great dropper post -compatible bags. 
The Porcelain Rocket prototype looks the best at this stage, but Arkel are marketing their Seatpacker as compatible as well. Handlebar Harnesses and Seat Pack Holsters This design is by no means new, but it’s undoubtedly innovative. The key advantage to these two-piece designs are that you can quickly remove just the drybag, rather than the whole bikepacking bag off the bike. You’ll find lots of brands such as Blackburn, Revelate and Porcelain Rocket using this design. Drybag Zippers Front handlebar drybags are pretty inaccessible on the bike, which is why they’re a popular location to store sleeping gear. But the Specialized Burra Burra drybag changes that by adding a waterproof zipper across the top. Throw your wallet and phone in the front and don’t worry about strapping on those additional accessory bags. What Bikepacking Bag Tech and Innovation Do You Appreciate The Most?
// uhk/acm3743(2).cpp
// Counts inversions of each input sequence with a Fenwick tree (BIT),
// reading test cases until EOF and printing one count per line.
#include <cstdio>
#include <cstring>
#include <algorithm>
using namespace std;

const int kMaxN = 1000000 + 100;
typedef long long ll;

struct Item {
    int index; // original 1-based position in the sequence
    int val;   // value found at that position
};

Item items[kMaxN];
int fenwick[kMaxN]; // Fenwick tree over value ranks
int rankOf[kMaxN];  // rankOf[pos] = rank (1..n) of the value at pos

bool byValue(const Item& lhs, const Item& rhs) {
    return lhs.val < rhs.val;
}

// Sum of tree entries over ranks [1..x].
int prefixSum(int x) {
    int total = 0;
    for (; x > 0; x -= x & (-x))
        total += fenwick[x];
    return total;
}

// Add val at rank x in a tree of size n.
void update(int x, int val, int n) {
    for (; x <= n; x += x & (-x))
        fenwick[x] += val;
}

int main() {
    int n;
    while (scanf("%d", &n) != EOF) {
        for (int i = 1; i <= n; i++) {
            scanf("%d", &items[i].val);
            items[i].index = i;
        }
        // Rank-compress the values: smallest value gets rank 1.
        sort(items + 1, items + 1 + n, byValue);
        for (int i = 1; i <= n; i++)
            rankOf[items[i].index] = i;

        memset(fenwick, 0, sizeof(fenwick));
        ll inversions = 0;
        for (int i = 1; i <= n; i++) {
            // i-1 elements inserted so far; prefixSum(rankOf[i]) of them
            // have a smaller rank, the rest form inversions with element i.
            inversions += i - prefixSum(rankOf[i]) - 1;
            update(rankOf[i], 1, n);
        }
        printf("%lld\n", inversions);
    }
    return 0;
}
L- and T-type Ca2+ channels dichotomously contribute to retinal ganglion cell injury in experimental glaucoma Retinal ganglion cell apoptotic death is the main pathological characteristic of glaucoma, which is the leading cause of irreversible blindness. Disruption of Ca2+ homeostasis plays an important role in glaucoma. Voltage-gated Ca2+ channel blockers have been shown to improve vision in patients with glaucoma. However, whether and how voltage-gated Ca2+ channels are involved in retinal ganglion cell apoptotic death are largely unknown. In this study, we found that total Ca2+ current densities in retinal ganglion cells were reduced in a rat model of chronic ocular hypertension experimental glaucoma, as determined by whole-cell patch-clamp electrophysiological recordings. Further analysis showed that L-type Ca2+ currents were downregulated while T-type Ca2+ currents were upregulated at the later stage of glaucoma. Western blot assay and immunofluorescence experiments confirmed that expression of the CaV1.2 subunit of L-type Ca2+ channels was reduced and expression of the CaV3.3 subunit of T-type Ca2+ channels was increased in retinas of the chronic ocular hypertension model. Soluble tumor necrosis factor-α, an important inflammatory factor, inhibited the L-type Ca2+ current of isolated retinal ganglion cells from control rats and enhanced the T-type Ca2+ current. These changes were blocked by the tumor necrosis factor-α inhibitor XPro1595, indicating that both types of Ca2+ currents may be mediated by soluble tumor necrosis factor-α. The intracellular mitogen-activated protein kinase/extracellular signal-regulated kinase pathway and nuclear factor kappa-B signaling pathway mediate the effects of tumor necrosis factor-α. TUNEL assays revealed that mibefradil, a T-type calcium channel blocker, reduced the number of apoptotic retinal ganglion cells in the rat model of chronic ocular hypertension. 
These results suggest that T-type Ca2+ channels are involved in disrupted Ca2+ homeostasis and apoptosis of retinal ganglion cells in glaucoma, and application of T-type Ca2+ channel blockers, especially a specific CaV3.3 blocker, may be a potential strategy for the treatment of glaucoma.
<reponame>DavidAce/DMRG // // Created by david on 2019-10-13. // #pragma once #include <complex> #include <unsupported/Eigen/CXX11/Tensor> #include <vector> /* clang-format off */ class class_state_finite; class class_model_finite; class class_edges_finite; class class_tensors_finite; class class_mpo_site; class class_algorithm_status; struct tensors_measure_finite; namespace tools::finite::measure{ using Scalar = std::complex<double>; extern void do_all_measurements(const class_tensors_finite & tensors); extern void do_all_measurements(const class_state_finite & state); [[nodiscard]] extern size_t length (const class_tensors_finite & tensors); [[nodiscard]] extern size_t length (const class_state_finite & state); [[nodiscard]] extern size_t length (const class_model_finite & model); [[nodiscard]] extern size_t length (const class_edges_finite & edges); [[nodiscard]] extern long bond_dimension_current (const class_state_finite & state); [[nodiscard]] extern long bond_dimension_midchain (const class_state_finite & state); [[nodiscard]] extern std::vector<long> bond_dimensions_merged (const class_state_finite & state); [[nodiscard]] extern std::vector<long> bond_dimensions (const class_state_finite & state); [[nodiscard]] extern double norm (const class_state_finite & state); // [[nodiscard]] extern double norm_fast (const class_state_finite & state); [[nodiscard]] extern double spin_component (const class_state_finite & state, const Eigen::Matrix2cd &paulimatrix); [[nodiscard]] extern double spin_component (const class_state_finite & state, const std::string & axis); [[nodiscard]] extern Eigen::Tensor<Scalar,1> mps_wavefn (const class_state_finite & state); [[nodiscard]] extern double entanglement_entropy_current (const class_state_finite & state); [[nodiscard]] extern double entanglement_entropy_midchain (const class_state_finite & state); [[nodiscard]] extern std::vector<double> entanglement_entropies (const class_state_finite & state); [[nodiscard]] extern 
std::vector<double> renyi_entropies (const class_state_finite & state, double q); [[nodiscard]] extern double number_entropy_current (const class_state_finite & state); [[nodiscard]] extern double number_entropy_midchain (const class_state_finite & state); [[nodiscard]] extern std::vector<double> number_entropies (const class_state_finite & state); [[nodiscard]] extern std::array<double,3> spin_components (const class_state_finite & state); [[nodiscard]] extern std::vector<double> truncation_errors (const class_state_finite & state); [[nodiscard]] extern std::vector<double> truncation_errors_active (const class_state_finite & state); template<typename state_or_mps_type> [[nodiscard]] double energy_minus_energy_reduced (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, tensors_measure_finite * measurements = nullptr); template<typename state_or_mps_type> [[nodiscard]] double energy (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, tensors_measure_finite * measurements = nullptr); template<typename state_or_mps_type> [[nodiscard]] double energy_per_site (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, tensors_measure_finite * measurements = nullptr); template<typename state_or_mps_type> [[nodiscard]] double energy_variance (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, tensors_measure_finite * measurements = nullptr); template<typename state_or_mps_type> [[nodiscard]] double energy_variance_per_site (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, tensors_measure_finite * measurements = nullptr); template<typename state_or_mps_type> [[nodiscard]] double energy_normalized (const state_or_mps_type & state, const class_model_finite & model, const class_edges_finite & edges, double energy_minimum, double 
energy_maximum, tensors_measure_finite * measurements = nullptr); [[nodiscard]] extern double energy_reduced (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_per_site_reduced (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_minus_energy_reduced (const class_tensors_finite & tensors); [[nodiscard]] extern double energy (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_per_site (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance_per_site (const class_tensors_finite & tensors); [[nodiscard]] extern double energy_normalized (const class_tensors_finite & tensors, double energy_minimum, double energy_maximum); [[nodiscard]] extern double energy_minus_energy_reduced(const class_state_finite & state, const class_tensors_finite & tensors); [[nodiscard]] extern double energy (const class_state_finite & state, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_per_site (const class_state_finite & state, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance (const class_state_finite & state, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance_per_site (const class_state_finite & state, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_normalized (const class_state_finite & state, const class_tensors_finite & tensors, double energy_minimum, double energy_maximum); [[nodiscard]] extern double energy_minus_energy_reduced (const Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors); [[nodiscard]] extern double energy (const Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_per_site (const Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance (const 
Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_variance_per_site (const Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors); [[nodiscard]] extern double energy_normalized (const Eigen::Tensor<Scalar,3> &mps, const class_tensors_finite & tensors, double energy_minimum, double energy_maximum); } /* clang-format on */
/// This method sets a new size in the unscaling layer. /// It also sets the members to their default values. void UnscalingLayer::set(const unsigned int& new_unscaling_neurons_number) { minimums.set(new_unscaling_neurons_number, -1.0); maximums.set(new_unscaling_neurons_number, 1.0); means.set(new_unscaling_neurons_number, 0.0); standard_deviations.set(new_unscaling_neurons_number, 1.0); set_default(); }
<reponame>courtneyeh/teku<filename>data/beaconrestapi/src/main/java/tech/pegasys/teku/beaconrestapi/handlers/v1/beacon/AbstractGetSimpleDataFromState.java
/*
 * Copyright ConsenSys Software Inc., 2022
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package tech.pegasys.teku.beaconrestapi.handlers.v1.beacon;

import static tech.pegasys.teku.beaconrestapi.BeaconRestApiTypes.PARAMETER_STATE_ID;

import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Optional;
import tech.pegasys.teku.api.ChainDataProvider;
import tech.pegasys.teku.beaconrestapi.MigratingEndpointAdapter;
import tech.pegasys.teku.infrastructure.async.SafeFuture;
import tech.pegasys.teku.infrastructure.restapi.endpoints.AsyncApiResponse;
import tech.pegasys.teku.infrastructure.restapi.endpoints.EndpointMetadata;
import tech.pegasys.teku.infrastructure.restapi.endpoints.RestApiRequest;
import tech.pegasys.teku.spec.datastructures.metadata.StateAndMetaData;

/**
 * Base class for endpoints that resolve the beacon state identified by the {@code state_id} path
 * parameter and respond with data derived from it. Subclasses supply the endpoint metadata (route,
 * response schema) through the constructor.
 */
public abstract class AbstractGetSimpleDataFromState extends MigratingEndpointAdapter {

  private final ChainDataProvider chainDataProvider;

  public AbstractGetSimpleDataFromState(
      final EndpointMetadata build, final ChainDataProvider chainDataProvider) {
    super(build);
    this.chainDataProvider = chainDataProvider;
  }

  /**
   * Looks up the state (and its metadata) for the requested {@code state_id} asynchronously and
   * responds 200 with the result, or 404 when no matching state is found.
   */
  @Override
  public void handleRequest(RestApiRequest request) throws JsonProcessingException {
    final SafeFuture<Optional<StateAndMetaData>> future =
        chainDataProvider.getBeaconStateAndMetadata(request.getPathParameter(PARAMETER_STATE_ID));
    request.respondAsync(
        future.thenApply(
            maybeStateAndMetadata ->
                maybeStateAndMetadata
                    .map(AsyncApiResponse::respondOk)
                    .orElseGet(AsyncApiResponse::respondNotFound)));
  }
}
#include<bits/stdc++.h> #define ll long long int #define hell (ll)(1e9+7) #define vi vector <ll> #define pb push_back #define pii pair<ll,ll> #define vii vector <pii> #define rep(i,a,b) for(ll i=a;i<b;i++) #define sep(i,a,b) for(ll i=a-1;i>=b;i--) #define inf (ll)(1e16) using namespace std; vi adj[50],a[6]; vi counter(6,0); void solve() { ll n,x,ans,c; cin>>n; rep(i,1,n+1) { cin>>x; adj[x].pb(i); } c=0; rep(i,1,50) { if(!adj[i].empty()) a[c++]=adj[i]; } if(c!=6) { cout<<n; return; } ans=0; /*rep(i,0,6) { for(auto ch:a[i]) cout<<ch<<" "; cout<<"\n"; }*/ while(counter[0]<a[0].size()) { rep(i,1,6) { // cout<<counter[i-1]<<" "; while(counter[i]<a[i].size()&&a[i][counter[i]]<a[i-1][counter[i-1]]) { //cout<<a[i][counter[i]]<<" "<<a[i-1][counter[i]]<<" "; counter[i]++; } } bool flag=true; rep(i,0,6) { counter[i]++; flag=flag&&counter[i]<=(ll)a[i].size(); } if(flag) ans+=6; else break; } cout<<n-ans; } int main() { ios_base::sync_with_stdio(false); cin.tie(0); cout.tie(0); ll t=1; //cin>>t; while(t--) solve(); return 0; }
import * as React from 'react'; import './Step.css'; export class Step extends React.Component<{ step: any }, {}> { stepStyle(): string { switch(this.props.step.result.status) { case 'passed': return 'rgb(201, 234, 182)' break; case 'skipped': return 'rgb(239, 235, 117)' break; default: return 'rgb(255, 119, 146)' } } fullName() { return `${this.props.step.keyword} ${this.props.step.name}` } render() { return ( <div className="step" style={{ borderLeft: `10px solid ${this.stepStyle()}` }}> <span className="step__name">{ this.fullName() }</span> <span className="step__status">{ this.props.step.result.status }</span> </div> ); } }
<filename>tests/test_qml.py
"""Tests for `prettyqt` package."""

import pathlib

import pytest

from prettyqt import core, qml
from prettyqt.utils import InvalidParamError


# def test_jsvalue():
#     val = qml.JSValue(2)
#     val["test"] = 1
#     assert val["test"].toInt() == 1
#     assert "test" in val
#     assert val.get_value() == 2


def test_jsengine():
    # Smoke test: extensions install and an empty script evaluates.
    engine = qml.JSEngine()
    engine.install_extensions("translation")
    engine.eval("")


def test_qmlengine():
    # Ownership must round-trip, and an invalid ownership string must raise.
    engine = qml.QmlEngine()
    obj = core.Object()
    engine.set_object_ownership(obj, "javascript")
    with pytest.raises(InvalidParamError):
        engine.set_object_ownership(obj, "test")
    assert engine.get_object_ownership(obj) == "javascript"
    engine.add_plugin_path("")
    engine.add_import_path("")
    engine.get_plugin_paths()
    engine.get_import_paths()


def test_qmlapplicationengine(qtlog):
    # qtlog fixture suppresses Qt log output while the engine is exercised.
    with qtlog.disabled():
        engine = qml.QmlApplicationEngine()
        for item in engine:
            pass
        path = pathlib.Path.cwd() / "tests" / "qmltest.qml"
        engine.load_data(path.read_text())


def test_qmlcomponent():
    # A fresh component has no source loaded yet.
    comp = qml.QmlComponent()
    assert comp.get_status() == "null"
    # comp.load_url("", mode="asynchronous")
    comp.get_url()


def test_jsvalue():
    # Scalar wrapping, array mapping protocol, and from_object conversions.
    val = qml.JSValue(1)
    assert val.get_error_type() is None
    assert val.get_value() == 1
    repr(val)
    engine = qml.JSEngine()
    val = engine.new_array(2)
    val["test1"] = 1
    val["test2"] = 2
    assert val["test1"] == 1
    assert "test2" in val
    assert len(val) == 2
    del val["test2"]
    for n, v in val:
        pass
    val = qml.JSValue.from_object(None, engine)
    val = qml.JSValue.from_object(1, engine)
    val = qml.JSValue.from_object(["test"], engine)
    val = qml.JSValue.from_object(dict(a="b"), engine)
910-P: Psychosocial Characteristics Associated with Diabetes Device Use in Teens with T1D Insulin pumps and CGM devices can help youth with T1D achieve better glycemic control and alleviate self-care burden. However, not all youth want to use such devices. We compared psychosocial characteristics of teens with T1D and their parents/caregivers according to teen pump use vs. non-use as well as teen CGM use vs. non-use. At baseline, teens/parents completed psychosocial surveys assessing teen depressive symptoms, disordered eating behaviors, diabetes-specific family conflict, parent involvement in diabetes care, teen self-care, diabetes burden (PAID-Ped/PR), quality of life, and major life events. Diabetes treatment data were collected by interview and EHR review every 3 months for 18 months. Teens using/starting pump or CGM at all/most visits were considered pump users or CGM users, respectively. Chi-square and t-tests compared characteristics of device (pump, CGM) users with non-users. The sample comprised 301 teens aged 13-17 with T1D (41% male); mean age was 15.0±1.3 years, T1D duration 6.5±3.7 years, and A1c 8.5±1.1%. Neither age, sex, nor T1D duration was related to device use. CGM and pump non-users had less favorable survey scores than device users for many psychosocial characteristics (Table). Identifying teens with less favorable psychosocial factors may help target those at risk for device non-use, allowing timely intervention and support by clinicians when initiating diabetes technologies. Disclosure C. Chen: None. L. J. Tinsley: None. L. K. Volkening: None. B. Anderson: None. L. M. Laffel: Consultant; Self; AstraZeneca, Boehringer Ingelheim International GmbH, Dexcom, Inc., Dompe, Insulogic LLC, Janssen Pharmaceuticals, Inc., Laxmi Therapeutic Devices, LifeScan, Lilly Diabetes, Medtronic, Provention Bio, Inc. Funding National Institutes of Health (R01DK095273, T32DK007260, P30DK036836); JDRF (2-SRA-2014-253-M-B)
/// Swaps an arbitrary member name with a replacement. The replacement name
/// will have been determined within the ffi definition lib (`vks`).
///
/// Currently only `type` needs replacing: it is a Rust keyword, so it is
/// rewritten in place as `type_`.
fn filter_member_name(orig: &mut String) {
    if orig == "type" {
        // Direct assignment is the idiomatic form here; the previous
        // `mem::replace(orig, ..)` only discarded its returned old value.
        *orig = "type_".to_string();
    }
}
/**
 * Builder for the {@link AutofillSuggestion}.
 *
 * <p>Only {@code label} is mandatory (enforced by an assertion in {@link #build()});
 * every other field is optional and defaults to the Java zero value.
 */
public static final class Builder {
    // Resource id of the built-in icon; 0 when unset.
    private int mIconId;
    // Remote icon URL; takes effect alongside/instead of mIconId — see AutofillSuggestion.
    private GURL mCustomIconUrl;
    // Pre-fetched bitmap used as the icon when available.
    private Bitmap mCustomIcon;
    private boolean mIsBoldLabel;
    // Whether the icon is rendered before the label rather than after it.
    private boolean mIsIconAtStart;
    private boolean mIsDeletable;
    private boolean mIsMultiLineLabel;
    // In-product-help feature name associated with this suggestion, if any.
    private String mFeatureForIPH;
    private String mItemTag;
    // Main suggestion text; required.
    private String mLabel;
    // Secondary text shown under/next to the label.
    private String mSubLabel;
    private int mSuggestionId;

    public Builder setIconId(int iconId) {
        this.mIconId = iconId;
        return this;
    }

    public Builder setCustomIconUrl(GURL customIconUrl) {
        this.mCustomIconUrl = customIconUrl;
        return this;
    }

    public Builder setCustomIcon(Bitmap customIcon) {
        this.mCustomIcon = customIcon;
        return this;
    }

    public Builder setIsBoldLabel(boolean isBoldLabel) {
        this.mIsBoldLabel = isBoldLabel;
        return this;
    }

    public Builder setIsIconAtStart(boolean isIconAtStart) {
        this.mIsIconAtStart = isIconAtStart;
        return this;
    }

    public Builder setIsDeletable(boolean isDeletable) {
        this.mIsDeletable = isDeletable;
        return this;
    }

    public Builder setIsMultiLineLabel(boolean isMultiLineLabel) {
        this.mIsMultiLineLabel = isMultiLineLabel;
        return this;
    }

    public Builder setFeatureForIPH(String featureForIPH) {
        this.mFeatureForIPH = featureForIPH;
        return this;
    }

    public Builder setItemTag(String itemTag) {
        this.mItemTag = itemTag;
        return this;
    }

    public Builder setLabel(String label) {
        this.mLabel = label;
        return this;
    }

    public Builder setSubLabel(String subLabel) {
        this.mSubLabel = subLabel;
        return this;
    }

    public Builder setSuggestionId(int suggestionId) {
        this.mSuggestionId = suggestionId;
        return this;
    }

    /**
     * Creates the {@link AutofillSuggestion}.
     *
     * <p>NOTE: the label check is a Java {@code assert}, so it only fires when
     * assertions are enabled (e.g. test builds); an unset label otherwise
     * reaches {@code isEmpty()} and throws a NullPointerException.
     */
    public AutofillSuggestion build() {
        assert !mLabel.isEmpty() : "AutofillSuggestion requires the label to be set.";
        return new AutofillSuggestion(mLabel, mSubLabel, mItemTag, mIconId, mIsIconAtStart,
                mSuggestionId, mIsDeletable, mIsMultiLineLabel, mIsBoldLabel, mFeatureForIPH,
                mCustomIconUrl, mCustomIcon);
    }
}
def find_local_ip_addresses():
    """Return the set of local IPv4 addresses, excluding loopback.

    Iterates every interface reported by netifaces and collects address
    entries, skipping 127.x loopback addresses and any address containing
    ':' (IPv6 / MAC-style entries).

    Returns:
        set of str: the collected addresses.
    """
    ip_addresses = set()
    for name in sorted(interfaces(), key=str.lower):
        for addresses in ifaddresses(name).values():
            for properties in addresses:
                address = properties.get('addr')
                # BUGFIX: an entry may have no 'addr' key, making .get()
                # return None; check before calling .startswith() on it.
                if not address:
                    continue
                if address.startswith('127.'):
                    continue
                if ':' in address:
                    continue
                ip_addresses.add(address)
    return ip_addresses
// Check if the user has access
//
// Access is granted when the ACL allows everyone, when the user is listed
// directly, or when any of the user's groups is listed. Map lookups yield
// false for missing keys, so absent entries simply deny access.
func (a ACL) CheckAccess(userObj UserGroup) bool {
	// Wildcard ACL: everyone is allowed.
	if a.allAllowed {
		return true
	}
	// Direct user match.
	if a.users[userObj.User] {
		return true
	}
	// Any of the user's groups being allowed is sufficient.
	for _, group := range userObj.Groups {
		if a.groups[group] {
			return true
		}
	}
	return false
}
// Release do the resource release to tbs-server
//
// It refuses to release a task that was never launched or was already
// released, stops the heartbeat, then posts the release request to the
// server. The released flag is set exactly once per manager.
func (m *Mgr) Release(req *v2.ParamRelease) error {
	blog.Infof("resource: try to release dist-resource task(%s) for work(%s)", req.TaskID, m.work.ID())
	// A release only makes sense after a successful launch.
	if !m.launched || m.taskInfo == nil {
		blog.Errorf("resource: release dist-resource task(%s) for work(%s) failed: task no launched",
			req.TaskID, m.work.ID())
		return types.ErrTaskCannotBeReleased
	}

	// Always release the task we actually launched, regardless of the
	// caller-provided ID.
	req.TaskID = m.taskInfo.TaskID

	// Double release is an error.
	if m.released {
		blog.Errorf("resource: release dist-resource task(%s) for work(%s) failed: task already released",
			req.TaskID, m.work.ID())
		return types.ErrTaskCannotBeReleased
	}
	// NOTE(review): the flag is set even when the server request below fails,
	// so a failed release cannot be retried — confirm this is intended.
	defer func() {
		m.released = true
	}()

	var data []byte
	// Encoding errors are deliberately ignored (best effort).
	_ = codec.EncJSON(req, &data)
	blog.Debugf("resource: release dist-resource task(%s) for work(%s) info: %s",
		req.TaskID, m.work.ID(), string(data))

	// Stop the heartbeat loop before asking the server to tear the task down.
	if m.heartbeatCancel != nil {
		m.heartbeatCancel()
	}

	_, _, err := m.request("POST", m.serverHost, releaseDistributeResourcesURI, data)
	if err != nil {
		blog.Warnf("resource: release dist-resource task(%s) for work(%s) failed: %v",
			req.TaskID, m.work.ID(), err)
		return err
	}

	blog.Infof("resource: success to release dist-resource task(%s) for work(%s)",
		req.TaskID, m.work.ID())
	return nil
}
def process(self, instance):
    """Run this plugin's collection for *instance* in its render layer.

    `lib.renderlayer` is used as a context manager — presumably it switches
    the active render layer and restores the previous one on exit; confirm
    against `lib`. The actual gathering is delegated to ``self.collect``.

    Args:
        instance: pyblish-style instance whose ``data["renderlayer"]``
            names the layer to collect in.
    """
    with lib.renderlayer(instance.data["renderlayer"]):
        self.collect(instance)
#!/usr/bin/env python3
"""CG vs PG Classification script using cross-validation """

import numpy as np
import os
from os.path import expanduser
import sys
sys.path.append('tools')
from numpy import genfromtxt
from sklearn import svm
import sklearn
import csv
import utils
import logging
import multiprocessing as multipr
import itertools
import pathlib
from time import time
import argparse
from sklearn.model_selection import GridSearchCV

HOME = str(pathlib.Path.home())
# Sentinel kernel name: trainval() runs a grid search when it receives this.
UNKNOWN_KERNEL_TYPE = 'UNKNOWN'


def load_indices_and_labels_crossvalidation(indicesdir, nfolds):
    """Load indices and labels in @indicesdir folder

    Args:
        indicesdir(str): path to the folder containing the indices
        nfolds(int): number of indices/labels sets

    Returns:
        2-uple: indices and labels. Both are dicts with @nfolds elements
    """
    indices = {}
    labels = {}
    for k in range(nfolds):
        indicespath = os.path.join(indicesdir, 'ids_fold{}.csv'.format(k))
        indices[k] = genfromtxt(indicespath, dtype=int)
        labelspath = os.path.join(indicesdir, 'labels_fold{}.csv'.format(k))
        labels[k] = genfromtxt(labelspath, dtype=int)
    return indices, labels


def write_results(header, nfeatures, hits, samplesz, traintime, predtime,
                  avgacc, foldsvar, totaltime, c, ker, outdir):
    """Write a cross-validation summary to <outdir>/results.log."""
    # TODO: Export results in json format
    # TODO: Unify all exports here (many calls scattered in the code)
    resultspath = os.path.join(outdir, 'results.log')
    # Use a with-statement so the file is closed even on error.
    with open(resultspath, 'w') as resultsfh:
        resultsfh.write(header + '\n')
        resultsfh.write('Number of features:\n')
        resultsfh.write('{}\n'.format(nfeatures))
        resultsfh.write('Number of hits, training and prediction time per fold:\n')
        for i in range(len(traintime)):
            resultsfh.write('{}/{},{},{}\n'.format(hits[i], samplesz[i],
                                                   traintime[i], predtime[i]))
        resultsfh.write('Folds average accuracy and variance:\n')
        resultsfh.write('{},{}\n'.format(avgacc, foldsvar))
        resultsfh.write('Overall time:\n')
        resultsfh.write('{}\n'.format(totaltime))
        resultsfh.write('SVM kernel and C param:\n')
        resultsfh.write('{},{}\n'.format(ker, c))


def crossvalidate(featurespath, indicesdir, nfolds, outdir):
    """Cross-validate an SVM over the features set in @featurespath.

    Uses the indices/labels in @indicesdir and writes the per-fold decision
    values and a results summary to @outdir. Skips silently when results
    already exist.

    Args:
        featurespath(str): path to the features set (csv)
        indicesdir(str): path to the folder containing the indices and labels
        nfolds(int): number of folds of the cross-validation
        outdir(str): output directory
    """
    print(featurespath)
    resultspath = os.path.join(outdir, 'results.log')
    if os.path.exists(resultspath):
        return

    start = time()
    features = genfromtxt(featurespath, delimiter=',')
    features = sklearn.preprocessing.scale(features, axis=0)
    indices, labels = load_indices_and_labels_crossvalidation(indicesdir, nfolds)

    samplesz = []
    hits = []
    traintime = []
    predtime = []
    tpsum = 0
    ker = UNKNOWN_KERNEL_TYPE  # flag: grid-search kernel/C on the first fold
    c = -1
    acc = np.empty(nfolds)  # was hardcoded to 5; follow the nfolds parameter

    for k in range(nfolds):
        valfold = k
        trainfolds = list(range(nfolds))
        trainfolds.remove(k)
        valfeatures = features[indices[valfold]]
        vallabels = labels[valfold]

        trainfeatures = np.ndarray((0, features.shape[1]))
        trainlabels = np.array([])
        for f in trainfolds:
            trainfeatures = np.concatenate((trainfeatures, features[indices[f]]), axis=0)
            trainlabels = np.concatenate((trainlabels, labels[f]), axis=0)

        tp, decvalues, trtime, prtime, c, ker = trainval(
            trainfeatures, trainlabels, valfeatures, vallabels, c, ker)

        traintime.append(trtime)
        predtime.append(prtime)
        hits.append(tp)
        tpsum += tp
        samplesz.append(len(decvalues))

        decvaluespath = os.path.join(outdir, 'decvalues_fold{}.csv'.format(k))
        with open(decvaluespath, 'w') as fh2:
            for v in decvalues:
                fh2.write('{}\n'.format(v))
        acc[k] = tp / valfeatures.shape[0]

    avgacc = tpsum / np.sum(np.array(samplesz))
    totaltime = time() - start
    foldsvar = np.var(acc, ddof=1)
    write_results(featurespath, features.shape[1], hits, samplesz, traintime,
                  predtime, avgacc, foldsvar, totaltime, c, ker, outdir)


def trainval(trainfeatures, trainlabels, valfeatures, vallabels, inc, inker):
    """Train an SVM and predict on the validation set.

    When @inker is UNKNOWN_KERNEL_TYPE, a grid search over kernel and C is
    run first and the best parameters are returned for reuse on later folds.

    Args:
        trainfeatures(np.ndarray): each row represents the features of a sample
        trainlabels(np.array): labels (-1 or +1)
        valfeatures(np.ndarray): validation features
        vallabels(np.array): validation labels (-1 or +1)
        inc(float): input C parameter of the svm
        inker(str): input kernel type

    Returns:
        tuple: (true positives, decision values, train time, prediction time,
                C used, kernel used)
    """
    parameters = {'kernel': ('linear', 'rbf'), 'C': [0.1, 1, 10]}
    svc = svm.SVC()
    if inker == UNKNOWN_KERNEL_TYPE:
        clf = GridSearchCV(svc, parameters)
        clf.fit(trainfeatures, trainlabels)
        c = clf.best_params_['C']
        ker = clf.best_params_['kernel']
    else:
        c = inc
        ker = inker

    clf = sklearn.svm.SVC(C=c, cache_size=5000, class_weight=None, coef0=0.0,
                          decision_function_shape='ovr', degree=3, gamma='auto',
                          kernel=ker, max_iter=20000, probability=False,
                          random_state=None, shrinking=True, tol=0.001,
                          verbose=True)
    start = time()
    clf.fit(trainfeatures, trainlabels)
    aftertrain = time()
    predicted = clf.predict(valfeatures)
    afterprediction = time()

    decvalues = clf.decision_function(valfeatures)
    hits = np.equal(predicted, vallabels)
    tp = np.sum(hits)
    traintime = aftertrain - start
    predtime = afterprediction - aftertrain
    return tp, decvalues, traintime, predtime, c, ker


def crossvalidate_listinput(l):
    """Adapter for Pool.map: unpack the argument list into crossvalidate."""
    crossvalidate(*l)


def main_classification(featuresdir, indicesdir, outdir, nprocs=1):
    """Cross-validate every .csv features file in @featuresdir in parallel."""
    if not os.path.isdir(outdir):
        os.mkdir(outdir)

    files = []
    resultsdirs = []
    for e in os.listdir(featuresdir):
        if not e.endswith('.csv'):
            continue
        files.append(os.path.join(featuresdir, e))
        meth = e.split('.')[0]
        resultsdir = os.path.join(outdir, meth)
        resultsdirs.append(resultsdir)
        if not os.path.isdir(resultsdir):
            os.mkdir(resultsdir)

    # NOTE: the fold count is fixed to 5 here, matching the --nfolds default.
    params = [[files[j], indicesdir, 5, resultsdirs[j]] for j in range(len(files))]
    # Context manager ensures worker processes are terminated/joined.
    with multipr.Pool(nprocs) as pool:
        pool.map(crossvalidate_listinput, params)


def classify_simple_voting(indicesdir, nfolds, resdir, outdir):
    """Flat (unweighted) voting over the per-method decision values."""
    classify_weighted_voting(indicesdir, nfolds, resdir, outdir, False)


def classify_weighted_voting(indicesdir, nfolds, resdir, outdir, applyweights=False):
    """Classify by voting. Weighting is provided as argument.

    If not provided, a flat voting is performed.

    Args:
        indicesdir(str): path to the indices and labels files
        nfolds(int): number of folds of the cross-validation
        resdir(str): results of the cross validation
        outdir(str): output dir
        applyweights(bool): weight each method's vote by its accuracy,
            read from its results.log
    """
    if not os.path.exists(outdir):
        os.mkdir(outdir)
    start = time()
    methpaths = load_methods_dirs(resdir, ['concatenated'])

    resultspath = os.path.join(outdir, 'results.log')
    resultsfh = open(resultspath, 'w')
    resultsfh.write('{} voting\n'.format('Weighted' if applyweights else 'Simple'))
    resultsfh.write('Number of features:\n')
    resultsfh.write('{}\n'.format(len(methpaths)))

    acc = np.empty(nfolds)  # was hardcoded to 5
    if applyweights:
        weights = []
        for p in methpaths:
            respath = os.path.join(p, 'results.log')
            # Line 11 of results.log holds "<avg accuracy>,<variance>".
            with open(respath) as fh:
                lines = fh.readlines()
            weights.append(float(lines[10].split(',')[0].strip()))
    else:
        weights = np.ones((len(methpaths)))

    _, labels = load_indices_and_labels_crossvalidation(indicesdir, nfolds)
    tpsum = 0
    resultsfh.write('Number of hits, training and prediction time per fold:\n')
    samplesz = 0
    for f in range(nfolds):
        vallabels = labels[f]
        # Each method votes -1 (negative decision value) or +1 per sample.
        votes = []
        for methpath in methpaths:
            devaluesepath = os.path.join(methpath, 'decvalues_fold{}.csv'.format(f))
            decvalues = genfromtxt(devaluesepath, delimiter='\n')
            vote = np.ones(decvalues.shape, dtype=int)
            vote[np.where(decvalues < 0)] = -1
            votes.append(vote)

        votessum = np.zeros(votes[0].shape)
        for j in range(len(votes)):
            votessum += votes[j] * weights[j]

        votingres = np.ones(votessum.shape, dtype=int)
        votingres[np.where(votessum < 0)] = -1
        hits = np.equal(votingres, vallabels)
        tp = np.sum(hits)
        acc[f] = tp / votingres.shape[0]
        resultsfh.write('{}/{},0,0\n'.format(tp, votingres.shape[0]))
        tpsum += tp
        samplesz += votingres.shape[0]

    foldsvar = np.var(acc, ddof=1)
    resultsfh.write('Folds average accuracy and variance:\n')
    resultsfh.write('{},{}\n'.format(tpsum / samplesz, foldsvar))
    resultsfh.write('Overall time:\n')
    resultsfh.write('{}\n'.format(time() - start))
    resultsfh.write('SVM kernel and C param:\n')
    resultsfh.write('{},{}\n'.format(-1, -1))  # no SVM involved in voting
    resultsfh.close()


def load_decvalues(methpaths, fold):
    """Load the decision values generated by previous experiments

    Args:
        methpaths(list): each element contains the full path of a method result
        fold(int): fold of the cross-validation

    Returns:
        ndarray: each column contains the decision values of one method
    """
    auxpath = os.path.join(methpaths[0], 'decvalues_fold{}.csv'.format(fold))
    aux = genfromtxt(auxpath, delimiter='\n')
    nmethods = len(methpaths)
    features = np.ndarray((aux.shape[0], nmethods))
    for idx, d in enumerate(methpaths):
        decvaluespath = os.path.join(d, 'decvalues_fold{}.csv'.format(fold))
        features[:, idx] = genfromtxt(decvaluespath, delimiter='\n')
    return features


def load_methods_dirs(resultsrootdir, exclude):
    """Load sorted elements in resultsrootdir/, excluding files and
    directories starting with '_'

    Args:
        resultsrootdir(str): root path
        exclude(list): list of folders to exclude

    Returns:
        list of str: Sorted list of directories
    """
    methpaths = []
    for d in sorted(os.listdir(resultsrootdir)):
        if d.startswith('_') or d in exclude:
            continue
        dirpath = os.path.join(resultsrootdir, d)
        if not os.path.isdir(dirpath):
            continue
        methpaths.append(dirpath)
    return methpaths


def classify_decision_values(indicesdir, nfolds, resdir, outdir):
    """Meta-classify: train an SVM on the per-method decision values."""
    if not os.path.exists(outdir):
        os.mkdir(outdir)
    start = time()
    methpaths = load_methods_dirs(resdir, ['concatenated'])

    resultspath = os.path.join(outdir, 'results.log')
    resultsfh = open(resultspath, 'w')
    resultsfh.write('Decision values classification\n')
    resultsfh.write('Number of features:\n')
    resultsfh.write('{}\n'.format(len(methpaths)))

    # Load every fold's labels up front: the training set of fold k needs
    # the labels of all the *other* folds.
    _, labels = load_indices_and_labels_crossvalidation(indicesdir, nfolds)

    tpsum = 0
    resultsfh.write('Number of hits, training and prediction time per fold:\n')
    samplesz = 0
    ker = UNKNOWN_KERNEL_TYPE  # flag: grid-search kernel/C on the first fold
    c = -1
    acc = np.empty(nfolds)  # was hardcoded to 5

    for k in range(nfolds):
        trainfolds = list(range(nfolds))
        trainfolds.remove(k)
        vallabels = labels[k]
        valfeatures = load_decvalues(methpaths, k)

        trainlabels = np.array([])
        trainfeatures = np.ndarray((0, len(methpaths)))
        for kk in trainfolds:
            # BUGFIX: the original used fold k here, so the "training" set was
            # just the validation fold repeated nfolds-1 times.
            trainlabels = np.concatenate((trainlabels, labels[kk]), axis=0)
            trainfeatures = np.concatenate(
                (trainfeatures, load_decvalues(methpaths, kk)), axis=0)

        tp, decvalues, traintime, predtime, c, ker = trainval(
            trainfeatures, trainlabels, valfeatures, vallabels, c, ker)

        decvaluespath = os.path.join(outdir, 'decvalues_fold{}.csv'.format(k))
        with open(decvaluespath, 'w') as fh2:
            for v in decvalues:
                fh2.write('{}\n'.format(v))

        resultsfh.write('{}/{},{},{}\n'.format(tp, valfeatures.shape[0],
                                               traintime, predtime))
        tpsum += tp
        samplesz += valfeatures.shape[0]
        acc[k] = tp / valfeatures.shape[0]

    resultsfh.write('Folds average accuracy and variance:\n')
    resultsfh.write('{},{}\n'.format(tpsum / samplesz, np.var(acc, ddof=1)))
    resultsfh.write('Overall time:\n')
    resultsfh.write('{}\n'.format(time() - start))
    resultsfh.write('SVM kernel and C param:\n')
    # Report the parameters actually used (the original wrote -1,-1).
    resultsfh.write('{},{}\n'.format(ker, c))
    resultsfh.close()


def main_metaclassification(indicesdir, nfolds, resdir):
    """Run the three meta-classifiers over existing per-method results."""
    simplevotingdir = os.path.join(resdir, '_simplevoting')
    weightedvotingdir = os.path.join(resdir, '_weightedvoting')
    decvaluesclassficationdir = os.path.join(resdir, '_decvaluesclassification')
    classify_simple_voting(indicesdir, nfolds, resdir, simplevotingdir)
    classify_weighted_voting(indicesdir, nfolds, resdir, weightedvotingdir, True)
    classify_decision_values(indicesdir, nfolds, resdir, decvaluesclassficationdir)


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--featuresdir', help='Path to the features folder')
    parser.add_argument('--indicesdir', help='Path to the indices/labels folder')
    parser.add_argument('--outdir', help='Output directory')
    parser.add_argument('--nfolds', help='Number of the folds in the cross-validation',
                        default=5, type=int)
    parser.add_argument('--overwrite', help='Overwrite existing folders',
                        action='store_true')
    nprocs = 7

    args = parser.parse_args()
    # All three path arguments are required; argparse leaves them as None.
    if None in (vars(args)).values():
        print(parser.description)
        print(parser.usage)
        return

    if os.path.exists(args.outdir) and not args.overwrite:
        print('{} exists. Add --overwrite'.format(args.outdir))
        return

    featuresdir, indicesdir, outdir = map(expanduser,
                                          [args.featuresdir, args.indicesdir,
                                           args.outdir])
    main_classification(featuresdir, indicesdir, outdir, nprocs)
    #main_metaclassification(indicesdir, args.nfolds, outdir)


if __name__ == "__main__":
    main()
#include <bits/stdc++.h>
using namespace std;

// Reads a board size (a x b) and a cell (x, y), then prints the largest
// of the four rectangles obtained by cutting the board along the cell's
// row and column (right of x, left of x, below y, above y).
void fun() {
    int a, b, x, y;
    cin >> a >> b >> x >> y;

    const int candidates[4] = {
        (a - x - 1) * b,  // strip to the right of column x
        x * b,            // strip to the left of column x
        (b - y - 1) * a,  // strip below row y
        y * a             // strip above row y
    };

    int best = 0;  // same zero-initialised maximum as the original
    for (int area : candidates) {
        if (area > best) {
            best = area;
        }
    }
    cout << best << endl;
}

int main() {
    int num;
    cin >> num;
    // Run one test case per query.
    while (num-- > 0) {
        fun();
    }
    return 0;
}
Media playback is unsupported on your device Media caption President Donald Trump seeks to set the record straight US President Donald Trump has insisted he is not under investigation, while dismissing the FBI director he fired as a "showboat" and "grandstander". Mr Trump also told NBC News it was his decision alone to sack James Comey. Mr Comey was leading an inquiry into alleged Russian meddling in the US election and possible collusion between Trump campaign officials and Moscow. Mr Trump has dismissed the probe as a "charade", a claim directly contradicted by Mr Comey's successor. In his first interview since firing the FBI director, Mr Trump told NBC News on Thursday he had asked Mr Comey whether he was under investigation. "I said, if it's possible would you let me know, 'Am I under investigation?' He said: 'You are not under investigation.'" Media playback is unsupported on your device Media caption What do Trump supporters think about Comey's firing? "I know I'm not under investigation," Mr Trump told the interviewer, repeating a claim he made in Tuesday's letter of dismissal to Mr Comey. Trump's dinner description challenged President Trump said Mr Comey first told him this at a dinner at the White House, which the FBI chief had requested because "he wanted to stay on" in his post under the new administration. But NBC later quoted an unnamed former senior FBI official close to Mr Comey as saying it was the White House that had requested the dinner, and that Mr Comey would not have told the president he was not under investigation. "He would say, 'look sir, I really can't get into it, and you don't want me to'," the former official was quoted as saying. The White House has rejected concerns raised by legal experts that the conversation, as described by Trump, may have been improper. Spokeswoman Sarah Huckabee Sanders said she "did not see it as a conflict of interest". 
According to the New York Times, two people who had heard Mr Comey's account - apparently of the same dinner - said Mr Comey declined a request to pledge loyalty to Mr Trump, but said he would be honest with him. How the sacking narrative has changed The president also appeared to undercut the initial White House explanation that he had fired Mr Comey on the recommendation of top justice officials. "He's a showboat. He's a grandstander. The FBI has been in turmoil. I was going to fire Comey. My decision," Mr Trump said. White House officials had previously pinned the decision on a memo written by Deputy Attorney General Rod Rosenstein, which Mr Trump refers to in the opening paragraph of his termination letter to Mr Comey, saying, "I have accepted their recommendation". But he told NBC: "I was going to fire him regardless of the recommendation." White House firefighting - Anthony Zurcher, BBC North America reporter On Thursday afternoon, the president took a wrecking ball to the White House's days of work. Oftentimes, it seems like the president and his press office are operating from different playbooks. The president says or tweets what he chooses, and his staff scrambles to explain the context or douse the flames of controversy. It happened when the president boasted about the size of his inauguration crowd, alleged that there were millions of illegal votes in the presidential election and accused Barack Obama of "wiretapping" him, among many other instances. On Thursday afternoon it was time to bring out the brooms once again. Read more from Anthony Zurcher: Three takeaways from the NBC interview Inquiry should be 'so strong' Mr Trump also denied that he wanted the FBI inquiry on Russia dropped, saying he, instead, wanted it "speeded up". "I want to find out if there was a problem with the election having to do with Russia... or any other country, I want that to be so strong and so good, and I want it to happen." 
This is despite saying in a tweet on Monday: "The Russia-Trump collusion story is a total hoax, when will this taxpayer funded charade end?" "There's no collusion between me and my campaign and the Russians," he told NBC. Media playback is unsupported on your device Media caption Trump's love-hate relationship with Comey over a tumultuous year Mr Trump said he had just sent a letter via a law firm to Republican Senator Lindsey Graham stating that he has no stake in Russia. "I have nothing to do with Russia," he said. "I have no investments in Russia. I don't have property in Russia. I'm not involved with Russia." What did the new acting FBI chief say? The White House has depicted the Russia inquiry as "probably one of the smallest things" that the FBI has "got going on their plate". But acting FBI Director Andrew McCabe said on Thursday that it was "a highly significant investigation". In testimony to the Senate intelligence committee, he also cast doubt on White House claims that Mr Comey had lost the confidence of his staff. Media playback is unsupported on your device Media caption Donald Trump was with Henry Kissinger when he told reporters James Comey was "not doing a good job" as FBI chief "I can confidently tell you that the vast majority of employees enjoyed a deep and positive connection to Director Comey," Mr McCabe said. The acting FBI director vowed not to update the White House on the status of the investigation and to notify the Senate panel of any attempt to interfere with the inquiry. Republican committee chairman Richard Burr asked Mr McCabe if he had ever heard Mr Comey tell Mr Trump the president was not the subject of investigation. Image copyright Getty Images Image caption Andrew McCabe said the FBI had full faith in James Comey Mr McCabe said he could not comment on an ongoing inquiry. The acting FBI director did not confirm reports that Mr Comey had asked for more resources for the agency's Russia inquiry. 
Mr McCabe said he believed the FBI had sufficient funding to conduct the probe.
#pragma once #include "Program.hpp" namespace bf::opt { class OptimizationPass { public: virtual void Initialize() = 0; virtual bf::Program Optimize(bf::Program program) = 0; virtual const std::string& GetPassName() const = 0; virtual ~OptimizationPass() = default; }; } // namespace bf::opt
<gh_stars>100-1000 /** * @file * @brief Simple test to draw frame via embox/fb interface * * @date Jun 21, 2017 * @author <NAME> */ #include <stdint.h> #include <inttypes.h> #include <stdlib.h> #include <unistd.h> #include <stdio.h> #include <fcntl.h> #include <sys/mman.h> #include <util/math.h> #include <lib/fb_draw.h> #define MIN(a, b) ((a) < (b) ? (a) : (b)) int main() { struct screen screen_info; int x, y, scr_pos = 0; uint8_t r, g, b; if (fb_draw_init_screen(&screen_info, 0) != 0) { return -1; } for (y = 0; y < screen_info.height; y++) { for (x = 0; x < screen_info.width; x++) { r = MIN(0xFF, (1 + x + y) / ((screen_info.width + screen_info.height) / 0xFF)); g = MIN(0xFF, (x + 1) / (screen_info.width / 0xFF)); b = MIN(0xFF, (1 + screen_info.height - y) / (screen_info.height / 0xFF)); if (fb_draw_put_pix(r, g, b, 32, &screen_info, scr_pos + x) != 0) { return -1; } } scr_pos += screen_info.width; } return 0; }
/**
 * Test the feature {@link se.jbee.inject.defaults.DefaultFeature#SELF} which
 * allows to inject the {@link Name} or {@link Type} that the created instance
 * represents within the {@link Injector} context.
 * <p>
 * This allows to get hold of the instance's {@link Name} and full generic
 * {@link Type}.
 * <p>
 * It is also possible to inject the full {@link Dependency} that caused the
 * creation of an instance.
 * <p>
 * All this information can be extracted from the {@link Dependency} itself that
 * resolves the {@link Name}, {@link Type} or {@link Dependency} value.
 * <p>
 * Without question this feature is most useful in building more powerful
 * features on top of others. Within actual application code this might appear
 * useful but should be used with caution as these types of information are
 * specific to dependency injection context and should not exist directly as an
 * application level concept.
 */
class TestFeatureSelfInjectionBinds {

	// Captures the Name, Type and Dependency the container injected for
	// the instance itself, so tests can assert on them afterwards.
	public static class Foo<T> {

		final Name actualName;
		final Type<? extends Foo<T>> actualType;
		final Dependency<? extends Foo<T>> actualDependency;

		public Foo(Type<? extends Foo<T>> actualType, Name actualName,
				Dependency<? extends Foo<T>> actualDependency) {
			this.actualType = actualType;
			this.actualName = actualName;
			this.actualDependency = actualDependency;
		}
	}

	// Like Foo, but additionally receives a nested Foo<String> so the
	// self-injection of an inner dependency can be observed too.
	public static class SuperFoo<T> extends Foo<T> {

		final Foo<String> innerFoo;

		public SuperFoo(Foo<String> innerFoo, Name actualName,
				Type<? extends SuperFoo<T>> actualType,
				Dependency<SuperFoo<T>> actualDependency) {
			super(actualType, actualName, actualDependency);
			this.innerFoo = innerFoo;
		}
	}

	// Plain holder used to observe Foo/SuperFoo created one level deep.
	public static class Bar {

		final Foo<List<Integer>> foo;
		final SuperFoo<Double> superFoo;

		public Bar(Foo<List<Integer>> foo, SuperFoo<Double> superFoo) {
			this.foo = foo;
			this.superFoo = superFoo;
		}
	}

	// A Foo with a fixed type parameter plus a wildcard Foo dependency.
	public static class Que extends Foo<BigInteger> {

		final Foo<?> genericFoo;

		public Que(Foo<?> genericFoo, Type<Que> actualType, Name actualName,
				Dependency<Que> actualDependency) {
			super(actualType, actualName, actualDependency);
			this.genericFoo = genericFoo;
		}
	}

	/**
	 * The specific binds done here are less important.
	 * They should just create different scenarios that can be tested.
	 */
	private static class TestFeatureSelfInjectionBindsModule extends BinderModule {

		@Override
		protected void declare() {
			// make Foo and SuperFoo be created per instance
			per(Scope.dependencyInstance).bind(Name.ANY, Foo.class).toConstructor();
			per(Scope.dependencyInstance).bind(Name.ANY, SuperFoo.class).toConstructor();
			// give the Foo in Bar a name we can check for
			injectingInto(Bar.class).construct("myNameNested", Foo.class);
			// give SuperFoo in Bar a name using a Hint
			bind(Bar.class).toConstructor(
					instance(named("special"), raw(SuperFoo.class)).asHint());
			injectingInto(SuperFoo.class).bind(Foo.class).to("inner", Foo.class);
			construct(Que.class);
			bind(named("x"), Que.class).toConstructor(instance(named("y"),
					raw(Foo.class).parameterized(String.class)).asHint());
		}
	}

	// Shared injector context; all tests resolve from this one bootstrap.
	private final Injector context = Bootstrap.injector(
			TestFeatureSelfInjectionBindsModule.class);

	/* Type */

	@Test
	void actualTypeFromAdHoc() {
		Foo<?> fooString = context.resolve(
				raw(Foo.class).parameterized(String.class));
		assertSame(String.class, fooString.actualType.parameter(0).rawType);
	}

	@Test
	void actualTypeFromParameterNestedFlatType() {
		assertEquals(listTypeOf(Integer.class),
				context.resolve(Bar.class).foo.actualType.parameter(0));
	}

	@Test
	void actualTypeFromParameterNestedDeepType() {
		assertEquals(raw(SuperFoo.class).parameterized(Double.class),
				context.resolve(Bar.class).superFoo.actualType);
	}

	@Test
	void actualTypeFromParameterNestedFlatWildcardType() {
		// no hint bound for Que's Foo<?> => upper-bound wildcard type
		assertEquals(raw(Foo.class).parameterizedAsUpperBounds(),
				context.resolve(Que.class).genericFoo.actualType);
	}

	@Test
	void actualTypeFromParameterNestedFlatWildcardTypeWithHintOverload() {
		// the "x" binding hints Foo<String> for the wildcard parameter
		assertEquals(raw(Foo.class).parameterized(String.class),
				context.resolve("x", Que.class).genericFoo.actualType);
	}

	@Test
	void actualTypeFromParameterDoubleNested() {
		assertEquals(String.class, context.resolve(
				Bar.class).superFoo.innerFoo.actualType.parameter(0).rawType);
	}

	@Test
	void actualTypeFromParameterNestedTypeParameter() {
		assertEquals(raw(Que.class), context.resolve("x", Que.class).actualType);
	}

	/* Name */

	@Test
	void actualNameFromAdHocInjected() {
		assertEquals(named("ad-hoc"), context.resolve(named("ad-hoc"),
				raw(Foo.class).parameterized(String.class)).actualName);
	}

	@Test
	void actualNameFromToClauseAndHintNested() {
		Bar bar = context.resolve(Bar.class);
		assertEquals(named("myNameNested"), bar.foo.actualName);
		assertEquals(named("special"), bar.superFoo.actualName);
	}

	@Test
	void actualNameFromToClauseDoubleNested() {
		assertEquals(named("inner"),
				context.resolve(Bar.class).superFoo.innerFoo.actualName);
	}

	@Test
	void actualNameFromParameterNestedFlatWildcardType() {
		assertEquals(Name.ANY, context.resolve(Que.class).genericFoo.actualName);
	}

	@Test
	void actualNameFromParameterNestedFlatWildcardTypeWithHintOverload() {
		Que que = context.resolve("x", Que.class);
		assertEquals(named("x"), que.actualName);
		assertEquals(named("y"), que.genericFoo.actualName);
	}

	/* Dependency */

	@Test
	void actualDependencyFromAdHoc() {
		@SuppressWarnings("rawtypes")
		Type<Foo> type = raw(Foo.class).parameterized(String.class);
		assertSimilar(
				dependency(type.asUpperBound()) //
						.injectingInto(anyOf(raw(Foo.class))), //
				context.resolve(type).actualDependency);
	}

	@Test
	void actualDependencyFromParameterNestedFlatWildcardTypeWithHintOverload() {
		Que que = context.resolve("x", Que.class);
		assertSimilar(dependency(Que.class) //
				.injectingInto(instance(named("x"), raw(Que.class))), //
				que.actualDependency);
	}

	@Test
	void actualDependencyFromParameterNestedDeep() {
		Bar bar = context.resolve(Bar.class);
		assertSimilar(dependency(Type.raw(SuperFoo.class).parameterized(Double.class)) //
				.injectingInto(Bar.class) //
				.injectingInto(anyOf(SuperFoo.class)), //
				bar.superFoo.actualDependency);
	}

	/**
	 * There are lots of details in a {@link Dependency} - to replicate them all
	 * exactly goes beyond what we try to check so we are happy if both have
	 * the same string output.
	 */
	private void assertSimilar(Dependency<?> expected, Dependency<?> actual) {
		assertEquals(expected.toString(), actual.toString());
	}
}
def cpu_baseline(full=False, migratable=False, out='libvirt', **kwargs):
    '''
    Return the optimal 'custom' CPU baseline config for VMs on this minion.

    :param full: when ``True``, return all CPU features instead of only
        the features on top of the closest base model
    :param migratable: when ``True``, exclude CPU features that would
        prevent migration
    :param out: output format; ``libvirt`` (default) returns the XML
        definition, ``salt`` returns a dict with model, vendor and the
        feature names
    :param kwargs: connection arguments forwarded to the libvirt
        connection helper
    :raises ValueError: if ``migratable`` is requested but not supported
        by the installed libvirt, or if the CPU model cannot be resolved
        in the static CPU map
    '''
    conn = __get_conn(**kwargs)
    caps = _capabilities(conn)
    cpu = caps.getElementsByTagName('host')[0].getElementsByTagName('cpu')[0]
    log.debug('Host CPU model definition: %s', cpu.toxml())

    flags = 0
    if migratable:
        # Not supported by all libvirt versions.
        if getattr(libvirt, 'VIR_CONNECT_BASELINE_CPU_MIGRATABLE', False):
            # BUGFIX: combine flag bits with |= instead of += (equivalent
            # here, but += silently corrupts flags if a bit is set twice).
            flags |= libvirt.VIR_CONNECT_BASELINE_CPU_MIGRATABLE
        else:
            conn.close()
            # BUGFIX: the original raised a bare ValueError with no message.
            raise ValueError(
                'Cannot get a migratable CPU baseline: not supported by '
                'this version of libvirt')

    if full and getattr(libvirt, 'VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES', False):
        flags |= libvirt.VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES

    cpu = conn.baselineCPU([cpu.toxml()], flags)
    cpu = minidom.parseString(cpu).getElementsByTagName('cpu')[0]
    conn.close()

    if full and not getattr(libvirt, 'VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES', False):
        # Fallback for old libvirt without EXPAND_FEATURES: expand the
        # features manually by walking the model ancestry in the CPU map.
        with salt.utils.files.fopen('/usr/share/libvirt/cpu_map.xml', 'r') as cpu_map:
            cpu_map = minidom.parse(cpu_map)
        cpu_model = cpu.getElementsByTagName('model')[0].childNodes[0].nodeValue
        while cpu_model:
            cpu_map_models = cpu_map.getElementsByTagName('model')
            cpu_specs = [
                el for el in cpu_map_models
                if el.getAttribute('name') == cpu_model and el.hasChildNodes()
            ]
            if not cpu_specs:
                raise ValueError('Model {0} not found in CPU map'.format(cpu_model))
            elif len(cpu_specs) > 1:
                raise ValueError('Multiple models {0} found in CPU map'.format(cpu_model))
            cpu_specs = cpu_specs[0]
            # Follow the <model> child to the parent model, if any; the
            # loop stops once a root model (no parent) is reached.
            cpu_model = cpu_specs.getElementsByTagName('model')
            cpu_model = cpu_model[0].getAttribute('name') if cpu_model else None
            for feature in cpu_specs.getElementsByTagName('feature'):
                cpu.appendChild(feature)

    if out == 'salt':
        return {
            'model': cpu.getElementsByTagName('model')[0].childNodes[0].nodeValue,
            'vendor': cpu.getElementsByTagName('vendor')[0].childNodes[0].nodeValue,
            'features': [feature.getAttribute('name')
                         for feature in cpu.getElementsByTagName('feature')]
        }
    return cpu.toxml()
package crawler

import (
	"log"
	"strconv"
	"strings"

	"github.com/yangchenxi/VOCALOIDTube/model/youtubeData"
)

// Producer/consumer model for web crawling and processing.
// Not a textbook producer/consumer, but close enough: one runner hands
// tasks out to a fixed pool of workers and assigns a new task each time
// a worker reports back.

// Runner owns the pending task queue (video IDs) and the worker count.
type Runner struct {
	TaskQueue       []string
	NumberOfWorkers int
}

// NewRunner creates a Runner over the given tasks with workerNum workers.
func NewRunner(workerNum int, tasks []string) *Runner {
	return &Runner{
		TaskQueue: tasks,
		NumberOfWorkers: workerNum,
	}
}

// startRunner is the producer: it seeds the workers, then keeps collecting
// results and handing out new tasks until a fatal quota error occurs.
func (r *Runner) startRunner() {
	workers := make([]dataChan, r.NumberOfWorkers)
	// Buffered so reporting workers never block the runner.
	ctrlChan := make(controlChan, r.NumberOfWorkers)

	// Workers that are waiting because the queue ran dry.
	var idle []int

	for i := 0; i < r.NumberOfWorkers; i++ {
		workers[i] = make(dataChan, 1)
		go process(i, workers[i], ctrlChan)
		// BUGFIX: the original indexed r.TaskQueue[0] unconditionally and
		// panicked when there were fewer initial tasks than workers.
		if len(r.TaskQueue) > 0 {
			workers[i] <- r.TaskQueue[0]
			r.TaskQueue = r.TaskQueue[1:]
		} else {
			idle = append(idle, i)
		}
	}

	for {
		c := <-ctrlChan
		if c.errorData != "" {
			log.Println("ctrl channel Error:" + c.errorData)
			if strings.Contains(c.errorData, "Daily Limit Exceeded") {
				// TODO: wait until the quota resets (next day).
				return
			}
		} else {
			// Queue the newly discovered video IDs.
			r.TaskQueue = append(r.TaskQueue, c.newData...)
		}

		// The reporting worker is now idle; assign work to as many idle
		// workers as the queue allows.
		// BUGFIX: the original popped r.TaskQueue[0] unconditionally and
		// panicked once the queue drained.
		idle = append(idle, c.workerNum)
		for len(idle) > 0 && len(r.TaskQueue) > 0 {
			w := idle[0]
			idle = idle[1:]
			workers[w] <- r.TaskQueue[0]
			r.TaskQueue = r.TaskQueue[1:]
		}
	}
}

// process is the consumer: it waits for a task on its private channel,
// crawls it and reports either the discovered IDs or the error back.
func process(id int, dchan dataChan, ctrlChan controlChan) {
	for {
		d := <-dchan
		log.Println("worker " + strconv.Itoa(id) + "start")
		data, err := processVideoID(d)
		if err != nil {
			ctrlChan <- controlData{
				workerNum: id,
				errorData: err.Error(),
			}
		} else {
			ctrlChan <- controlData{
				workerNum: id,
				newData: data,
			}
		}
		log.Println("worker " + strconv.Itoa(id) + "end")
	}
}

// processVideoID fetches the suggested-video IDs for one video ID.
func processVideoID(vid string) ([]string, error) {
	// TODO: tune the pacing later; each worker has its own channel, so
	// blocking is not a concern.
	resp, err := youtubeData.GetSuggestedVideosIDFromVideoID(vid)
	if err != nil {
		return nil, err
	}
	// TODO: add filter and Bayes classification.
	// TODO: store the data in resp to the db; if an ID is already in the
	// db, remove it from resp — remember to add the parent field.
	return resp, nil
}
Gil Liane (@gil_e_liane) So Australians have to vote to pass gay marriage, but get no say about going to war with North Korea at the behest of a madman? Variants of this tweet circulated throughout social media on Friday following Malcolm Turnbull’s announcement that, as per the Anzus treaty, Australia would automatically support Donald Trump in a conflict with North Korea. Many posed the query rhetorically, highlighting the ridiculous obstructions to marriage reform. But the question is a serious one. Why shouldn’t Australians be consulted before their government enlists in a battle between two nuclear-armed states? Australia will back US in any conflict with North Korea, Turnbull says Read more It’s not as if the last US-led invasion that our politicians embraced turned out so thumpingly well. Fourteen years later, the war on Iraq launched by Bush and Blair and Howard hasn’t so much ended as evolved, mutating into ever more ghastly forms. In July, the Iraqi army finally drove Islamic State from Mosul – and the images of that once beautiful city reduced to rubble and twisted steel revealed something of the horror inflicted on ordinary Iraqis by a succession of different combatants. The continuing slaughter in that country (and in the related conflict in Syria) highlights the propensity of wars to spread, particularly when they are fought in strategic locations. Well, the Korean peninsula is a strategic location, too – not least for China, the rising superpower identified by White House strategists as a long-term adversary. Right now, it’s easy to imagine how a war with North Korea might start; it’s a lot more difficult to say exactly how it would end. Yet, according to the prime minister, we’re committed in advance to whatever bloody debacle Trump sparks during one of his late-night tweeting sprees. All the way with Donald J – that’s, apparently, where we’re at. 
Many people would assume that a matter as substantive as joining a war would, at the very least, be voted on by MPs, the elected representatives of the Australian people. But as the parliamentary library drily explains, “since 1901, neither the Australian constitution nor defence legislation has required the government to gain parliamentary approval for the decision to deploy forces overseas or, in the rare cases that it has occurred, to declare war”. In other words, in every past occasion where Australians have killed and died on orders from their government, they’ve done so because of decisions made by the prime minister and cabinet alone. There is, however, one partial exception – and, bizarrely enough, it, too, involves a plebiscite. By a weird historical irony, Turnbull’s postal ballot on same-sex marriage comes on the 100th anniversary of the second conscription referendum, a poll that was also staged as a tricky manoeuvre to circumvent the left. In 1916, with the first world war already descending into mechanised slaughter, Labor prime minister Billy Hughes returned from a visit to England determined to introduce military conscription. The idea of forcing men to fight was deeply unpalatable to the labour movement, seen even by war supporters as dictatorial. The PM could not convince his own party and lacked the numbers to push a bill through the Senate. Hughes – a far cannier politician than Turnbull – hit upon a plebiscite as a mechanism to circumvent the Labor left. A public vote would not be binding but he assumed (probably correctly) that a resounding victory for yes would force the rebels into line. There was every reason for confidence. As the historian Russel Ward explains in his book Australia: A Short History, “Most prominent and respectable citizens, the entire daily press of the country, and the still very influential Sydney Bulletin passionately advocated compulsion. And most church leaders concurred. 
The Anglican Synod passed unanimously a resolution certifying that the war was a religious one, that God was on the side of the Allies, and that conscription was morally necessary.” The anti-conscriptionists possessed few comparable resources. But they set about building the No case from the grassroots up. The suppression of the Easter uprising in Ireland left many Catholic workers deeply suspicious about Britain’s war motives, and the archbishop of Melbourne, Dr Daniel Mannix, became a powerful voice against compulsion. Agitators organised in their workplaces and spoke in country town halls; protesters came out Sunday after Sunday on huge open air rallies at the Domain and the Yarra Bank. In Melbourne, perhaps 50,000 people attended a meeting in the Exhibition Building, and a stop work rally brought 40,000 on to the streets. The young John Curtin urged fellow unionists to “refuse to be bullied, or lied to, or voted into the slavery of military control … ” The eventual result showed 1,100,033 Australians voting against conscription compared with 1,087,557 who supported it – a stunning vindication of the no campaign and a huge rebuff to the PM. Hughes duly walked out of the Labor party and formed a national government with the aid of conservatives. As the war dragged on and enlistment numbers dropped, the generals and journalists and politicians insisted that Australia required compulsory military service. But, after 1916, it was no longer politically palatable to introduce conscription without a vote. Hughes duly announced a second plebiscite on 7 November 1917. This time, the ballot avoided any mention of conscription, simply asking voters whether they favoured sending reinforcements to the troops. The poll was scheduled on a weekday (which made voting more difficult for workers ); the electoral rolls were closed a mere two days after the announcement of the referendum, to prevent No campaigners from enrolling their supporters. 
The intensification of military censorship saw key activists like Henry E Boote, the editor of The Worker, arrested for describing conscription as the “lottery of death”, while Hughes instructed the authorities to ban Queensland’s Hansard because it contained a “No” speech delivered by premier Ryan. Meanwhile, pro-government propaganda circulated unimpeded. “Every No vote is a vote against Britain, France, Belgium, Italy, America and Australia … ” one leaflet explained. “Every elector who votes No condones the slaughter of innocent thousands in Belgium, in Northern France, in Serbia. He condones the maiming of children, the violation of women, the crippling of myriads who had given no offence and deserved no punishment.” Even the staunchest anti-conscriptionists doubted they could win under such circumstances. In fact, when the ballots were counted, the No vote had increased. What happened? Ironically, the plebiscite introduced as a way of silencing leftists gave them an extraordinary platform. The heightened atmosphere produced by the debate facilitated a broader politicisation. With military compulsion discussed in every workplace, church and school, ordinary Australians found themselves contemplating ideas that they’d never otherwise have considered – and, in many cases, opposition to conscription became opposition to the war as a whole. That’s a crucial lesson from Hughes’ plebiscite for the one we face today: simply, the battle is winnable. Turnbull might have designed his postal ballot to delay marriage reform but he still needs to convince voters. The experience of the last decade has shown that the more that people think about equal marriage, the more they’re inclined to support it. The conservative case relies on fear and ignorance, sentiments that a grassroots campaign can overcome. They did it in 1917. We can do it today. But the plebiscite should also raise broader questions about democracy in Australia. 
Turnbull considers Nato request for more Australian troops in Afghanistan Read more Subsequent governments never replicated Hughes’s experiment with wartime polling. There was no plebiscite prior to the introduction of conscription during the second world war, nor during the intervention in Vietnam. The quagmire in Afghanistan illustrates the problem. Australia joined the American-led invasion of 2001 without any parliamentary vote at all. By 2008 public opinion had turned decisively against the war but Afghanistan wasn’t discussed in parliament until two years later – and the “debate” that ensued then consisted of the two major parties agreeing with each other about the need to stay the course. Again, if we have to vote on love, why can’t we vote on war? Turnbull describes same-sex marriage as a “very big moral issue”. Many of us feel the same way about a nuclear exchange on the Korean peninsula. One way or another, we need to make our voices heard.
<reponame>lc6chang/supl-client // Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.location.suplclient.asn1.supl2.lpp; // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // // import com.google.location.suplclient.asn1.base.Asn1Boolean; import com.google.location.suplclient.asn1.base.Asn1Enumerated; import com.google.location.suplclient.asn1.base.Asn1Integer; import com.google.location.suplclient.asn1.base.Asn1Object; import com.google.location.suplclient.asn1.base.Asn1Sequence; import com.google.location.suplclient.asn1.base.Asn1Tag; import com.google.location.suplclient.asn1.base.BitStream; import com.google.location.suplclient.asn1.base.BitStreamReader; import com.google.location.suplclient.asn1.base.SequenceComponent; import com.google.common.collect.ImmutableList; import java.util.Collection; import javax.annotation.Nullable; /** * */ public class GNSS_AcquisitionAssistElement extends Asn1Sequence { // private static final Asn1Tag TAG_GNSS_AcquisitionAssistElement = Asn1Tag.fromClassAndNumber(-1, -1); public GNSS_AcquisitionAssistElement() { super(); } @Override @Nullable protected Asn1Tag getTag() { return TAG_GNSS_AcquisitionAssistElement; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_GNSS_AcquisitionAssistElement != null) { return 
ImmutableList.of(TAG_GNSS_AcquisitionAssistElement); } else { return Asn1Sequence.getPossibleFirstTags(); } } /** * Creates a new GNSS_AcquisitionAssistElement from encoded stream. */ public static GNSS_AcquisitionAssistElement fromPerUnaligned(byte[] encodedBytes) { GNSS_AcquisitionAssistElement result = new GNSS_AcquisitionAssistElement(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new GNSS_AcquisitionAssistElement from encoded stream. */ public static GNSS_AcquisitionAssistElement fromPerAligned(byte[] encodedBytes) { GNSS_AcquisitionAssistElement result = new GNSS_AcquisitionAssistElement(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override protected boolean isExtensible() { return true; } @Override public boolean containsExtensionValues() { for (SequenceComponent extensionComponent : getExtensionComponents()) { if (extensionComponent.isExplicitlySet()) return true; } return false; } private SV_ID svID_; public SV_ID getSvID() { return svID_; } /** * @throws ClassCastException if value is not a SV_ID */ public void setSvID(Asn1Object value) { this.svID_ = (SV_ID) value; } public SV_ID setSvIDToNewInstance() { svID_ = new SV_ID(); return svID_; } private GNSS_AcquisitionAssistElement.doppler0Type doppler0_; public GNSS_AcquisitionAssistElement.doppler0Type getDoppler0() { return doppler0_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.doppler0Type */ public void setDoppler0(Asn1Object value) { this.doppler0_ = (GNSS_AcquisitionAssistElement.doppler0Type) value; } public GNSS_AcquisitionAssistElement.doppler0Type setDoppler0ToNewInstance() { doppler0_ = new GNSS_AcquisitionAssistElement.doppler0Type(); return doppler0_; } private GNSS_AcquisitionAssistElement.doppler1Type doppler1_; public GNSS_AcquisitionAssistElement.doppler1Type getDoppler1() { return doppler1_; } /** * @throws ClassCastException if value is not a 
GNSS_AcquisitionAssistElement.doppler1Type */ public void setDoppler1(Asn1Object value) { this.doppler1_ = (GNSS_AcquisitionAssistElement.doppler1Type) value; } public GNSS_AcquisitionAssistElement.doppler1Type setDoppler1ToNewInstance() { doppler1_ = new GNSS_AcquisitionAssistElement.doppler1Type(); return doppler1_; } private GNSS_AcquisitionAssistElement.dopplerUncertaintyType dopplerUncertainty_; public GNSS_AcquisitionAssistElement.dopplerUncertaintyType getDopplerUncertainty() { return dopplerUncertainty_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.dopplerUncertaintyType */ public void setDopplerUncertainty(Asn1Object value) { this.dopplerUncertainty_ = (GNSS_AcquisitionAssistElement.dopplerUncertaintyType) value; } public GNSS_AcquisitionAssistElement.dopplerUncertaintyType setDopplerUncertaintyToNewInstance() { dopplerUncertainty_ = new GNSS_AcquisitionAssistElement.dopplerUncertaintyType(); return dopplerUncertainty_; } private GNSS_AcquisitionAssistElement.codePhaseType codePhase_; public GNSS_AcquisitionAssistElement.codePhaseType getCodePhase() { return codePhase_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.codePhaseType */ public void setCodePhase(Asn1Object value) { this.codePhase_ = (GNSS_AcquisitionAssistElement.codePhaseType) value; } public GNSS_AcquisitionAssistElement.codePhaseType setCodePhaseToNewInstance() { codePhase_ = new GNSS_AcquisitionAssistElement.codePhaseType(); return codePhase_; } private GNSS_AcquisitionAssistElement.intCodePhaseType intCodePhase_; public GNSS_AcquisitionAssistElement.intCodePhaseType getIntCodePhase() { return intCodePhase_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.intCodePhaseType */ public void setIntCodePhase(Asn1Object value) { this.intCodePhase_ = (GNSS_AcquisitionAssistElement.intCodePhaseType) value; } public GNSS_AcquisitionAssistElement.intCodePhaseType setIntCodePhaseToNewInstance() 
{ intCodePhase_ = new GNSS_AcquisitionAssistElement.intCodePhaseType(); return intCodePhase_; } private GNSS_AcquisitionAssistElement.codePhaseSearchWindowType codePhaseSearchWindow_; public GNSS_AcquisitionAssistElement.codePhaseSearchWindowType getCodePhaseSearchWindow() { return codePhaseSearchWindow_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.codePhaseSearchWindowType */ public void setCodePhaseSearchWindow(Asn1Object value) { this.codePhaseSearchWindow_ = (GNSS_AcquisitionAssistElement.codePhaseSearchWindowType) value; } public GNSS_AcquisitionAssistElement.codePhaseSearchWindowType setCodePhaseSearchWindowToNewInstance() { codePhaseSearchWindow_ = new GNSS_AcquisitionAssistElement.codePhaseSearchWindowType(); return codePhaseSearchWindow_; } private GNSS_AcquisitionAssistElement.azimuthType azimuth_; public GNSS_AcquisitionAssistElement.azimuthType getAzimuth() { return azimuth_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.azimuthType */ public void setAzimuth(Asn1Object value) { this.azimuth_ = (GNSS_AcquisitionAssistElement.azimuthType) value; } public GNSS_AcquisitionAssistElement.azimuthType setAzimuthToNewInstance() { azimuth_ = new GNSS_AcquisitionAssistElement.azimuthType(); return azimuth_; } private GNSS_AcquisitionAssistElement.elevationType elevation_; public GNSS_AcquisitionAssistElement.elevationType getElevation() { return elevation_; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.elevationType */ public void setElevation(Asn1Object value) { this.elevation_ = (GNSS_AcquisitionAssistElement.elevationType) value; } public GNSS_AcquisitionAssistElement.elevationType setElevationToNewInstance() { elevation_ = new GNSS_AcquisitionAssistElement.elevationType(); return elevation_; } private GNSS_AcquisitionAssistElement.codePhase1023Type extensionCodePhase1023; public GNSS_AcquisitionAssistElement.codePhase1023Type 
getExtensionCodePhase1023() { return extensionCodePhase1023; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.codePhase1023Type */ public void setExtensionCodePhase1023(Asn1Object value) { extensionCodePhase1023 = (GNSS_AcquisitionAssistElement.codePhase1023Type) value; } public void setExtensionCodePhase1023ToNewInstance() { extensionCodePhase1023 = new GNSS_AcquisitionAssistElement.codePhase1023Type(); } private GNSS_AcquisitionAssistElement.dopplerUncertaintyExt_r10Type extensionDopplerUncertaintyExt_r10; public GNSS_AcquisitionAssistElement.dopplerUncertaintyExt_r10Type getExtensionDopplerUncertaintyExt_r10() { return extensionDopplerUncertaintyExt_r10; } /** * @throws ClassCastException if value is not a GNSS_AcquisitionAssistElement.dopplerUncertaintyExt_r10Type */ public void setExtensionDopplerUncertaintyExt_r10(Asn1Object value) { extensionDopplerUncertaintyExt_r10 = (GNSS_AcquisitionAssistElement.dopplerUncertaintyExt_r10Type) value; } public void setExtensionDopplerUncertaintyExt_r10ToNewInstance() { extensionDopplerUncertaintyExt_r10 = new GNSS_AcquisitionAssistElement.dopplerUncertaintyExt_r10Type(); } @Override public Iterable<? extends SequenceComponent> getComponents() { ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder(); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0); @Override public boolean isExplicitlySet() { return getSvID() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getSvID(); } @Override public void setToNewInstance() { setSvIDToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
SV_ID.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "svID : " + getSvID().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 1); @Override public boolean isExplicitlySet() { return getDoppler0() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getDoppler0(); } @Override public void setToNewInstance() { setDoppler0ToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? GNSS_AcquisitionAssistElement.doppler0Type.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "doppler0 : " + getDoppler0().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 2); @Override public boolean isExplicitlySet() { return getDoppler1() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getDoppler1(); } @Override public void setToNewInstance() { setDoppler1ToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
GNSS_AcquisitionAssistElement.doppler1Type.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "doppler1 : " + getDoppler1().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 3); @Override public boolean isExplicitlySet() { return getDopplerUncertainty() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getDopplerUncertainty(); } @Override public void setToNewInstance() { setDopplerUncertaintyToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? GNSS_AcquisitionAssistElement.dopplerUncertaintyType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "dopplerUncertainty : " + getDopplerUncertainty().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 4); @Override public boolean isExplicitlySet() { return getCodePhase() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getCodePhase(); } @Override public void setToNewInstance() { setCodePhaseToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
GNSS_AcquisitionAssistElement.codePhaseType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "codePhase : " + getCodePhase().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 5); @Override public boolean isExplicitlySet() { return getIntCodePhase() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getIntCodePhase(); } @Override public void setToNewInstance() { setIntCodePhaseToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? GNSS_AcquisitionAssistElement.intCodePhaseType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "intCodePhase : " + getIntCodePhase().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 6); @Override public boolean isExplicitlySet() { return getCodePhaseSearchWindow() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getCodePhaseSearchWindow(); } @Override public void setToNewInstance() { setCodePhaseSearchWindowToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
GNSS_AcquisitionAssistElement.codePhaseSearchWindowType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "codePhaseSearchWindow : " + getCodePhaseSearchWindow().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 7); @Override public boolean isExplicitlySet() { return getAzimuth() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getAzimuth(); } @Override public void setToNewInstance() { setAzimuthToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? GNSS_AcquisitionAssistElement.azimuthType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "azimuth : " + getAzimuth().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 8); @Override public boolean isExplicitlySet() { return getElevation() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getElevation(); } @Override public void setToNewInstance() { setElevationToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
GNSS_AcquisitionAssistElement.elevationType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "elevation : " + getElevation().toIndentedString(indent); } }); return builder.build(); } @Override public Iterable<? extends SequenceComponent> getExtensionComponents() { ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder(); builder.add(new SequenceComponent() { @Override public boolean isExplicitlySet() { return getExtensionCodePhase1023() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return true; } @Override public Asn1Object getComponentValue() { return getExtensionCodePhase1023(); } @Override public void setToNewInstance() { setExtensionCodePhase1023ToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { throw new UnsupportedOperationException( "BER decoding not supported for extension elements"); } @Override public Asn1Tag getTag() { throw new UnsupportedOperationException( "BER is not supported for extension elements"); } @Override public boolean isImplicitTagging() { throw new UnsupportedOperationException( "BER is not supported for extension elements"); } @Override public String toIndentedString(String indent) { return "codePhase1023 : " + getExtensionCodePhase1023().toIndentedString(indent); } }); builder.add(new SequenceComponent() { @Override public boolean isExplicitlySet() { return getExtensionDopplerUncertaintyExt_r10() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return true; } @Override public Asn1Object getComponentValue() { return getExtensionDopplerUncertaintyExt_r10(); } @Override public void setToNewInstance() { setExtensionDopplerUncertaintyExt_r10ToNewInstance(); } @Override public 
Collection<Asn1Tag> getPossibleFirstTags() { throw new UnsupportedOperationException( "BER decoding not supported for extension elements"); } @Override public Asn1Tag getTag() { throw new UnsupportedOperationException( "BER is not supported for extension elements"); } @Override public boolean isImplicitTagging() { throw new UnsupportedOperationException( "BER is not supported for extension elements"); } @Override public String toIndentedString(String indent) { return "dopplerUncertaintyExt_r10 : " + getExtensionDopplerUncertaintyExt_r10().toIndentedString(indent); } }); return builder.build(); } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class doppler0Type extends Asn1Integer { // private static final Asn1Tag TAG_doppler0Type = Asn1Tag.fromClassAndNumber(-1, -1); public doppler0Type() { super(); setValueRange(new java.math.BigInteger("-2048"), new java.math.BigInteger("2047")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_doppler0Type; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_doppler0Type != null) { return ImmutableList.of(TAG_doppler0Type); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new doppler0Type from encoded stream. */ public static doppler0Type fromPerUnaligned(byte[] encodedBytes) { doppler0Type result = new doppler0Type(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new doppler0Type from encoded stream. 
*/ public static doppler0Type fromPerAligned(byte[] encodedBytes) { doppler0Type result = new doppler0Type(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "doppler0Type = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class doppler1Type extends Asn1Integer { // private static final Asn1Tag TAG_doppler1Type = Asn1Tag.fromClassAndNumber(-1, -1); public doppler1Type() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("63")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_doppler1Type; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_doppler1Type != null) { return ImmutableList.of(TAG_doppler1Type); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new doppler1Type from encoded stream. */ public static doppler1Type fromPerUnaligned(byte[] encodedBytes) { doppler1Type result = new doppler1Type(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new doppler1Type from encoded stream. 
*/ public static doppler1Type fromPerAligned(byte[] encodedBytes) { doppler1Type result = new doppler1Type(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "doppler1Type = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class dopplerUncertaintyType extends Asn1Integer { // private static final Asn1Tag TAG_dopplerUncertaintyType = Asn1Tag.fromClassAndNumber(-1, -1); public dopplerUncertaintyType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("4")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_dopplerUncertaintyType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_dopplerUncertaintyType != null) { return ImmutableList.of(TAG_dopplerUncertaintyType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new dopplerUncertaintyType from encoded stream. */ public static dopplerUncertaintyType fromPerUnaligned(byte[] encodedBytes) { dopplerUncertaintyType result = new dopplerUncertaintyType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new dopplerUncertaintyType from encoded stream. 
*/ public static dopplerUncertaintyType fromPerAligned(byte[] encodedBytes) { dopplerUncertaintyType result = new dopplerUncertaintyType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "dopplerUncertaintyType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class codePhaseType extends Asn1Integer { // private static final Asn1Tag TAG_codePhaseType = Asn1Tag.fromClassAndNumber(-1, -1); public codePhaseType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("1022")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_codePhaseType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_codePhaseType != null) { return ImmutableList.of(TAG_codePhaseType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new codePhaseType from encoded stream. */ public static codePhaseType fromPerUnaligned(byte[] encodedBytes) { codePhaseType result = new codePhaseType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new codePhaseType from encoded stream. 
*/ public static codePhaseType fromPerAligned(byte[] encodedBytes) { codePhaseType result = new codePhaseType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "codePhaseType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class intCodePhaseType extends Asn1Integer { // private static final Asn1Tag TAG_intCodePhaseType = Asn1Tag.fromClassAndNumber(-1, -1); public intCodePhaseType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("127")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_intCodePhaseType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_intCodePhaseType != null) { return ImmutableList.of(TAG_intCodePhaseType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new intCodePhaseType from encoded stream. */ public static intCodePhaseType fromPerUnaligned(byte[] encodedBytes) { intCodePhaseType result = new intCodePhaseType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new intCodePhaseType from encoded stream. 
*/ public static intCodePhaseType fromPerAligned(byte[] encodedBytes) { intCodePhaseType result = new intCodePhaseType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "intCodePhaseType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class codePhaseSearchWindowType extends Asn1Integer { // private static final Asn1Tag TAG_codePhaseSearchWindowType = Asn1Tag.fromClassAndNumber(-1, -1); public codePhaseSearchWindowType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("31")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_codePhaseSearchWindowType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_codePhaseSearchWindowType != null) { return ImmutableList.of(TAG_codePhaseSearchWindowType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new codePhaseSearchWindowType from encoded stream. */ public static codePhaseSearchWindowType fromPerUnaligned(byte[] encodedBytes) { codePhaseSearchWindowType result = new codePhaseSearchWindowType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new codePhaseSearchWindowType from encoded stream. 
*/ public static codePhaseSearchWindowType fromPerAligned(byte[] encodedBytes) { codePhaseSearchWindowType result = new codePhaseSearchWindowType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "codePhaseSearchWindowType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class azimuthType extends Asn1Integer { // private static final Asn1Tag TAG_azimuthType = Asn1Tag.fromClassAndNumber(-1, -1); public azimuthType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("511")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_azimuthType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_azimuthType != null) { return ImmutableList.of(TAG_azimuthType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new azimuthType from encoded stream. */ public static azimuthType fromPerUnaligned(byte[] encodedBytes) { azimuthType result = new azimuthType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new azimuthType from encoded stream. 
*/ public static azimuthType fromPerAligned(byte[] encodedBytes) { azimuthType result = new azimuthType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "azimuthType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class elevationType extends Asn1Integer { // private static final Asn1Tag TAG_elevationType = Asn1Tag.fromClassAndNumber(-1, -1); public elevationType() { super(); setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("127")); } @Override @Nullable protected Asn1Tag getTag() { return TAG_elevationType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_elevationType != null) { return ImmutableList.of(TAG_elevationType); } else { return Asn1Integer.getPossibleFirstTags(); } } /** * Creates a new elevationType from encoded stream. */ public static elevationType fromPerUnaligned(byte[] encodedBytes) { elevationType result = new elevationType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new elevationType from encoded stream. 
*/ public static elevationType fromPerAligned(byte[] encodedBytes) { elevationType result = new elevationType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "elevationType = " + getInteger() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class codePhase1023Type extends Asn1Boolean { // private static final Asn1Tag TAG_codePhase1023Type = Asn1Tag.fromClassAndNumber(-1, -1); public codePhase1023Type() { super(); } @Override @Nullable protected Asn1Tag getTag() { return TAG_codePhase1023Type; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_codePhase1023Type != null) { return ImmutableList.of(TAG_codePhase1023Type); } else { return Asn1Boolean.getPossibleFirstTags(); } } /** * Creates a new codePhase1023Type from encoded stream. */ public static codePhase1023Type fromPerUnaligned(byte[] encodedBytes) { codePhase1023Type result = new codePhase1023Type(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new codePhase1023Type from encoded stream. 
*/ public static codePhase1023Type fromPerAligned(byte[] encodedBytes) { codePhase1023Type result = new codePhase1023Type(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "codePhase1023Type = " + getValue() + ";\n"; } } // Copyright 2008 Google Inc. All Rights Reserved. /* * AUTOMATICALLY GENERATED. Do NOT EDIT. */ // /** * */ public static class dopplerUncertaintyExt_r10Type extends Asn1Enumerated { public enum Value implements Asn1Enumerated.Value { d60(0), d80(1), d100(2), d120(3), noInformation(4), ; Value(int i) { value = i; } private int value; public int getAssignedValue() { return value; } @Override public boolean isExtensionValue() { return false; } } @Override protected Value getDefaultValue() { return null ; } @SuppressWarnings("unchecked") public Value enumValue() { return (Value) getValue(); } public void setTo_d60() { setValue(Value.d60); } public void setTo_d80() { setValue(Value.d80); } public void setTo_d100() { setValue(Value.d100); } public void setTo_d120() { setValue(Value.d120); } public void setTo_noInformation() { setValue(Value.noInformation); } public enum ExtensionValue implements Asn1Enumerated.Value { ; ExtensionValue(int i) { value = i; } private int value; @Override public int getAssignedValue() { return value; } @Override public boolean isExtensionValue() { return true; } } @SuppressWarnings("unchecked") public ExtensionValue extEnumValue() { return (ExtensionValue) getValue(); } private static 
final Asn1Tag TAG_dopplerUncertaintyExt_r10Type = Asn1Tag.fromClassAndNumber(-1, -1); public dopplerUncertaintyExt_r10Type() { super(); // use template substitution instead of calling getDefaultValue(), since // calling virtual methods from a ctor is frowned upon here. setValue(null ); } @Override @Nullable protected Asn1Tag getTag() { return TAG_dopplerUncertaintyExt_r10Type; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_dopplerUncertaintyExt_r10Type != null) { return ImmutableList.of(TAG_dopplerUncertaintyExt_r10Type); } else { return Asn1Enumerated.getPossibleFirstTags(); } } @Override protected boolean isExtensible() { return true; } @Override protected Asn1Enumerated.Value lookupValue(int ordinal) { return Value.values()[ordinal]; } @Override protected Asn1Enumerated.Value lookupExtensionValue(int ordinal) { return ExtensionValue.values()[ordinal]; } @Override protected int getValueCount() { return Value.values().length; } /** * Creates a new dopplerUncertaintyExt_r10Type from encoded stream. */ public static dopplerUncertaintyExt_r10Type fromPerUnaligned(byte[] encodedBytes) { dopplerUncertaintyExt_r10Type result = new dopplerUncertaintyExt_r10Type(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new dopplerUncertaintyExt_r10Type from encoded stream. 
*/ public static dopplerUncertaintyExt_r10Type fromPerAligned(byte[] encodedBytes) { dopplerUncertaintyExt_r10Type result = new dopplerUncertaintyExt_r10Type(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { return "dopplerUncertaintyExt_r10Type = " + getValue() + ";\n"; } } @Override public Iterable<BitStream> encodePerUnaligned() { return super.encodePerUnaligned(); } @Override public Iterable<BitStream> encodePerAligned() { return super.encodePerAligned(); } @Override public void decodePerUnaligned(BitStreamReader reader) { super.decodePerUnaligned(reader); } @Override public void decodePerAligned(BitStreamReader reader) { super.decodePerAligned(reader); } @Override public String toString() { return toIndentedString(""); } public String toIndentedString(String indent) { StringBuilder builder = new StringBuilder(); builder.append("GNSS_AcquisitionAssistElement = {\n"); final String internalIndent = indent + " "; for (SequenceComponent component : getComponents()) { if (component.isExplicitlySet()) { builder.append(internalIndent) .append(component.toIndentedString(internalIndent)); } } if (isExtensible()) { builder.append(internalIndent).append("...\n"); for (SequenceComponent component : getExtensionComponents()) { if (component.isExplicitlySet()) { builder.append(internalIndent) .append(component.toIndentedString(internalIndent)); } } } builder.append(indent).append("};\n"); return builder.toString(); } }
A DUP MP has indicated that if the Irish government attempted to propose measures for resolving the Stormont talks impasse, it would instantaneously scupper any negotiations. Jeffrey Donaldson, MP for Lagan Valley and the man in charge of party discipline at Westminster, also said that it now looks likely the Province will face a period of direct rule, with no resolution in sight to the DUP-Sinn Fein impasse. He was speaking after the SDLP on Monday re-iterated a call for both the UK government and the Irish government to jointly put forward their own proposals for how the negotiations deadlock can be broken. Its party leader, Colum Eastwood, had initially issued such a statement on September 11, stating that if no deal was reached by the end of that week, then both governments “should intervene and publish their own joint proposals on what they believe to be a fair deal and compromise”. Yesterday, he re-stated this call more emphatically, saying both governments must now “forcefully step in to forge a deal that can accommodate both the Irish and the British traditions in the North”, and that parties should then be challenged to either opt in or opt out of whatever their proposals were. Mr Eastwood said yesterday: “Last week all of us gave the DUP and Sinn Fein space to engage in serious discussions. “We were asked to believe that the more positive tone and language of this last number of weeks would lead to a breakthrough.” Despite this, he said politicians “have once more run into the same old stalemate”. Mr Donaldson was asked about the call for the Irish and UK governments to set out a plan for the parties. He told the News Letter: “Given that most of the issues that we’re dealing with relate entirely to the internal affairs of Northern Ireland, there is no way that we would countenance proposals coming forward from the Irish government. 
“Such a proposition would be a fundamental breach of the Good Friday Agreement and the three-stranded approach (the name for the different segments of the agreement, which do not set out any Dublin participation in the government of Northern Ireland), and would bring the talks to an end. “They cross that line, the talks are over.” He said the discussions with Sinn Fein are “still at an impasse”. He would not be specific about the main sticking points, but said the problem was “Sinn Fein’s refusal to form a government without everybody else conceding to their demands” – demands which centre on the Irish language as the “main issue”. Asked if he can foresee anything happening in the week ahead to break the deadlock, he said: “I don’t. At the moment, I don’t see Sinn Fein backing down and agreeing to form a government.” He said that “therefore I think we’re headed for a period of direct rule” because “frankly we can’t afford to hang around much longer without a government”. Morning View: Donaldson right to dismiss Eastwood’s joint proposal idea He said he expects direct rule to kick in “in the next few weeks, when parliament returns”. When that happens, the party “will have to take stock at that stage”. Parliament is currently suspended during the party conference period, and returns on October 9. Within the last fortnight, there had been signs that perhaps the chances of a deal emerging between the DUP and Sinn Fein were growing better. On September 6, Sinn Fein and the DUP had each issued statements – containing similar content, and issued within minutes of each other – which said the two parties had been locked in a spell of “intensified dialogue” (in Sinn Fein’s wording) and “detailed engagement” (in the DUP’s wording) about a solution to the impasse, and that this was set to continue. 
In response to the SDLP’s call, the UK government – via the Northern Ireland Office – said: “The UK government, along with the Irish government, is engaging with the Northern Ireland parties to secure the re-establishment of inclusive, devolved government at Stormont, and the operation of all the institutions established under the Belfast Agreement. “That is our clear objective and we are determined to succeed.” Morning View: Donaldson right to dismiss Eastwood’s joint proposal idea
def _make_stem_layer(self):
    """Build the network stem: the first 3D conv plus the two pooling layers.

    Creates ``self.conv1``, ``self.maxpool`` and ``self.pool2`` as side
    effects; returns nothing.
    """
    # "Same"-style padding for every dimension of the (possibly scalar)
    # conv1 kernel, after expanding it to a (t, h, w) triple.
    stem_padding = tuple((k - 1) // 2 for k in _triple(self.conv1_kernel))
    self.conv1 = ConvModule(
        self.in_channels,
        self.base_channels,
        kernel_size=self.conv1_kernel,
        stride=(self.conv1_stride_t, 2, 2),
        padding=stem_padding,
        conv_cfg=self.conv_cfg,
        norm_cfg=self.norm_cfg,
        act_cfg=self.act_cfg)
    # Spatial 3x3 max pooling after conv1; the temporal stride is configurable.
    self.maxpool = nn.MaxPool3d(
        kernel_size=(1, 3, 3),
        stride=(self.pool1_stride_t, 2, 2),
        padding=(0, 1, 1))
    # Temporal-only pooling (stride 2 in time) — presumably applied after a
    # later residual stage; TODO confirm against the caller.
    self.pool2 = nn.MaxPool3d(kernel_size=(2, 1, 1), stride=(2, 1, 1))
// Must be called with interrupts disabled pub unsafe extern "C" fn cpuid() -> usize { let i = (mycpu() as *const Cpu).offset_from(cpus.as_ptr()); assert!(i >= 0); i as usize }
/**
 * Applies the changes produced by a completed scale gesture: updates the
 * stroke width on the canvas and resizes the pen icon to match.
 *
 * @param scaleFactor - the scale factor of the scale gesture.
 */
@Override
public void onScaleChanged(float scaleFactor) {
    final int roundedWidth = Math.round(scaleFactor);
    // When the gesture hits a width bound, remember the width one step
    // below the rounded value as the "previous" stroke width.
    final boolean atWidthBound =
            scaleFactor == ScaleHandler.MIN_WIDTH || scaleFactor == ScaleHandler.MAX_WIDTH;
    if (atWidthBound) {
        canvasView.setPreviousStrokeWidth(roundedWidth - 1);
    }
    canvasView.setStrokeWidth(roundedWidth);

    // Keep the pen icon square and in sync with the new width.
    ViewGroup.LayoutParams iconParams = penIcon.getLayoutParams();
    iconParams.width = (int) scaleFactor;
    iconParams.height = (int) scaleFactor;
    penIcon.setLayoutParams(iconParams);
}
// readOffsetTable reads an offset table at the given position and returns a map // with the key strings concatenated by the 0xff unicode non-character. func (r *indexReader) readOffsetTable(off uint64) (map[string]uint32, error) { const sep = "\xff" var ( d1 = r.decbufAt(int(off)) d2 = d1.decbuf(d1.be32int()) cnt = d2.be32() ) res := make(map[string]uint32, 512) for d2.err() == nil && d2.len() > 0 && cnt > 0 { keyCount := int(d2.uvarint()) keys := make([]string, 0, keyCount) for i := 0; i < keyCount; i++ { keys = append(keys, d2.uvarintStr()) } res[strings.Join(keys, sep)] = uint32(d2.uvarint()) cnt-- } return res, d2.err() }
/// Decision Forest module.
/** @file */
// Author: <NAME> (TM)
// (C) Copyright 2019, AI Werkstatt (TM) www.aiwerkstatt.com. All rights reserved.

// Basic concepts for the implementation of the classifier are based on
// <NAME>, “Understanding Random Forests”, PhD Thesis, 2014

#include <iostream>
#include <set>

#include "utilities.h"
#include "decision_tree.h"
#include "decision_forest.h"

using namespace std;

namespace koho {

// =============================================================================
// Decision Forest Classifier
// =============================================================================

    // Create and initialize a new decision forest classifier.

    // Helper: derive the number of classes per output from the class-name lists.
    auto calculate_n_classes = [](const vector<vector<string>>& classes) {
        vector<ClassesIdx_t> n_classes(classes.size(), 0);
        for (OutputsIdx_t o=0; o<classes.size(); o++) {
            n_classes[o] = classes[o].size();
        }
        return n_classes;
    };

    // Constructor: validates every hyperparameter and falls back to a
    // documented default when a value is out of range.
    DecisionForestClassifier::DecisionForestClassifier(vector<vector<string>> const& classes,
                                                       vector<string> const& features,
                                                       unsigned long n_estimators,
                                                       bool bootstrap,
                                                       bool oob_score,
                                                       string const& class_balance,
                                                       TreeDepthIdx_t max_depth,
                                                       FeaturesIdx_t max_features,
                                                       unsigned long max_thresholds,
                                                       string const& missing_values,
                                                       long random_state_seed)
            : n_outputs(classes.size()),
              classes(classes),
              n_classes(calculate_n_classes(classes)),
              features(features),
              n_features(features.size()) {

        // for convenience: largest class count across all outputs,
        // used to size the flattened [samples x outputs x classes] arrays.
        DecisionForestClassifier::n_classes_max =
                *max_element(begin(DecisionForestClassifier::n_classes),
                             end(DecisionForestClassifier::n_classes));

        // Check hyperparameters

        // n estimators
        if ((0 < n_estimators))
            DecisionForestClassifier::n_estimators = n_estimators;
        else
            DecisionForestClassifier::n_estimators = 100; // default

        // bootstrap
        DecisionForestClassifier::bootstrap = bootstrap;

        // oob_score: only meaningful when bootstrapping, silently disabled otherwise
        if (bootstrap)
            DecisionForestClassifier::oob_score = oob_score;
        else
            DecisionForestClassifier::oob_score = false;

        // class balance
        if (class_balance == "balanced" || class_balance == "None")
            DecisionForestClassifier::class_balance = class_balance;
        else
            DecisionForestClassifier::class_balance = "balanced"; // default

        // max depth
        const TreeDepthIdx_t MAX_DEPTH = 2147483647; // max long: (2^31)-1
        if ((0 < max_depth) && (max_depth <= MAX_DEPTH))
            DecisionForestClassifier::max_depth = max_depth;
        else
            DecisionForestClassifier::max_depth = MAX_DEPTH;

        // max features
        if ((0 < max_features) && (max_features <= DecisionForestClassifier::n_features))
            DecisionForestClassifier::max_features = max_features;
        else
            DecisionForestClassifier::max_features = DecisionForestClassifier::n_features;

        // max thresholds: only 0 (all) or 1 are valid
        if ((max_thresholds == 0) || (max_thresholds == 1))
            DecisionForestClassifier::max_thresholds = max_thresholds;
        else
            DecisionForestClassifier::max_thresholds = 0;

        // missing values
        if (missing_values == "NMAR" || missing_values == "None")
            DecisionForestClassifier::missing_values = missing_values;
        else
            DecisionForestClassifier::missing_values = "None"; // default

        // Random Number Generator: -1 means "seed from entropy"
        if (random_state_seed == -1)
            DecisionForestClassifier::random_state = RandomState();
        else
            DecisionForestClassifier::random_state = RandomState(static_cast<unsigned long>(random_state_seed));
    }

    // Build a decision forest classifier from the training data.
    // X is a flattened [n_samples x n_features] matrix, y a flattened
    // [n_samples x n_outputs] label matrix.
    void DecisionForestClassifier::fit(vector<Features_t>& X,
                                       vector<Classes_t>& y) {

        // number of samples
        SamplesIdx_t n_samples = y.size() / n_outputs;
        if (n_samples != X.size() / n_features) {
            throw runtime_error("Mismatch: n_outputs, n_features and n_samples.");
        }

        // Create explicitly different seeds for the decision trees
        // to avoid building the same tree over and over again for the entire decision forest
        // when decision trees are build in parallel.
        vector<long> algo_seeds(n_estimators);
        for (unsigned long e = 0; e < n_estimators; ++e) {
            algo_seeds[e] = random_state.uniform_int(0, random_state.MAX_INT);
        }

        // Instantiate decision trees
        for (unsigned long e = 0; e < n_estimators; ++e) {
            dtc_.emplace_back(DecisionTreeClassifier(classes, features,
                                                     class_balance, max_depth,
                                                     max_features, max_thresholds,
                                                     missing_values,
                                                     algo_seeds[e]));
        }

        oob_score_ = 0.0;

        // Build decision trees from training data
        if (!bootstrap) {

            // >>> mapping embarrassing parallelism
            for (unsigned long e = 0; e < n_estimators; ++e) {
                dtc_[e].fit(X, y); // decision trees
            }

        } else { // Bagging & Out-Of-Bag estimate

            // Different seeds for algorithm and data (bagging)
            // to avoid building the same trees multiple times
            // when the same seed comes up again.
            vector<long> data_seeds(n_estimators);
            for (unsigned long e = 0; e < n_estimators; ++e) {
                data_seeds[e] = random_state.uniform_int(0, random_state.MAX_INT);
            }

            // >>> mapping embarrassing parallelism
            vector<vector<double>> ps;
            for (unsigned long e = 0; e < n_estimators; ++e) {

                // Build a decision tree from the bootstrapped training data
                // drawing random samples with replacement
                vector<SamplesIdx_t> idx(n_samples);
                // NOTE: this local RandomState intentionally shadows the member.
                RandomState random_state = RandomState(static_cast<unsigned long>(data_seeds[e]));
                for (SamplesIdx_t s = 0; s < n_samples; ++s) {
                    idx[s] = static_cast<SamplesIdx_t>(random_state.uniform_int(0, n_samples));
                }
                vector<double> X_train;
                vector<long> y_train;
                for (SamplesIdx_t s = 0; s < n_samples; ++s) { // samples
                    for (FeaturesIdx_t f = 0; f < n_features; ++f) { // features
                        X_train.emplace_back(X[idx[s] * n_features + f]);
                    }
                    // NOTE(review): only one label is copied per sample; for
                    // n_outputs > 1 this should presumably copy all n_outputs
                    // labels (y[idx[s] * n_outputs + o]) — verify.
                    y_train.emplace_back(y[idx[s]]);
                }
                unsigned long n_samples_train = n_samples;

                // make sure training data includes all classes across all outputs
                unsigned long cnt = 0;
                while (true) {
                    // check
                    bool all = true;
                    for (unsigned long o=0; o<n_outputs; ++o) {
                        set<long> classesSet;
                        for (unsigned long c = 0; c < n_classes[o]; ++c) {
                            classesSet.insert(c);
                        }
                        for (unsigned long i = 0; i < n_samples_train; ++i) {
                            // NOTE(review): this erases labels from the ORIGINAL
                            // y, not from the bootstrapped y_train, so the check
                            // always passes when the full data has all classes
                            // and the redraw below is effectively dead — verify.
                            classesSet.erase(y[i * n_outputs + o]);
                            // NOTE(review): `continue` here is a no-op (last
                            // statement of the loop body); `break` was likely
                            // intended as an early exit once the set is empty.
                            if (classesSet.empty()) continue;
                        }
                        if (!classesSet.empty()) {
                            all = false;
                            continue;
                        }
                    }
                    if (all) break;

                    // redraw samples
                    X_train.clear();
                    y_train.clear();
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) {
                        idx[s] = static_cast<SamplesIdx_t>(random_state.uniform_int(0, n_samples));
                    }
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) { // samples
                        for (FeaturesIdx_t f = 0; f < n_features; ++f) { // features
                            X_train.emplace_back(X[idx[s] * n_features + f]);
                        }
                        y_train.emplace_back(y[idx[s]]);
                    }

                    // unable to randomize training data while including all classes
                    if (cnt++ > 10000) {
                        throw runtime_error("Unable to randomize training data including all classes for bagging.");
                    }
                }

                dtc_[e].fit(X_train, y_train); // decision trees

                // Compute Out-Of-Bag estimates
                // as average error for all samples across all outputs when not included in bootstrap

                // We use n_classes_max to create a nice 3D array to hold the predicted values x samples x classes
                // as the number of classes can be different for different outputs
                vector<double> p(n_samples * n_outputs * n_classes_max, 0.0);
                if (oob_score) {
                    // unsampled_idx[s] is true when sample s was NOT drawn into this bootstrap.
                    vector<bool> unsampled_idx(n_samples, true);
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) unsampled_idx[idx[s]] = false;

                    vector<double> X_test;
                    unsigned long n_samples_test = 0;
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) { // samples
                        // NOTE(review): `unsampled_idx[s] == 0` selects the
                        // IN-bag samples, which contradicts the "unsampled"
                        // comment; an OOB estimate should use `unsampled_idx[s]`.
                        // Also `X[idx[s] * n_features + f]` looks like it should
                        // be `X[s * n_features + f]` — verify both.
                        if (unsampled_idx[s] == 0) { // unsampled
                            for (FeaturesIdx_t f = 0; f < n_features; ++f) { // features
                                X_test.emplace_back(X[idx[s] * n_features + f]);
                            }
                            n_samples_test++;
                        }
                    }

                    if (n_samples_test > 0) {
                        vector<double> y_prob(n_samples_test * n_outputs * n_classes_max, 0.0);
                        dtc_[e].predict_proba(&X_test[0], n_samples_test, &y_prob[0]);
                        unsigned long i = 0;
                        for (SamplesIdx_t s = 0; s < n_samples; ++s) { // samples
                            if (unsampled_idx[s] == 0) { // unsampled
                                // NOTE(review): copies a single probability per
                                // sample; the full n_outputs * n_classes_max
                                // stripe for sample s appears intended — verify.
                                p[s] = y_prob[i++];
                            }
                        }
                    }
                }
                ps.emplace_back(p);
            }

            if (oob_score) {

                // Predict classes probabilities for all outputs for the decision forest
                // as average of the class probabilities from all decision trees
                // >>> reduce
                vector<double> class_probabilities(n_samples * n_outputs * n_classes_max, 0.0);
                vector<bool> valid_idx(n_samples, false);
                unsigned long n_valid_idx = 0;
                for (SamplesIdx_t s = 0; s < n_samples; ++s) {
                    bool valid = false;
                    for (OutputsIdx_t o=0; o< n_outputs; ++o) {
                        for (ClassesIdx_t c = 0; c < n_classes[o]; ++c) {
                            double sum = 0.0;
                            for (unsigned long e = 0; e < n_estimators; ++e) {
                                sum += ps[e][s * n_outputs * n_classes_max + o * n_classes_max + c];
                            }
                            // no normalization needed when using maxIndex( ) later on
                            class_probabilities[s * n_outputs * n_classes_max + o * n_classes_max + c] = sum;
                            // Identify samples with oob score
                            if (sum > 0.0) { valid = true; }
                        }
                    }
                    // Identify samples with oob score
                    if (valid) {
                        valid_idx[s] = true;
                        n_valid_idx++;
                    } else {
                        // NOTE(review): `break` stops the scan at the first
                        // sample without an OOB estimate, so later valid
                        // samples are never counted; `continue` seems intended.
                        break;
                    }
                }

                if (n_valid_idx == n_samples) { // oob score for all samples

                    // Predict classes
                    vector<long> predictions(n_samples*n_outputs, 0);
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) {
                        for (OutputsIdx_t o = 0; o < n_outputs; ++o) {
                            predictions[s * n_outputs + o] =
                                    maxIndex(&class_probabilities[s * n_outputs * n_classes_max + o * n_classes_max],
                                             n_classes[o]);
                        }
                    }

                    // Score
                    unsigned long n_true = 0;
                    for (SamplesIdx_t s = 0; s < n_samples; ++s) {
                        for (OutputsIdx_t o = 0; o < n_outputs; ++o) {
                            if (valid_idx[s]) {
                                if (y[s * n_outputs + o] == predictions[s * n_outputs + o]) n_true++;
                            }
                        }
                    }
                    oob_score_ = static_cast<double>(n_true) / (n_valid_idx*n_outputs);

                } else {
                    oob_score_ = 0.0;
                    // NOTE(review): missing space after `<< n_samples <<` —
                    // the message prints e.g. "100have an out-of-bag estimate".
                    cout << "Only " << n_valid_idx << " out of " << n_samples << "have an out-of-bag estimate. "
                         << "This probably means too few estimators were used "
                         << "to compute any reliable oob estimates." << endl;
                }
            }
        }
    }

    // Predict classes probabilities for the test data.
    // Averages the per-tree class probabilities over all estimators.
    // X: flattened [n_samples x n_features]; y_prob: caller-allocated,
    // flattened [n_samples x n_outputs x n_classes_max].
    void DecisionForestClassifier::predict_proba(Features_t* X,
                                                 SamplesIdx_t n_samples,
                                                 double* y_prob) {

        // Predict class probabilities for all outputs for all decision trees

        // We use n_classes_max to create a nice 3D array to hold the predicted values x samples x classes
        // as the number of classes can be different for different outputs

        // >>> mapping embarrassing parallelism
        vector<vector<double>> ps;
        for (unsigned long e = 0; e < n_estimators; ++e) {
            vector<double> p(n_samples * n_outputs * n_classes_max, 0.0);
            dtc_[e].predict_proba(&X[0], n_samples, &p[0]);
            ps.emplace_back(p);
        }

        // Predict classes probabilities for all outputs for the decision forest
        // as average of the class probabilities from all decision trees
        // >>> reduce
        for (SamplesIdx_t s = 0; s < n_samples; ++s) {
            for (OutputsIdx_t o=0; o< n_outputs; ++o) {
                for (ClassesIdx_t c = 0; c < n_classes[o]; ++c) {
                    double sum = 0.0;
                    for (unsigned long e = 0; e < n_estimators; ++e) {
                        sum += ps[e][s * n_outputs * n_classes_max + o * n_classes_max + c];
                    }
                    // simple mean over estimators
                    y_prob[s * n_outputs * n_classes_max + o * n_classes_max + c] = sum / n_estimators;
                }
            }
        }
    }

    // Predict classes for the test data.
    // Picks, per sample and output, the class with the highest averaged probability.
    void DecisionForestClassifier::predict(Features_t* X,
                                           SamplesIdx_t n_samples,
                                           Classes_t* y) {

        // We use n_classes_max to create a nice 3D array to hold the predicted values x samples x classes
        // as the number of classes can be different for different outputs
        vector<double> y_prob(n_samples * n_outputs * n_classes_max, 0.0);
        predict_proba(X, n_samples, &y_prob[0]);

        for (SamplesIdx_t s=0; s<n_samples; ++s) {
            for (OutputsIdx_t o=0; o<n_outputs; ++o) {
                y[s * n_outputs + o] =
                        maxIndex(&y_prob[s * n_outputs * n_classes_max + o * n_classes_max],
                                 n_classes[o]);
            }
        }
    }

    // Calculate score for the test data.
    // Mean accuracy over all samples and all outputs
    // (fraction of predictions matching the given labels).
    double DecisionForestClassifier::score(Features_t* X,
                                           Classes_t* y,
                                           SamplesIdx_t n_samples) {

        vector<long> y_predict(n_samples*n_outputs, 0);
        predict(X, n_samples, &y_predict[0]);

        unsigned long n_true = 0;
        for (SamplesIdx_t i = 0; i < n_samples; ++i) {
            for (OutputsIdx_t o = 0; o < n_outputs; ++o) {
                if (y_predict[i*n_outputs + o] == y[i*n_outputs + o]) n_true++;
            }
        }
        return static_cast<double>(n_true) / (n_samples*n_outputs);
    }

    // Calculate feature importances from the decision forest.
    // importances: caller-allocated array of n_features doubles.
    void DecisionForestClassifier::calculate_feature_importances(double* importances) {

        // Calculate feature importances for all decision trees
        // >>> mapping embarrassing parallelism
        vector<vector<double>> dtc_importances(n_estimators, vector<double>(n_features, 0.0));
        for (unsigned long e = 0; e < n_estimators; ++e) {
            dtc_[e].calculate_feature_importances(&dtc_importances[e][0]);
        }

        // Calculate feature importances for the decision forest
        // as average of feature importances from all decision trees
        // >>> reduce
        for (FeaturesIdx_t f = 0; f < n_features; ++f) {
            double sum = 0.0;
            for (unsigned long e = 0; e < n_estimators; ++e) {
                sum += dtc_importances[e][f];
            }
            importances[f] = sum / n_estimators;
        }
    }

    // Export of a decision forest as individual decision trees in GraphViz dot format.
    // One file per tree, suffixed "_<tree index>".
    void DecisionForestClassifier::export_graphviz(std::string const& filename, bool rotate) {

        for (unsigned long e = 0; e < n_estimators; ++e) {
            dtc_[e].export_graphviz(filename + "_" + to_string(e), rotate);
        }
    }

    // Export of a decision tree from a decision forest in GraphViz dot format.
    // e selects the tree index.
    void DecisionForestClassifier::export_graphviz(std::string const& filename, unsigned long e, bool rotate) {

        dtc_[e].export_graphviz(filename + "_" + to_string(e), rotate);
    }

    // Export of a decision tree from a decision forest in a simple text format.
    // Text dump of a single tree; e selects the tree index.
    std::string DecisionForestClassifier::export_text(unsigned long e) {

        return dtc_[e].export_text();
    }

    // Serialize
    // Binary layout: outputs count, per-output class counts, class-name
    // strings (length-prefixed), feature names, hyperparameters, RNG state,
    // and the OOB score. The decision trees are serialized separately.
    void DecisionForestClassifier::serialize(std::ofstream& fout) {

        // Number of Outputs
        fout.write((const char*)(&n_outputs), sizeof(n_outputs));

        // Classes
        for (OutputsIdx_t o=0; o<n_outputs; ++o) {
            fout.write((const char *) (&n_classes[o]), sizeof(n_classes[o]));
        }
        for (OutputsIdx_t o=0; o<n_outputs; ++o) {
            for (unsigned long c=0; c<n_classes[o]; ++c) {
                // each string is written as <length><bytes>
                unsigned long size = classes[o][c].size();
                fout.write((const char *) &size, sizeof(size));
                fout.write((const char *) &classes[o][c][0], size);
            }
        }

        // Features
        fout.write((const char*)(&n_features), sizeof(n_features));
        for (unsigned long f=0; f<n_features; ++f) {
            unsigned long size = features[f].size();
            fout.write((const char*)&size, sizeof(size));
            fout.write((const char*)&features[f][0], size);
        }

        // Hyperparameters
        fout.write((const char*)&n_estimators, sizeof(n_estimators));
        fout.write((const char*)&bootstrap, sizeof(bootstrap));
        fout.write((const char*)&oob_score, sizeof(oob_score));
        unsigned long size = class_balance.size();
        fout.write((const char*)&size, sizeof(size));
        fout.write((const char*)&class_balance[0], size);
        fout.write((const char*)&max_depth, sizeof(max_depth));
        fout.write((const char*)&max_features, sizeof(max_features));
        fout.write((const char*)&max_thresholds, sizeof(max_thresholds));
        size = missing_values.size();
        fout.write((const char*)&size, sizeof(size));
        fout.write((const char*)&missing_values[0], size);

        // Random Number Generator
        // NOTE(review): writes the RandomState object bytes raw; this is not
        // portable across compilers/ABIs — confirm RandomState is trivially
        // copyable before relying on it.
        fout.write((const char*)&random_state, sizeof(random_state));

        // Model
        // Serialize Decision Trees done separately
        fout.write((const char*)&oob_score_, sizeof(oob_score_));
    }

    // Export of a decision forest classifier in binary serialized format
    // with separate files for the individual decision trees.
void DecisionForestClassifier::export_serialize(std::string const& filename) { string fn = filename + ".dfc"; ofstream fout(fn, ios_base::binary); if (fout.is_open()) { const int version = 2; // file version number fout.write((const char*)&version, sizeof(version)); // Serialize Decision Forest Classifier serialize(fout); fout.close(); // Export of decision tree classifiers in binary serialized format for (unsigned long e = 0; e < n_estimators; ++e) { dtc_[e].export_serialize(filename + "_" + to_string(e)); } return; } else { throw runtime_error("Unable to open file."); } } // Deserialize DecisionForestClassifier DecisionForestClassifier::deserialize(std::ifstream& fin) { // Number of Outputs OutputsIdx_t n_outputs; fin.read((char*)(&n_outputs), sizeof(n_outputs)); // Classes vector<ClassesIdx_t> n_classes; for (OutputsIdx_t o=0; o<n_outputs; ++o) { ClassesIdx_t o_n_classes; fin.read((char *) (&o_n_classes), sizeof(o_n_classes)); n_classes.emplace_back(o_n_classes); } vector<vector<string>> classes; for (OutputsIdx_t o=0; o<n_outputs; ++o) { vector<string> o_classes; for (unsigned long c=0; c<n_classes[o]; ++c) { string str; unsigned long size; fin.read((char*)(&size), sizeof(size)); str.resize(size); fin.read((char*)(&str[0]), size); o_classes.emplace_back(str); } classes.emplace_back(o_classes); } // Features FeaturesIdx_t n_features; vector<string> features; fin.read((char*)(&n_features), sizeof(n_features)); for (unsigned long f=0; f<n_features; ++f) { string str; unsigned long size; fin.read((char*)(&size), sizeof(size)); str.resize(size); fin.read((char*)(&str[0]), size); features.emplace_back(str); } // Hyperparameters unsigned long n_estimators; bool bootstrap; bool oob_score; string class_balance; TreeDepthIdx_t max_depth; FeaturesIdx_t max_features; unsigned long max_thresholds; string missing_values; fin.read((char*)(&n_estimators), sizeof(n_estimators)); fin.read((char*)(&bootstrap), sizeof(bootstrap)); fin.read((char*)(&oob_score), sizeof(oob_score)); 
unsigned long size; fin.read((char*)(&size), sizeof(size)); class_balance.resize(size); fin.read((char*)(&class_balance[0]), size); fin.read((char*)(&max_depth), sizeof(max_depth)); fin.read((char*)(&max_features), sizeof(max_features)); fin.read((char*)(&max_thresholds), sizeof(max_thresholds)); fin.read((char*)(&size), sizeof(size)); fin.read((char*)(&missing_values[0]), size); // Random Number Generator long random_state_seed = 0; DecisionForestClassifier dfc(classes, features, n_estimators, bootstrap, oob_score, class_balance, max_depth, max_features, max_thresholds, missing_values, random_state_seed); // Random Number Generator - overwrite random state fin.read((char*)(&dfc.random_state), sizeof(dfc.random_state)); // Model // Deserialize Decision Trees separately done fin.read((char*)(&dfc.oob_score_), sizeof(dfc.oob_score_)); return dfc; } // Import of a decision forest classifier in binary serialized format // with separate files for the individual decision trees. DecisionForestClassifier DecisionForestClassifier::import_deserialize(std::string const& filename) { string fn = filename + ".dfc"; ifstream fin(fn, ios_base::binary); if (fin.is_open()) { int version; fin.read((char*)(&version), sizeof(version)); if (version == 2) { // file version number // Deserialize Decision Forest Classifier DecisionForestClassifier dfc = deserialize(fin); fin.close(); // Import of decision tree classifiers in binary serialized format for (unsigned long e = 0; e < dfc.n_estimators; ++e) { DecisionTreeClassifier dtc = DecisionTreeClassifier::import_deserialize(filename + "_" + to_string(e)); dfc.dtc_.emplace_back(dtc); } return dfc; } else { fin.close(); throw runtime_error("Unsupported file version number."); } } else { throw runtime_error("Unable to open file."); } } } // namespace koho
# -*- coding: utf8 -*-
"""Example usage of the financialCrawler clients.

Fetches the THYAO stock price, several FX parities and the brent oil
price from the supported sources and prints each result.
"""
# BUG FIX: the original mixed Python-2 print statements (`print x`) with
# call-style prints; with the __future__ import the parenthesized form
# below runs identically on Python 2 and Python 3.
from __future__ import print_function

from financialCrawler import clients

print("\nTesting crawlers by fetching the stock price of THYAO:")

print("\nfetching THYAO stock price from uzmanpara.com")
print(clients.Uzmanpara.getStock("THYAO"))

print("\nfetching THYAO stock price from bigpara.com")
print(clients.Bigpara.getStock("THYAO"))

print("\nfetching THYAO stock price from MarketWatch.com")
print(clients.MarketWatch.getStock("THYAO"))

print("\nfetching euro-usd parity from MarketWatch.com")
print(clients.MarketWatch.getParity("eurusd"))

print("\nfetching euro-usd parity from Google.com")
print(clients.Google.getParity("eurusd"))

print("\nfetching usd-jpy parity from MarketWatch.com")
print(clients.MarketWatch.getParity("usdjpy"))

print("\nfetching brent oil price from MarketWatch.com")
print(clients.MarketWatch.getOil())

print("\nfetching brent oil price from thewallstreetjournal.com")
print(clients.TWSJ.getOil())
def make_desired_disp(vertices, DeformType=DispType.random, num_of_vertices=-1):
    """Build a flattened target displacement field for ``vertices``.

    Args:
        vertices: array of 2-D vertex coordinates (assumed shape
            (num_of_vertices, 2) — TODO confirm against callers).
        DeformType: one of the ``DispType`` members selecting how the
            displacement is generated.
        num_of_vertices: number of vertices; when < 1 it is derived from
            ``vertices`` via ``get_num_of_verts``.

    Returns:
        1-D numpy array of length 2 * num_of_vertices. The random and
        isotropic variants are normalized; the explicit variants are not.

    Raises:
        ValueError: if ``DeformType`` is not a recognized ``DispType``.
    """
    if num_of_vertices < 1:
        # BUG FIX: the original called get_num_of_verts(vertices) but
        # discarded the result, leaving num_of_vertices at its sentinel.
        num_of_vertices = get_num_of_verts(vertices)

    if DeformType == DispType.random:
        return normalizeVec(npr.rand(2 * num_of_vertices))
    if DeformType == DispType.isotropic:
        return normalizeVec(vertices.flatten())
    if DeformType == DispType.explicit_1:
        # Three fixed leading displacements, the rest random.
        return np.vstack((np.array([[0.0, 0.0], [0, -2], [-1, -1]]),
                          npr.rand(num_of_vertices - 3, 2))).flatten()
    if DeformType == DispType.explicit_2:
        return np.vstack((np.array([[0.0, 0.0],
                                    [0, 0],
                                    [-0.5 + 1.5 * np.sin(np.pi / 6),
                                     0.3 - 1.5 * np.cos(np.pi / 6)]]),
                          npr.rand(num_of_vertices - 3, 2))).flatten()
    # Previously an unknown DeformType silently returned None.
    raise ValueError("Unsupported DeformType: {!r}".format(DeformType))
<reponame>ralic/unit<filename>luminous_flux.go package unit // LuminousFlux represents a SI unit for luminous flux (in lumen, lm) type LuminousFlux Unit // constants const ( Lumen LuminousFlux = 1e0 // SI ) // Lumen returns the luminous flux in lm func (l LuminousFlux) Lumen() float64 { return float64(l) }
def convert_model_to_tflite(Keras_model_dir, project_dir, model_name, optimization,
                            data_loader_path, quant_dtype, separator, csv_target_label):
    """Convert a saved Keras model to a TFLite flatbuffer on disk.

    Loads the model from ``Keras_model_dir``, optionally applies
    post-training quantization (full int8 when ``quant_dtype`` requests
    it), and writes ``<project_dir>/<model_name>.tflite``.

    Returns
    -------
    (model_input_shape, model_output_neurons) of the loaded Keras model.
    """
    keras_model = tf.keras.models.load_model(Keras_model_dir)
    model_input_shape = keras_model.input.shape
    model_output_neurons = keras_model.layers[-1].output_shape[1]

    converter = lite.TFLiteConverter.from_keras_model(keras_model)

    if "Quantization" in optimization:
        # representative_dataset (defined elsewhere in this module) reads
        # this module-level variable, so the global assignment must stay.
        global x_train
        x_train = dataloader_quantization(data_loader_path,
                                          keras_model.input.shape[1],
                                          keras_model.input.shape[2],
                                          separator, csv_target_label)
        x_train = tf.cast(x_train, tf.float32)
        x_train = tf.data.Dataset.from_tensor_slices(x_train).batch(1)
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        print(quant_dtype)
        if "int8 only" in quant_dtype:
            # Force integer-only kernels plus int8 I/O tensors.
            converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
            converter.inference_input_type = tf.int8
            converter.inference_output_type = tf.int8

    tflite_model = converter.convert()
    # BUG FIX: the original used open(...).write(...) and leaked the file
    # handle; `with` closes (and flushes) it deterministically.
    with open(project_dir + "/" + model_name + ".tflite", "wb") as f:
        f.write(tflite_model)
    return model_input_shape, model_output_neurons
<filename>src/main/java/com/github/kaiwinter/rhapsody/service/metadata/SearchService.java
package com.github.kaiwinter.rhapsody.service.metadata;

import java.util.Collection;

import com.github.kaiwinter.rhapsody.model.AlbumData.Track;

import retrofit.http.GET;
import retrofit.http.Query;

/**
 * Wrapper of the Search REST API.
 *
 * @see <a href="https://developer.rhapsody.com/api#search">https://developer.rhapsody.com/api#search</a>
 */
public interface SearchService {

   /**
    * Returns an array of typed results by substring, optionally scoped by content type.
    *
    * @param apikey
    *           the API key
    * @param pretty
    *           if <code>true</code> pretty prints the JSON
    * @param catalog
    *           countries' catalog (two-letter country code, which is case-sensitive)
    * @param type
    *           the content type (artist, album, track, playlist)
    * @param q
    *           the search query
    * @param limit
    *           the number of tracks to load, if <code>null</code> the default value is used (20)
    * @return the matching {@link Track}s
    */
   @GET("/v1/search/typeahead")
   Collection<Track> search(
      @Query("apikey") String apikey,
      @Query("pretty") boolean pretty,
      @Query("catalog") String catalog,
      @Query("type") String type,
      @Query("q") String q,
      @Query("limit") Integer limit);
}
/**
 * Loads the provided element's child panes onto this component. If the element contains any child panes, this will
 * mutate this component.
 *
 * @param instance the instance to apply field and method references on
 * @param element the element to load
 * @since 0.8.0
 */
public void load(@NotNull Object instance, @NotNull Element element) {
    NodeList children = element.getChildNodes();

    for (int i = 0; i < children.getLength(); i++) {
        Node child = children.item(i);

        // Text/comment nodes carry no pane definitions; only elements do.
        if (child.getNodeType() == Node.ELEMENT_NODE) {
            addPane(Gui.loadPane(instance, child));
        }
    }
}
<gh_stars>10-100
package com.arangodb.intellij.aql.actions;

import com.arangodb.intellij.aql.model.AqlQuery;
import com.arangodb.intellij.aql.ui.dialogs.AqlParameterDialog;
import com.arangodb.intellij.aql.ui.windows.AqlConsoleWindow;
import com.arangodb.intellij.aql.util.AqlConst;
import com.arangodb.intellij.aql.util.AqlUtils;
import com.arangodb.intellij.aql.util.log;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;

import java.util.Collections;
import java.util.Map;
import java.util.Set;

/**
 * Base IntelliJ action for executing or explaining an AQL query taken from
 * the current editor (selection if present, otherwise the whole document).
 * Concrete subclasses choose the {@code QueryType} to run.
 */
public abstract class AqlQueryAction extends AnAction {

    /**
     * Extracts the query text, resolves its bind parameters (reusing a
     * previously saved query, or prompting the user via
     * {@link AqlParameterDialog}), then executes/explains it.
     */
    protected void runQueryAction(@NotNull final AnActionEvent event, final AqlDataService.QueryType type) {
        final Project project = getEventProject(event);
        if (!canExecute(project, event)) {
            return;
        }
        final CharSequence charSequence = extractQuery(project, event);
        if (charSequence.length() < 1) {
            log.warn("No query found/selected");
            return;
        }
        final Set<String> names = AqlUtils.extractParameterNames(charSequence, project);
        final AqlDataService service = AqlDataService.with(project);
        final String query = charSequence.toString();
        // check existing: a query already saved with the same text re-runs
        // with its stored parameter values, without prompting.
        final AqlQuery existing = service.getExistingQueryForValue(query);
        if (existing != null) {
            execute(type, service, query, existing.getParameters());
            return;
        }
        final int paramsSize = names.size();
        if (paramsSize > 0) {
            // Unsaved query with bind parameters: collect values via dialog.
            final PsiElement element = event.getDataContext().getData(CommonDataKeys.PSI_ELEMENT);
            final AqlParameterDialog dialog = new AqlParameterDialog(project, names, element);
            final boolean ok = dialog.showAndGet();
            if (ok) {
                final Map<String, String> data = dialog.getData();
                execute(type, service, query, data);
                showConsole(project);
                saveQuery(event, service, query, data);
                return;
            }
            // User cancelled the dialog, so no parameter values exist.
            log.error("No parameters defined");
            return;
        }
        // Parameter-free query: run, persist, and show the console.
        execute(type, service, query, Collections.emptyMap());
        saveQuery(event, service, query, Collections.emptyMap());
        showConsole(project);
    }

    /** Dispatches to execute or explain depending on {@code type}. */
    private void execute(final AqlDataService.QueryType type, final AqlDataService service, final String query, final Map<String, String> data) {
        if (type == AqlDataService.QueryType.QUERY) {
            service.executeQuery(query, data);
        } else {
            service.explainQuery(query, data);
        }
    }

    /**
     * Persists the query, naming it after the originating file
     * ("&lt;file&gt;_query") or "Query" when no file is available.
     */
    protected void saveQuery(@NotNull final AnActionEvent event, final AqlDataService service, final String query, final Map<String, String> data) {
        final VirtualFile file = event.getDataContext().getData(PlatformDataKeys.VIRTUAL_FILE);
        final String name = file == null ? "Query" : file.getName() + "_query";
        service.saveQuery(new AqlQuery(name, query, data));
    }

    /** Activates the AQL console tool window. */
    protected void showConsole(final Project project) {
        assert project != null;
        final ToolWindow window = ToolWindowManager.getInstance(project).getToolWindow(AqlConsoleWindow.WINDOW_ID);
        window.activate(null, true);
    }

    /**
     * The action only runs with an open editor on an AQL-language file and
     * an available AQL console tool window.
     */
    protected boolean canExecute(final Project project, @NotNull final AnActionEvent anActionEvent) {
        final Editor editor = anActionEvent.getData(CommonDataKeys.EDITOR_EVEN_IF_INACTIVE);
        if (project == null || editor == null) {
            return false;
        }
        final PsiFile psiFile = anActionEvent.getData(CommonDataKeys.PSI_FILE);
        if (psiFile == null) {
            return false;
        }
        final String id = psiFile.getLanguage().getID();
        if (!AqlConst.AQL_LANGUAGE_ID.equals(id)) {
            return false;
        }
        // TODO remove this....(see below)
        final ToolWindow window = ToolWindowManager.getInstance(project).getToolWindow(AqlConsoleWindow.WINDOW_ID);
        if (window == null) {
            return false;
        }
        return true;
    }

    /** Returns the text to run: the selection, or the whole document. */
    public CharSequence extractQuery(final Project project, @NotNull final AnActionEvent anActionEvent) {
        final Editor editor = anActionEvent.getData(CommonDataKeys.EDITOR_EVEN_IF_INACTIVE);
        assert project != null;
        assert editor != null;
        final Document document = editor.getDocument();
        return extractText(editor, document);
    }

    /**
     * Execute selection only if there is one, document otherwise
     */
    private CharSequence extractText(final Editor editor, final Document document) {
        final Caret caret = editor.getCaretModel().getPrimaryCaret();
        if (caret.hasSelection()) {
            return caret.getSelectedText();
        }
        return document.getCharsSequence();
    }
}
<reponame>cwadrupldijjit/stallion-security-ng2
import { Component, Input, OnInit, AfterContentInit, ElementRef } from 'angular2/core';
import { Parallax, ParallaxConfig } from '../../directives/parallax/parallax.directive';
import { logoResize, routeLoaded } from '../../app.component';

/**
 * Landing ("home") page component: shows the parallax banner and the
 * welcome copy, and signals the app shell when the route has rendered.
 */
@Component({
	templateUrl: 'app/components/HomeComponent/home.html',
	styleUrls: [ 'app/styles/home.styles.css' ],
	selector: 'home-html',
	directives: [Parallax]
})
export class HomeComponent implements OnInit, AfterContentInit {
	// Asks the shell to shrink the logo, then injects the banner image and
	// welcome copy straight into the DOM (bypasses Angular data binding).
	ngOnInit() {
		logoResize.emit(null);
		document.getElementById('home-banner').style.backgroundImage = 'url(' + this.bannerImage + ')';
		document.getElementById('welcome-text').innerHTML = this.welcomeText;
	}

	// Tells the router shell that this route finished rendering.
	ngAfterContentInit() {
		routeLoaded.emit(null);
		// debugger;
	}

	// Native element of this component, captured in the constructor.
	hostElement: HTMLElement;

	// Parallax tuning for the banner image.
	bannerParallaxConfig: ParallaxConfig = {
		scrollerId: 'route',
		parallaxInitVal: -250,
		parallaxRatio: .8
	};

	// Parallax tuning for the welcome headline.
	welcomeParallaxConfig = {
		name: 'site-welcome',
		scrollerId: 'route',
		parallaxRatio: 1,
		parallaxCss: 'top',
		parallaxInitVal: -100,
		parallaxIf: true,
		maxValue: 0
	};

	// Parallax tuning for the body text.
	textParallaxConfig = {
		name: 'site-welcome',
		scrollerId: 'route',
		parallaxRatio: .6,
		parallaxCss: 'top',
		parallaxInitVal: -70,
		parallaxIf: true,
		maxValue: 0
	};

	siteWelcome: string = 'Welcome to the website for Stallion Security';

	// Placeholder ("bacon ipsum") copy, rendered as HTML into #welcome-text.
	welcomeText: string = `Bacon ipsum dolor amet t-bone rump prosciutto leberkas. Ham hock kielbasa andouille turkey drumstick frankfurter shank. Ham hock tri-tip tail shoulder meatloaf, pig ball tip turkey bresaola shankle beef kielbasa pastrami. Drumstick corned beef flank spare ribs jowl short ribs prosciutto turducken pork loin tongue cow. Meatloaf salami cupim boudin, chuck short loin t-bone drumstick.<br><br> Pork belly jowl sirloin pork. Porchetta short ribs chicken t-bone, andouille fatback biltong leberkas kielbasa sirloin chuck bacon. Prosciutto meatball sirloin chicken turkey shoulder doner frankfurter salami tail tenderloin pork boudin venison kielbasa. 
Tail short ribs bresaola pork belly beef. Jowl pastrami brisket kevin ball tip, bresaola meatloaf pork. Turducken fatback pork loin sausage frankfurter boudin jerky salami pig venison drumstick ground round beef ribs short loin tenderloin.<br><br> Cow salami jowl prosciutto jerky alcatra sirloin meatball tri-tip t-bone ribeye shoulder swine. Venison corned beef turducken pork tongue boudin cow ham hock pork loin tri-tip landjaeger picanha pancetta meatloaf. Tenderloin prosciutto ribeye ham hock. Ground round drumstick tongue pig pork loin cow t-bone beef biltong turducken. Fatback strip steak prosciutto short loin beef pancetta alcatra sausage drumstick pork belly tongue. Sirloin frankfurter ground round ribeye hamburger, tenderloin pork belly shank t-bone prosciutto strip steak shankle chicken. Brisket drumstick shoulder fatback ham hamburger picanha leberkas kielbasa prosciutto.<br>`;

	bannerImage: string = 'app/assets/running-horse.jpg';
	// bannerImage: string = 'app/assets/dawn-190055.jpg';

	// Scrolling container handed in by the parent route shell.
	@Input() scrollElement: HTMLElement;

	constructor(element: ElementRef) {
		this.hostElement = element.nativeElement;
	}
}
package azuremediaprocessor;

import azuremediaprocessor.Observer;
import azuremediaprocessor.State;

import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Subject side of a minimal observer pattern: observers register here and
 * are notified of {@link State} changes.
 */
public class Subject {

    // CopyOnWriteArrayList lets notifyObservers iterate safely while
    // observers are concurrently added or removed.
    private final CopyOnWriteArrayList<Observer> listeners = new CopyOnWriteArrayList<>();

    /** Registers an observer for future notifications. */
    public void addObserver(Observer o) {
        listeners.add(o);
    }

    /** Unregisters a previously added observer. */
    public void deleteObserver(Observer o) {
        listeners.remove(o);
    }

    /** Pushes the given state to every registered observer, in order. */
    public void notifyObservers(State state) {
        for (Observer listener : listeners) {
            listener.notify(state);
        }
    }
}
CIA Tells FOIA Requester That He Needs To Know Everything About The Emails He's Requesting Before He Can Request Them from the using-the-rarely-seen-tautology-exemption dept More FOIA-related nonsense, this time from the CIA. Michael Morisy, co-founder of MuckRock, sent a request for internal emails discussing (rather ironically) the fact that the CIA's "FOIA Portal" seems to suffer from extended periods of downtime. This is a request under the Freedom of Information Act. I hereby request the following records: A copy of emails sent to or from the CIA's FOIA office regarding the FOIA Portal's Technical Issues. According to the CIA's current FOIA website: "FOIA requests cannot currently be made online due to technical issues. Requests can still be submitted via the US Postal Service and facsimile." http://www.foia.cia.gov/ Please also include any attachments to these emails. The FOIA requires requesters to "reasonably describe" the information they seek so that professional employees familiar with the subject matter can locate responsive information with a reasonable amount of effort. Commonly this equates to a requirement that the documents must be locatable through the indexing of our various systems. Extremely broad or vague requests or requests requiring research do not satisfy this requirement. We require requesters seeking any form of "electronic communications" such as emails, to provide the specific "to" and "from" recipients, time frame and subject. We note that you have provided the subject only. Therefore, we must decline your request. You can't see any emails or know who sent or received them. But you must request the exact email and who sent and received it. Given the agency's disdain for the FOIA process (second only to the NYPD ), I'm sure this sort of outage is viewed as a feature, not a bug. But whatever internal musings the CIA had about its FOIA portal issues will apparently be staying internal for the time being. 
The CIA rejected Morisy's request in full, basically stating that searching for emails is hard work and that the requester could have at least bothered to know exactly who was talking about the portal issues and exactly when they were doing it before making the request. Obviously, a FOIA requester isn't going to know these sorts of specifics beforehand, hence THE REQUEST FOR INFORMATION. As MuckRock's JPat Brown points out, Twitter user Mythosopher had perhaps the best response to this refusal... along with this graphic. The CIA has pretty much ensured many requests will be found too cumbersome to comply with. It used 2013's brief sequester as an excuse to shut down its office in charge of declassifying historical documents and fold it in with the FOIA department's steady stream of extension requests and denials. And the CIA joins an ever-lengthening list of federal agencies completely mystified by internal email systems. Oddly, this same government expects the US public to trust that agencies like the FBI, CIA, NSA and countless law enforcement entities will be able to find the needles in your personal email haystacks -- obtained in bulk with FISA court orders, NSLs or old-fashioned open-ended, non-specific warrants. The CIA itself has already raided internal networks to root out Senate staffers and whistleblowers, but no one heard anyone complain about the lack of specifics making the job too tough to do. It's only when the public asks to dip into the government's business that these agencies suddenly start acting like the impossible is being demanded. Filed Under: cia, foia, secrecy, transparency
/*
 * Get the MAC address of the GE MAC unit.
 *
 * Reads the station-address-high (SAH) and station-address-low (SAL)
 * registers and unpacks them into addr[0..5], most significant octet
 * first: SAH supplies addr[0..1], SAL supplies addr[2..5].
 */
void big_sur_ge_get_mac_unit(big_sur_ge *emac, unsigned int *addr)
{
	unsigned long mac_addr_hi, mac_addr_lo;

	mac_addr_hi = BIG_SUR_GE_READ(emac->base_address + BIG_SUR_GE_SAH_OFFSET);
	mac_addr_lo = BIG_SUR_GE_READ(emac->base_address + BIG_SUR_GE_SAL_OFFSET);

	/* NOTE(review): no "& 0xff" masking here, so each addr[] entry keeps
	 * any higher register bits. Harmless if the hardware zeroes the
	 * unused SAH bits, but confirm callers expect unmasked values. */
	addr[0] = (mac_addr_hi >> 8);
	addr[1] = mac_addr_hi;

	addr[2] = (mac_addr_lo >> 24);
	addr[3] = (mac_addr_lo >> 16);
	addr[4] = (mac_addr_lo >> 8);
	addr[5] = mac_addr_lo;
}
/**
    Tries to parse the given string as a <code>long</code> in the
    given <code>locale</code>, using the supplied DecimalFormat pattern.

    @param string the string to parse as a <code>long</code>
    @param locale the <code>Locale</code> to use for parsing
    @param pattern the DecimalFormat pattern to use for parsing

    @return a <code>long</code> value derived from the string
    @exception ParseException if the string cannot be parsed as an
    <code>long</code>
    @aribaapi documented
    @see java.text.DecimalFormat
*/
public static long parseLong (String string, Locale locale, String pattern)
  throws ParseException
{
    Assert.that(locale != null, "invalid null Locale");
        // Formats are pooled per (locale, pattern); release in finally so
        // the instance returns to the pool on every exit path.
    DecimalFormat format = null;
    try {
        format = acquireDecimalFormat(locale, pattern);
        return format.parse(string).longValue();
    }
    catch (NumberFormatException nfe) {
        // Normalize to the checked exception the caller expects.
        throw new ParseException(nfe.getMessage(), 0);
    }
    finally {
        releaseDecimalFormat(format, locale, pattern);
    }
}
With the recent release of Ubuntu 16.04, the inevitable flood of “When is Loki coming?” posts have begun pouring in. So here we are to help answer the question that some of you know the answer to already, “When it’s ready ;)”. But what exactly does that mean? How do we (and you) know when the 4th major release of elementary OS, code named Loki, will be ready to use? How It’s Made If this is your first time following along with elementary OS development then it’s very important for us to note that we work a little bit differently. Instead of basing our releases on how much time has passed, we base them on how much work we can get done. We want every new release of elementary OS to be significantly better than the last release. We don’t hold back updates from previous releases unless it’s necessary on a technical level. That’s why our last release, Freya, had several updates throughout the past year. Major releases happen when large technical changes happen. For developers, you’ll know these changes as “API breaks”. Currently, elementary OS releases are built using the Ubuntu package repositories (repos). At the beginning of the cycle, we choose a target repo to work from. For Loki, that repo is Ubuntu 16.04. We set up a daily repo where we build our source code against that Ubuntu repo. Sometimes things build straight away and sometimes builds fail because of things like API breaks. We find the causes of the build failures and update our source code until everything compiles. Alpha The next step is to generate daily disk images with our repository and the target (Ubuntu 16.04) repository. We do this using a system called metapackage “Seeds”. It’s the same way Ubuntu is built and it’s documented in pretty fine detail here. This is the first step to making a new release and we usually refer to it as “Pre-Alpha”. After this step, we have a testing platform that developers can use to make sure all of our code compiles before sending it to the build servers. 
This isn’t a release that’s ready for regular people to try to run, so we don’t usually publish Alpha releases to the public. Then, we talk about the projects we’re most interested in doing for the next release: big new features that our users are asking for, places where we can make our code faster and more reliable, and things we can work on with the wider open source community. To track all the changes we want to implement, we use a system called “Milestones” on Launchpad. The first milestone we create is called “Alpha 1”. A completed Alpha represents a barely working version of the next release. It’s a daily built disk image that has many of the new features we talked about and shouldn’t contain any massive show-stopping issues, but are generally considered to still be unstable. Depending on how many new things we want to do, we could have multiple alphas during this phase. There were some pretty big API breaks between Freya and Loki and we made it a goal to reduce the time that this cycle would last since we got a lot of feedback that users wanted faster OS releases. So instead of having multiple Alpha milestones this cycle, we had just one with the most important things. Beta We then create a milestone “Beta 1” which would be the first public testing release of elementary OS. This release needs to represent a somewhat stable and feature complete product, so it needs to contain fixes for any serious known regressions or important new features. When we release Beta 1, it is for developers and testers to help us find any major regressions before we release the stable version of Loki (called GM or Grand Master). The Beta1 milestone represents a kind of “home stretch” where we wrap up big features and get the OS ready for translations, screenshots, and documentation. This is what we’re working on right now. Depending on how Beta1 goes, there may or may not be additional beta releases. 
As you probably know, most of the people who work on elementary OS are either completely unpaid volunteers or bounty hunters. As of this writing, there are just 3 people regularly employed to work at elementary. We rely on bounties to attract developers who will help us get releases out as soon as we possibly can. So if you’re interested in getting Loki out the door and you have a few dollars to spare, we’d love your help funding work on Loki. Consider picking one of these issues and copy/pasting the URL into Bountysource. Multiple people can back a single issue at once, so every little bit counts! Release Time Once we’re confident we have a shippable product, there’s one last milestone series: Release Candidate or “RC”. This milestone is often very small, containing only a handful of minor cosmetic issues. It includes things like preparing our release notes and changing the wallpaper to the new default as well as switching from our unstable daily repo to our stable repo that only contains stable released software that is ready to be used by regular folks. RC’s often come out just days apart and are the last chance to find any last-minute show-stopping issues. Once an RC is released that the team is happy with, that ISO file gets promoted to “GM”. We upload it to our CDN, make copies available to press, and set the final release date for Loki. So watch out for the Beta announcement and soon after for the final release of elementary OS 0.4 Loki! A Big Thanks to Everyone! We want to say thanks again to everyone who has helped us with funding Loki development so far. We’re very excited to continue to grow elementary and help people make a living writing open source software. If you’re interested in helping fund bounties, but don’t particularly care which issues get backed check out this page. If you want to help us with funding, but aren’t particularly interesting in Bountysource specifically, please consider going to this page. 
If you’re a developer, there’s money to be had helping us get this Beta out! If nothing on this list interests you, you can find a more complete list of bountied issues here. If you’ve never developed for elementary OS before and you’re interested in learning more about that, check out our Getting Started developer guide. Edit: It seems like there might be a problem with redirecting to launchpad. If you get a 404, try replacing “%20″ with “+” in the URL in your browser.
# Read a 5x5 grid of space-separated cells and print, for the row that
# contains '1', the Manhattan distance from the grid centre (2, 2) to
# that cell.  (The original had an unused `j` list and an obfuscated
# `print(...) if ... else 10000` expression whose else-branch was a
# no-op; output is unchanged.)
for row in range(5):
    cells = input().split()
    if '1' in cells:
        print(abs(row - 2) + abs(cells.index('1') - 2))
def _run_evasion(self, evas, x0, y0, expected_x=None, expected_y=None):
    """Run the evasion attack on one sample and sanity-check its outputs.

    Args:
        evas: evasion attack object exposing ``run()``, ``classifier``,
            ``x_opt``, ``x_seq``, ``f_seq``, ``f_eval``, ``grad_eval``.
        x0: the single (possibly sparse) input sample to perturb.
        y0: label of ``x0``.
        expected_x: optional expected adversarial point, compared to
            ``evas.x_opt`` at 4 decimal places.
        expected_y: optional expected predicted label for the
            adversarial point.
    """
    self.logger.info("Malicious sample: " + str(x0))
    self.logger.info("Is sparse?: " + str(x0.issparse))

    # Time the full attack run.
    with self.logger.timer():
        y_pred, scores, adv_ds, f_obj = evas.run(x0, y0)

    # y=1 presumably selects the positive class of a binary problem --
    # TODO confirm against the fixtures used by the concrete test cases.
    self.logger.info("Starting score: " + str(
        evas.classifier.decision_function(x0, y=1).item()))

    self.logger.info("Final score: " + str(evas.f_opt))
    self.logger.info("x*:\n" + str(evas.x_opt))
    self.logger.info("Point sequence:\n" + str(evas.x_seq))
    self.logger.info("Score sequence:\n" + str(evas.f_seq))
    self.logger.info("Fun Eval: " + str(evas.f_eval))
    self.logger.info("Grad Eval: " + str(evas.grad_eval))

    # Shape checks: a single input sample must yield single-sample
    # outputs, and sparsity must be preserved end-to-end.
    self.assertEqual(1, y_pred.size)
    self.assertEqual(1, scores.shape[0])
    self.assertEqual(1, adv_ds.num_samples)
    self.assertEqual(adv_ds.issparse, x0.issparse)
    self.assertTrue(is_float(f_obj))

    if expected_x is not None:
        self.assert_array_almost_equal(
            evas.x_opt.todense().ravel(), expected_x, decimal=4)
    if expected_y is not None:
        self.assert_array_almost_equal(y_pred.item(), expected_y)
// VerifyKeyPair is a helper that checks if the given secret key corresponds to the given public key func VerifyKeyPair(sk SK, pk PK) bool { var target PK _, first := pk.(*FP256BN.ECP) if first { target = &(*FP256BN.ECP_generator().Mul(sk)) } else { target = &(*FP256BN.ECP2_generator().Mul(sk)) } return pkEqual(pk, target) }
package org.shaneking.book.isbn9787111566489.s4c2;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

import java.io.IOException;

/**
 * Entry point for the labeled-expression calculator example (ANTLR 4).
 * Reads a program from stdin, parses it with the generated
 * LabeledExprLexer/LabeledExprParser, and evaluates it via EvalVisitor.
 */
public class Calc {
  public static void main(String[] args) throws IOException {
    // NOTE(review): ANTLRInputStream is deprecated in newer ANTLR 4
    // releases in favour of CharStreams.fromStream -- confirm the ANTLR
    // version this book example pins before changing it.
    ANTLRInputStream antlrInputStream = new ANTLRInputStream(System.in);
    LabeledExprLexer labeledExprLexer = new LabeledExprLexer(antlrInputStream);
    CommonTokenStream commonTokenStream = new CommonTokenStream(labeledExprLexer);
    LabeledExprParser labeledExprParser = new LabeledExprParser(commonTokenStream);
    // "prog" is the grammar's start rule.
    ParseTree parseTree = labeledExprParser.prog();
    EvalVisitor evalVisitor = new EvalVisitor();
    evalVisitor.visit(parseTree);
  }
}
def post(self):
    """Publish a JSON batch of updates to the loader RabbitMQ exchange.

    The ``X-DRIVER-NAME`` request header (default ``'*'``) is forwarded
    as a message header so consumers can filter by driver.

    Returns:
        ({'message': ...}, 202) on success; aborts with 400 on a bad
        request body and 500 on any publishing failure.
    """
    try:
        data = request.get_json()
        driver_name = request.headers.get('X-DRIVER-NAME', '*')
        headers = {
            'X-DRIVER-NAME': driver_name
        }
        app.config['LOADER_RMQ'].publish_updates(
            updates=data,
            headers=headers)
        res = {
            'message': 'Updates published successfully',
        }
        return res, 202
    except BadRequest as err:
        api.abort(400, errors=err.description)
    except Exception:
        # BUG FIX: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Real publish failures are still
        # logged with traceback and surfaced as a 500.
        app.logger.exception('Error sending updates to rabbitmq')
        res = {'message': 'Error sending updates to queue'}
        return api.abort(500, errors=res)
/**
 * Teacher.
 *
 * @author Sergey Nazarov
 * @version $Id$
 * @since 2018.02.19
 */
public class Teacher extends Profession {
    /**
     * Describes this teacher teaching the given student.
     *
     * @param studient the student being taught
     * @return human-readable description of the process
     */
    public String teach(Studient studient) {
        final String teacher = this.getProfession() + " " + this.getName();
        return teacher + " учит " + studient.getName();
    }
}
/// Create a new `Metrics` handle with the given interval and callback.
///
/// Spawns a background task that aggregates incoming `Measurement`s into
/// per-(label, tags, unit) `Distribution`s, flushing each one through
/// `callback` once its interval elapses and again when the channel closes.
///
/// * `interval` the maximum time that a metric can span.
/// * `callback` that is invoked with the newly created metric passed in.
pub fn new<F>(
    interval: Duration,
    mut callback: F,
) -> Self
    where F: FnMut(&Metric) + Send + 'static {
    let (tx, rx): (Sender<Measurement>, Receiver<Measurement>) = mpsc::channel();
    executor().submit(move || {
        // One open distribution per (label, tags, unit) triple.
        let mut distributions: HashMap<String, Distribution> = HashMap::new();
        while let Ok(measurement) = rx.recv() {
            // Get the distribution for the given measurement
            let key = format!("{}{}{}", measurement.label, measurement.tags, measurement.unit);
            let mut distribution = distributions.entry(key.clone())
                .or_insert_with(|| Distribution::from(&measurement));
            if !distribution.values.is_empty() {
                // `first` is set whenever values is non-empty, so unwrap is safe.
                let first = distribution.first.unwrap();
                let time = match first.checked_add(interval) {
                    Some(s) => s,
                    None => panic!("Failed to determine distribution duration")
                };
                // Interval elapsed: flush the old distribution and start a
                // fresh one seeded with the current measurement.
                if time.lt(&measurement.at) {
                    Metrics::metric_from(distribution, &mut callback);
                    distributions.insert(key.clone(), Distribution::from(&measurement));
                    distribution = distributions.get_mut(&key).unwrap();
                }
            }
            // Update the distribution with the given measurement
            match distribution.add(&measurement) {
                Ok(_) => {}
                Err(e) => match e {
                    // An Unrelated error would mean the grouping key above is
                    // wrong, so fail loudly; other errors skip the sample.
                    MetricsError::Unrelated(..) => panic!("{}", e),
                    _ => continue
                }
            }
        }
        // Channel closed (all senders dropped): flush whatever remains.
        for distribution in distributions.values() {
            Metrics::metric_from(distribution, &mut callback);
        }
    });
    Metrics { tx }
}
import { parse, eval as expEval } from 'expression-eval'
import { Query } from './query'
import { Where } from './where'
import deepCopy from "ts-deepcopy";

/**
 * The From class extends Query functionality with the ability to
 * Join arrays together. Additionally, after joins are completed
 * you can execute a "Where" expression, which will convert the
 * from dataset into a Where dataset.
 * @extends Query
 */
export class From extends Query {
  /**
   * Join this array to another array. Require a join identifier, which will
   * match the joined values. If you have multiple joins, this may duplicate
   * records on the "from" side.
   * @param join The array to join
   * @param identifier The identifier to use for the joined objects
   * @param fromAttributeName the From array attribute to match on
   * @param joinAttributeName the join array attribute to match on
   * @param innerJoin Defaults to false (outer join), set to true to perform an inner join (exclude non-matches from the result)
   * @returns A new "From" object containing the resulting join
   */
  public join (join: Array<any>, identifier: string, fromAttributeName: string, joinAttributeName: string, innerJoin = false): From {
    // merge the join array and the from array together, based on the attributes
    const joinedArray: Array<any> = []
    for (const item of this.getQueryArray()) {
      let foundJoin = false
      for (const joinItem of join) {
        if (item[fromAttributeName] === joinItem[joinAttributeName]) {
          foundJoin = true
          // Deep-copy so joined rows never alias the source rows.
          const joinedItem = deepCopy(item)
          joinedItem[identifier] = {}
          for (const att in joinItem) {
            if (Object.prototype.hasOwnProperty.call(joinItem, att)) {
              joinedItem[identifier][att] = joinItem[att]
            }
          }
          joinedArray.push(joinedItem)
        }
      }
      // If it's an outer join and we haven't found a match, we should
      // add the record with a null identifier attribute.
      if (!innerJoin && !foundJoin) {
        const joinedItem = deepCopy(item)
        joinedItem[identifier] = null
        joinedArray.push(joinedItem)
      }
    }
    return new From(joinedArray)
  }

  /**
   * Uses the expression-eval library to evaluate an epxression. This does
   * not at any point execute an eval() statement, however you should be
   * cautious with any expression evaluation.
   * Your expression must result in a boolean, ie: 'attribute === 42'
   * You can pass in null to return all results without evaluating
   * @param expression An expression
   */
  public where (expression: string | null = null): Where {
    if (expression && expression.length > 0) {
      const passed: Array<any> = []
      const ast = parse(expression)
      for (const item of this.getQueryArray()) {
        try {
          const result = expEval(ast, item)
          if (result) {
            passed.push(deepCopy(item))
          }
        } catch (err) {
          // failed to evaluate expression, usually a bad attribute value, or missing attribute from a join
          console.error(err)
        }
      }
      return new Where(passed)
    } else {
      return new Where(this.getQueryArray())
    }
  }

  /**
   * A wrapper around the default Array.filter function. An
   * alternative to using the where expression evaluate function
   * @param predicate Your filter function
   */
  public whereFilter (predicate: (value: any, index: number, array: any[]) => unknown): Where {
    return new Where(this.getQueryArray().filter(predicate))
  }
}
“We will make an announcement, on that, on Tuesday,” Paul told reporters when asked about whether he will remain in the race if he drops to the undercard debate, according to the Boston Globe . A spokesman later told the newspaper that the senator was referring to whether he would participate in the undercard debate next Tuesday. ADVERTISEMENT The Hill reported Friday that Paul faces a serious risk of missing the prime-time debate, depending on how his polling this weekend goes. Under CNN’s rules, a candidate can make the main stage if they garner 3.5 percent in an average of national polls or 4 percent in an average of polls in either Iowa or New Hampshire. Paul’s best hope is a weekend bump in Iowa polling. He garnered 3 percent support in a Bloomberg Politics/Des Moines Register poll released Saturday afternoon. CNN will reveal the lineup for the debate on “State of the Union” on Sunday morning. Paul on Friday called for CNN to tweak its rules in a similar manner to how the network approached its September debate. “We think if they give us the same treatment that Carly Fiorina was given last time, that you measure from debate to debate, that we do meet the criteria,” Paul said on Fox News. “I have every expectation that I will be treated fairly. But I want the same and equal treatment that other candidates have gotten in the past,” he said. “We have a first-tier campaign and we don't plan on being labeled by the mainstream media anything less.” This report was updated at 8:46 a.m. on Dec. 13.
<gh_stars>0
package ren.gui.seqEdit;

import ren.gui.ParameterMap;
import ren.tonal.TonalManager;
import ren.util.PO;

/**
 * Note-generation component that adds tonal awareness (via a
 * {@link TonalManager}) on top of the parameter handling inherited from
 * {@link ParamNTGC}.
 */
public class TonalNTGC extends ParamNTGC {

    // Tonal context consulted by this generator.
    private TonalManager tm;

    public TonalNTGC(){}

    /**
     * Two-stage initialiser (new-then-construct pattern used across this
     * package): wires the parameter maps through the superclass and stores
     * the tonal manager.
     *
     * @return this instance, for chaining
     */
    public TonalNTGC construct(TonalManager tm, ParameterMap scope,
                               ParameterMap quantise,
                               ParameterMap shuffle) {
        // PO.p("scope max in tonal ntgc = " + scope.getMax()); //this.getScope().getMax());
        super.construct(scope, quantise, shuffle);
        this.tm = tm;
        return this;
    }

    // NOTE(review): dead code below -- the commented-out constructor name
    // "TonalNTCG" transposes two letters of the class name (TonalNTGC).
    //public TonalNTCG() {
    //    this(new TonalManager());
    //}

    /**
     * Get the Tm value.
     * @return the Tm value.
     */
    public TonalManager getTm() {
        return tm;
    }

    /**
     * Set the Tm value.
     * @param newTm The new Tm value.
     */
    public void setTm(TonalManager newTm) {
        this.tm = newTm;
    }

    // NOTE(review): unimplemented stub kept from an earlier revision.
    /// public int [] getTonalPitchesY() {}
}
How Far Is Distance Learning From Education? With knowledge doubling every year or so, “expertise” now has a shelf life measured in days; everyone must be both learner and teacher; and the sheer challenge of learning can be managed only through a globe-girdling network that links all minds and all knowledge. I call this new wave of technology hyperlearning. . . . It is not a single device or process, but a universe of new technologies that both possess and enhance intelligence. The hyper in hyperlearning refers not merely to the extraordinary speed and scope of new information technology, but to an unprecedented degree of connectedness of knowledge, experience, media, and brains—both human and nonhuman. . . . We have the technology today to enable virtually anyone who is not severely handicapped to learn anything, at a “grade A” level, anywhere, anytime.
-- | A small Minesweeper board model.
--
-- NOTE(review): the module name (Data.String.Strip) does not match the
-- contents; it looks like a leftover from a project template — confirm before
-- renaming.
module Data.String.Strip where

import Data.Char
import qualified Data.Map as Map

data GameState = Playing | Victory deriving (Eq, Show)

-- | Board width, height, and a sparse map of the cells that exist so far.
data Board = Board Int Int (Map.Map Point Cell)

data Point = Point Int Int deriving (Ord, Eq)

-- | Empty n carries the adjacent-mine count; -1 marks "not yet revealed".
data CellContent = Empty Int | Mine deriving (Eq, Show)

-- | Cell content plus its flagged state.
data Cell = Cell CellContent Bool deriving (Eq, Show)

-- | True when no cell has been placed at (x, y).
isEmpty :: Board -> Int -> Int -> Bool
isEmpty (Board _ _ m) x y = isPointEmpty m (Point x y)

isFlagged :: Board -> Int -> Int -> Bool
isFlagged b x y = let (Cell _ t) = cellAt b x y in t

isPointEmpty :: Map.Map Point Cell -> Point -> Bool
isPointEmpty m p = not $ Map.member p m

cellAt :: Board -> Int -> Int -> Cell
cellAt (Board _ _ m) x y = cellAtPoint m (Point x y)

-- | Look a cell up, defaulting to an unrevealed, unflagged cell.
-- Fix: the original wrote @otherwise -> ...@ inside the case, which binds a
-- fresh variable named otherwise rather than acting as a wildcard; replaced
-- with the explicit Nothing pattern.
cellAtPoint :: Map.Map Point Cell -> Point -> Cell
cellAtPoint m p =
  case Map.lookup p m of
    Just c  -> c
    Nothing -> Cell (Empty (-1)) False

-- | Count the mines in the eight cells surrounding (x, y).
--
-- Fixes two defects in the original:
--   * only the eastern neighbour (x+1, y) was inspected;
--   * any occupied point (including revealed Empty cells) was counted as a
--     mine, because the test was merely "point is present in the map".
countMines :: Map.Map Point Cell -> Int -> Int -> Int
countMines m x y =
  let neighbours = [ Point (x + dx) (y + dy)
                   | dx <- [-1, 0, 1]
                   , dy <- [-1, 0, 1]
                   , (dx, dy) /= (0, 0) ]
      isMine p = case Map.lookup p m of
                   Just (Cell Mine _) -> True
                   _                  -> False
  in length $ filter isMine neighbours

-- | Number of cells currently in a "correct" end-game state.
countCorrectCells :: Board -> Int
countCorrectCells (Board _ _ m) =
  let cells    = Map.elems m
      corrects = map isCellCorrect cells
  in length $ filter id corrects

-- | A revealed cell is correct once its count is known; a mine is correct
-- when flagged.
isCellCorrect :: Cell -> Bool
isCellCorrect (Cell c f) =
  case c of
    Empty n -> n /= (-1)
    Mine    -> f

mkBoard :: Int -> Int -> Board
mkBoard w h = Board w h Map.empty

addMine :: Board -> Int -> Int -> Board
addMine (Board h w m) x y =
  let p = Point x y
  in Board h w (Map.insert p (Cell Mine False) m)

-- | Reveal (x, y), storing the surrounding-mine count computed from the map
-- as it was before the reveal.
select :: Board -> Int -> Int -> Board
select (Board h w m) x y =
  let p = Point x y
  in Board h w (Map.insert p (Cell (Empty $ countMines m x y) False) m)

toggleFlag :: Board -> Int -> Int -> Board
toggleFlag b@(Board h w m) x y =
  let p = Point x y
      (Cell value flag) = cellAt b x y
  in Board h w (Map.insert p (Cell value $ not flag) m)

-- | Victory once every one of the w*h cells is present and correct.
getStatus :: Board -> GameState
getStatus b@(Board h w m) =
  let numCells = h * w
      count    = countCorrectCells b
  in case count == numCells of
       True  -> Victory
       False -> Playing
// RedisUpdate loads a catalog of per endpoint and api key // into Redis func RedisUpdate(conn redis.Conn, ail *APIIndexedLimits) { n := time.Now() updateTime, err := RedisStartUpdate(conn, n) if err != nil { fmt.Printf("cannot start redis update: %s\n", err.Error()) return } if updateTime == nil { return } RedisUpdateLimits(conn, ail) RedisFinishUpdate(conn, n, "v0.0.1", ail.Version.HashVer) }
/**
 * Shows the data of the given dataset: records it as the current context
 * dataset, then delegates the actual display to the two-argument overload.
 *
 * @param dataSetDescriptor the dataset descriptor to make current
 * @param showPlot show the graphical plot
 * @param showTable show the table
 */
@Override
public void showNewStellarData(@NotNull DataSetDescriptor dataSetDescriptor, boolean showPlot, boolean showTable) {
    // Remember the descriptor as the active dataset before rendering.
    setContextDataSet(dataSetDescriptor);
    // Presumably the overload renders from the context set above — confirm.
    showNewStellarData(showPlot, showTable);
}
/*
   Copyright 2003-2019 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

/**
 * A bard song that reduces the affected mob's effective weight to zero while
 * it lasts, and prevents the mob from picking up items too heavy for its
 * remaining carrying capacity.
 */
public class Song_Lightness extends Song
{
	@Override
	public String ID()
	{
		return "Song_Lightness";
	}

	private final static String localizedName = CMLib.lang().L("Lightness");

	@Override
	public String name()
	{
		return localizedName;
	}

	// NOTE(review): QUALITY_MALICIOUS for a weight-reducing song looks odd;
	// confirm this is intended rather than a copy/paste from another Song.
	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	@Override
	protected boolean HAS_QUANTITATIVE_ASPECT()
	{
		return false;
	}

	/** While the song is active, the affected thing weighs nothing. */
	@Override
	public void affectPhyStats(final Physical affected, final PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		affectableStats.setWeight(0);
	}

	/**
	 * Total effective weight of the mob plus everything it carries, excluding
	 * items floating nearby (which do not burden the carrier).
	 */
	public int mobWeight(final MOB mob)
	{
		int weight=mob.basePhyStats().weight();
		for(int i=0;i<mob.numItems();i++)
		{
			final Item I=mob.getItem(i);
			if((I!=null)&&(!I.amWearingAt(Wearable.WORN_FLOATING_NEARBY)))
				weight+=I.phyStats().weight();
		}
		return weight;
	}

	/**
	 * Vetoes GET attempts by the affected mob when the item would exceed the
	 * mob's remaining carrying capacity (unless the mob already owns it).
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if((affected!=null)
		&&(affected instanceof MOB)
		&&(msg.amISource((MOB)affected))
		&&(msg.targetMinor()==CMMsg.TYP_GET)
		&&(msg.target() instanceof Item)
		&&(((msg.tool()==null)||(msg.tool() instanceof MOB))))
		{
			final MOB mob=msg.source();
			// Too heavy if the item exceeds what the mob can still carry.
			if((((Item)msg.target()).phyStats().weight()>(mob.maxCarry()-mobWeight(mob)))
			&&(!mob.isMine(msg.target())))
			{
				mob.tell(L("@x1 is too heavy.",((Item)msg.target()).name(mob)));
				return false;
			}
		}
		return super.okMessage(myHost,msg);
	}

	/** Undo the effects of this song, notifying the mob if appropriate. */
	@Override
	public void unInvoke()
	{
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;

		super.unInvoke();

		if(canBeUninvoked())
			mob.tell(L("Your normal weight returns."));
	}

	/** Monsters in combat have no use for this song. */
	@Override
	public int castingQuality(final MOB mob, final Physical target)
	{
		if(mob!=null)
		{
			if(mob.isInCombat()&&(mob.isMonster()))
				return Ability.QUALITY_INDIFFERENT;
		}
		return super.castingQuality(mob,target);
	}
}
// Package client_secure is the generated entry point for the westfield
// secure APIs: it wires one sub-client per API group onto a shared transport.
package client_secure

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
	"github.com/go-openapi/runtime"
	httptransport "github.com/go-openapi/runtime/client"
	strfmt "github.com/go-openapi/strfmt"

	"restclient/client_secure/i_n_t_e_g_r_a_t_i_o_n_s"
	"restclient/client_secure/p_a_r_k_i_n_g"
	"restclient/client_secure/p_a_y_m_e_n_t_s"
	"restclient/client_secure/p_e_o_p_l_e_a_u_t_h"
	"restclient/client_secure/p_e_o_p_l_e_c_o_n_s_u_m_e_r"
	"restclient/client_secure/p_e_o_p_l_e_m_a_n_a_g_e_m_e_n_t"
	"restclient/client_secure/s_t_a_f_f_a_u_t_h"
	"restclient/client_secure/s_t_a_f_f_m_a_n_a_g_e_m_e_n_t"
	"restclient/client_secure/w_i_s_h_l_i_s_t_s"
)

// Default westfield apis HTTP client.
var Default = NewHTTPClient(nil)

// NewHTTPClient creates a new westfield secure-apis HTTP client.
// A nil registry falls back to the strfmt default format registry.
func NewHTTPClient(formats strfmt.Registry) *WestfieldSecureApis {
	if formats == nil {
		formats = strfmt.Default
	}
	// HTTPS-only transport pinned to the /v1 base path.
	transport := httptransport.New("secure.westfield.io", "/v1", []string{"https"})
	return New(transport, formats)
}

// New creates a new westfield apis client on the given transport.
func New(transport runtime.ClientTransport, formats strfmt.Registry) *WestfieldSecureApis {
	cli := new(WestfieldSecureApis)
	cli.Transport = transport

	cli.INTEGRATIONS = i_n_t_e_g_r_a_t_i_o_n_s.New(transport, formats)

	cli.PARKING = p_a_r_k_i_n_g.New(transport, formats)

	cli.PAYMENTS = p_a_y_m_e_n_t_s.New(transport, formats)

	cli.PEOPLEAUTH = p_e_o_p_l_e_a_u_t_h.New(transport, formats)

	cli.PEOPLECONSUMER = p_e_o_p_l_e_c_o_n_s_u_m_e_r.New(transport, formats)

	cli.PEOPLEMANAGEMENT = p_e_o_p_l_e_m_a_n_a_g_e_m_e_n_t.New(transport, formats)

	cli.STAFFAUTH = s_t_a_f_f_a_u_t_h.New(transport, formats)

	cli.STAFFMANAGEMENT = s_t_a_f_f_m_a_n_a_g_e_m_e_n_t.New(transport, formats)

	cli.WISHLISTS = w_i_s_h_l_i_s_t_s.New(transport, formats)

	return cli
}

// WestfieldSecureApis is a client for westfield secure-apis
type WestfieldSecureApis struct {
	INTEGRATIONS *i_n_t_e_g_r_a_t_i_o_n_s.Client

	PARKING *p_a_r_k_i_n_g.Client

	PAYMENTS *p_a_y_m_e_n_t_s.Client

	PEOPLEAUTH *p_e_o_p_l_e_a_u_t_h.Client

	PEOPLECONSUMER *p_e_o_p_l_e_c_o_n_s_u_m_e_r.Client

	PEOPLEMANAGEMENT *p_e_o_p_l_e_m_a_n_a_g_e_m_e_n_t.Client

	STAFFAUTH *s_t_a_f_f_a_u_t_h.Client

	STAFFMANAGEMENT *s_t_a_f_f_m_a_n_a_g_e_m_e_n_t.Client

	WISHLISTS *w_i_s_h_l_i_s_t_s.Client

	Transport runtime.ClientTransport
}

// SetTransport changes the transport on the client and all its subresources
func (c *WestfieldSecureApis) SetTransport(transport runtime.ClientTransport) {
	c.Transport = transport

	c.INTEGRATIONS.SetTransport(transport)

	c.PARKING.SetTransport(transport)

	c.PAYMENTS.SetTransport(transport)

	c.PEOPLEAUTH.SetTransport(transport)

	c.PEOPLECONSUMER.SetTransport(transport)

	c.PEOPLEMANAGEMENT.SetTransport(transport)

	c.STAFFAUTH.SetTransport(transport)

	c.STAFFMANAGEMENT.SetTransport(transport)

	c.WISHLISTS.SetTransport(transport)
}
def rasp(options=None):
    """Deprecated alias for ``arm_cpu('rasp3b', options)``.

    Kept for backward compatibility only; new code should call
    :func:`arm_cpu` directly.

    Parameters
    ----------
    options : str or list of str, optional
        Additional target options, forwarded unchanged to ``arm_cpu``.

    Returns
    -------
    The target returned by ``arm_cpu('rasp3b', options)``.
    """
    # stacklevel=2 attributes the warning to the caller of rasp(), not to
    # this shim itself.
    warnings.warn('tvm.target.rasp() is going to be deprecated. '
                  'Please use tvm.target.arm_cpu("rasp3b")',
                  stacklevel=2)
    return arm_cpu('rasp3b', options)
<reponame>openregister/generate-picker-data-file package uk.gov; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import org.apache.commons.io.IOUtils; public class Fetcher { public static String get(String urlToRead) throws IOException { URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); return IOUtils.toString(conn.getInputStream(), "UTF-8"); } }
from six.moves import xrange


class Communicator(object):
    """Relays the Peg Jumping problem between a visualizer process and a
    solution process.

    The read order below mirrors the problem's I/O protocol exactly; do not
    reorder the reads.
    """

    def communicate(self, visualizer, solution, visualizer_cb, solution_cb):
        """Forward the test case from ``visualizer`` to the callbacks, then
        forward the solution's answer.

        Args:
            visualizer: file-like handle producing the test-case input.
            solution: file-like handle producing the solution's output.
            visualizer_cb: callback receiving each input line; ``flush=True``
                marks the end of the input block.
            solution_cb: callback receiving each output line.
        """
        line = visualizer.readline()  # M: number of peg-value lines
        visualizer_cb(line, flush=False)
        M = int(line)
        for i in xrange(M):
            visualizer_cb(visualizer.readline(), flush=False)  # pegValue[i]
        line = visualizer.readline()  # N: number of board rows
        visualizer_cb(line, flush=False)
        N = int(line)
        for i in xrange(N):
            visualizer_cb(visualizer.readline(), flush=False)  # board[i]
        # Empty line with flush=True signals the input is complete.
        visualizer_cb('', flush=True)

        # read solution: first the length of the answer, then each line of it.
        line = solution.readline()  # ret_length
        solution_cb(line, flush=False)
        ret_length = int(line)
        for i in xrange(ret_length):
            solution_cb(solution.readline(), flush=False)
package com.besome.sketch.tools;

import android.app.AlertDialog;
import android.graphics.Typeface;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.HorizontalScrollView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.NumberPicker;
import android.widget.PopupMenu;
import android.widget.ScrollView;
import android.widget.TextView;

import com.besome.sketch.lib.base.BaseActivity;
import com.sketchware.remod.Resources;

import mod.hey.studios.util.CompileLogHelper;
import mod.hey.studios.util.Helper;

/**
 * Activity that displays a (possibly colorized) compile log, with toolbar
 * options for text wrapping, monospaced font, and font size.
 */
public class CompileLogActivity extends BaseActivity {

    // The log text view plus the two scroll containers it is re-parented
    // between when "wrap text" is toggled.
    private TextView tv_compile_log;
    private HorizontalScrollView err_hScroll;
    private ScrollView err_vScroll;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(Resources.layout.compile_log);

        View rootLayout = ((ViewGroup) findViewById(android.R.id.content)).getChildAt(0);
        ImageView back = findViewById(Resources.id.ig_toolbar_back);
        TextView title = findViewById(Resources.id.tx_toolbar_title);
        ImageView menu = findViewById(Resources.id.ig_toolbar_load_file);
        tv_compile_log = findViewById(Resources.id.tv_compile_log);
        // Scroll containers are located by tag, not id.
        err_hScroll = rootLayout.findViewWithTag("err_hScroll");
        err_vScroll = rootLayout.findViewWithTag("err_vScroll");

        back.setOnClickListener(v -> onBackPressed());
        Helper.applyRippleToToolbarView(back);

        // Title distinguishes a freshly produced log from a recalled one.
        if (getIntent().getBooleanExtra("showingLastError", false)) {
            title.setText("Last compile log");
        } else {
            title.setText("Compile log");
        }

        menu.setImageResource(Resources.drawable.ic_more_vert_white_24dp);
        menu.setVisibility(View.VISIBLE);
        Helper.applyRippleToToolbarView(menu);

        final String wrapTextLabel = "Wrap text";
        final String monospacedFontLabel = "Monospaced font";
        final String fontSizeLabel = "Font size";

        // Overflow menu: two checkable toggles plus a font-size dialog.
        PopupMenu options = new PopupMenu(getApplicationContext(), menu);
        options.getMenu().add(wrapTextLabel).setCheckable(true);
        options.getMenu().add(monospacedFontLabel).setCheckable(true).setChecked(true);
        options.getMenu().add(fontSizeLabel);

        // Items are matched by their title string, so the labels above must
        // stay in sync with the switch below.
        options.setOnMenuItemClickListener(menuItem -> {
            switch (menuItem.getTitle().toString()) {
                case wrapTextLabel:
                    menuItem.setChecked(!menuItem.isChecked());
                    toggleWrapText(menuItem.isChecked());
                    break;

                case monospacedFontLabel:
                    menuItem.setChecked(!menuItem.isChecked());
                    toggleMonospacedText(menuItem.isChecked());
                    break;

                case fontSizeLabel:
                    changeFontSizeDialog();
                    break;

                default:
                    return false;
            }
            return true;
        });
        menu.setOnClickListener(v -> options.show());

        String error = getIntent().getStringExtra("error");
        if (error == null) {
            // Nothing to show; close immediately.
            finish();
        } else {
            tv_compile_log.setText(CompileLogHelper.colorErrsAndWarnings(error));
            tv_compile_log.setTextIsSelectable(true);
        }
    }

    /**
     * Re-parents the log view: wrapped text lives directly in the vertical
     * scroller; unwrapped text is nested in a horizontal scroller inside it.
     */
    private void toggleWrapText(boolean isChecked) {
        if (isChecked) {
            err_vScroll.removeAllViews();
            if (tv_compile_log.getParent() != null) {
                ((ViewGroup) tv_compile_log.getParent()).removeView(tv_compile_log);
            }
            err_vScroll.addView(tv_compile_log);
        } else {
            err_vScroll.removeAllViews();
            if (tv_compile_log.getParent() != null) {
                ((ViewGroup) tv_compile_log.getParent()).removeView(tv_compile_log);
            }
            err_hScroll.removeAllViews();
            err_hScroll.addView(tv_compile_log);
            err_vScroll.addView(err_hScroll);
        }
    }

    /** Switches the log between monospace and the default typeface. */
    private void toggleMonospacedText(boolean isChecked) {
        if (isChecked) {
            tv_compile_log.setTypeface(Typeface.MONOSPACE);
        } else {
            tv_compile_log.setTypeface(Typeface.DEFAULT);
        }
    }

    /** Shows a NumberPicker dialog (10–70 sp) for the log's font size. */
    private void changeFontSizeDialog() {
        NumberPicker picker = new NumberPicker(this);
        picker.setMinValue(10); //Must not be less than setValue(), which is currently 11 in compile_log.xml
        picker.setMaxValue(70);
        picker.setWrapSelectorWheel(false);
        // Current size converted from px back to sp for the picker.
        picker.setValue((int) (tv_compile_log.getTextSize() / getResources().getDisplayMetrics().scaledDensity));

        LinearLayout layout = new LinearLayout(this);
        // NOTE(review): the third LayoutParams ctor argument is the layout
        // *weight* (a float), not a gravity; passing Gravity.CENTER here
        // looks unintended — confirm the intended centering mechanism.
        layout.addView(picker, new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.WRAP_CONTENT,
                LinearLayout.LayoutParams.WRAP_CONTENT,
                Gravity.CENTER));

        new AlertDialog.Builder(this)
                .setTitle("Select font size")
                .setView(layout)
                .setPositiveButton(android.R.string.ok, (dialog, which) ->
                        tv_compile_log.setTextSize((float) picker.getValue()))
                .setNegativeButton(android.R.string.cancel, null)
                .show();
    }
}
-- | Exercises from "Haskell Programming from First Principles", chapter 8
-- (Recursion). Several definitions are intentionally broken or naive, as
-- teaching examples.
module Test where

-- Deliberately missing a base case: evaluation never terminates.
brokenFact1 :: Integer -> Integer
brokenFact1 n = n * brokenFact1 (n - 1)

-- Forcing y diverges, because brokenFact1 never stops recursing.
y :: Integer
y = brokenFact1 4

inc :: Num a => a -> a
inc = (+1)

three :: Integer
three = inc . inc . inc $ 0

three' :: Integer
-- different syntax, same thing
three' = (inc . inc . inc) 0

-- Apply (+1) to n, 'times' times, by explicit recursion.
incTimes :: (Eq a, Num a) => a -> a -> a
incTimes 0 n = n
incTimes times n = 1 + (incTimes (times - 1) n)

-- Generalisation of incTimes: apply an arbitrary function 'times' times.
applyTimes :: (Eq a, Num a) => a -> (b -> b) -> b -> b
applyTimes 0 _ n = n
applyTimes times func n = func (applyTimes (times - 1) func n)
--f . applyTimes(times - 1 ) f $ n

-- incTimes re-expressed in terms of applyTimes.
incTimes' :: (Eq a, Num a) => a -> a -> a
incTimes' times n = applyTimes times (+1) n

-- ------------------

-- Partial function: bottoms (throws) on True.
f :: Bool -> Int
f False = 0
f True = error "blah"

-- Total version of f, using Maybe instead of error.
f' :: Bool -> Maybe Int
f' False = Just 0
f' _ = Nothing

-- Naive doubly-recursive Fibonacci (exponential time; fine for exercises).
fibonacci :: Integral a => a -> a
fibonacci 0 = 0
fibonacci 1 = 1
fibonacci n = fibonacci (n - 1) + fibonacci (n - 2)
/**
 * Replaces all permissions of the specified role with the given set:
 * existing role-permission relations are removed, then one relation is
 * created per permission id. Repository failures are logged, not rethrown.
 *
 * @param roleId the specified role id
 * @param permissionIds ids of the permissions the role should end up with
 */
@Transactional
public void updateRolePermissions(final String roleId, final Set<String> permissionIds) {
    try {
        // Wipe the old relations first, then recreate from scratch.
        rolePermissionRepository.removeByRoleId(roleId);

        for (final String permissionId : permissionIds) {
            final JSONObject relation = new JSONObject();
            relation.put(Role.ROLE_ID, roleId);
            relation.put(Permission.PERMISSION_ID, permissionId);
            rolePermissionRepository.add(relation);
        }
    } catch (final RepositoryException e) {
        LOGGER.log(Level.ERROR, "Updates role permissions failed", e);
    }
}
/*==============================================================================
    Copyright (c) 2001-2010 <NAME>
    Copyright (c) 2010 <NAME>

    Distributed under the Boost Software License, Version 1.0. (See accompanying
    file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#ifndef BOOST_PHOENIX_OBJECT_DETAIL_CAST_TARGET_HPP
#define BOOST_PHOENIX_OBJECT_DETAIL_CAST_TARGET_HPP

namespace boost { namespace phoenix {
    namespace detail
    {
        // Value wrapper that carries the destination type T of a Phoenix
        // cast/construct expression as an ordinary (storable) object.
        template <typename T>
        struct target
        {
            typedef T type;
        };

        namespace result_of
        {
            // Metafunction: given the proto terminal type holding a
            // detail::target<T>, recover T. The stored value type is first
            // stripped of cv- and reference qualifiers, then its nested
            // ::type is exposed.
            template <typename T>
            struct target
            {
                typedef
                    typename proto::detail::uncvref<
                        typename proto::result_of::value<T>::type
                    >::type
                    target_type;
                typedef typename target_type::type type;
            };

            // Peel const-reference and reference qualifiers off before
            // recursing into the primary template above.
            template <typename T>
            struct target<T const&> : target<T> {};

            template <typename T>
            struct target<T&> : target<T> {};
        }
    }
}}

#endif
/**
 * Contains a high-level implementation of an {@link AnnotatedHttpServer http server}
 * and a {@link SimpleAnnotatedProxy http proxy}, both dispatching based on annotations. <br>
 * To match certain requests to a method use {@link jquic.example.http.annotated.Request Request};
 * to match responses use {@link jquic.example.http.annotated.Response Response}. <br>
 * In addition to the server and proxy themselves, all required annotations and utility
 * classes are contained within this package.
 */
package jquic.example.http.annotated;
import { Body, Controller, Get, Patch, UseGuards } from '@nestjs/common';
import { ApiBearerAuth, ApiCreatedResponse, ApiTags } from '@nestjs/swagger';
import { ConfigFullDto } from '@studio-lite-lib/api-dto';

import { SettingService } from '../../database/services/setting.service';
import { JwtAuthGuard } from '../../auth/jwt-auth.guard';
import { IsAdminGuard } from '../is-admin.guard';

/**
 * Admin endpoints for reading and writing the application configuration.
 */
@Controller('admin/settings')
export class SettingController {
  constructor(private settingService: SettingService) {}

  /** Returns the full configuration object. */
  @Get('config')
  @ApiCreatedResponse({ type: ConfigFullDto })
  @ApiTags('admin settings')
  async findConfig(): Promise<ConfigFullDto> {
    return this.settingService.findConfig();
  }

  /** Applies the submitted configuration; requires an admin JWT. */
  @Patch('config')
  @UseGuards(JwtAuthGuard, IsAdminGuard)
  @ApiBearerAuth()
  @ApiTags('admin settings')
  async patchConfig(@Body() settingData: ConfigFullDto) {
    return this.settingService.patchConfig(settingData);
  }
}
/**
 * Checks whether the passed model class carries an {@link RestrictedBy}
 * annotation.
 *
 * @param modelClass class to be checked for the {@link RestrictedBy}
 *                   annotation; may be {@code null}
 * @return {@code true} if the class is non-null and annotated,
 *         {@code false} otherwise
 */
public static boolean hasAnnotation(Class<?> modelClass) {
    // isAnnotationPresent already yields the desired boolean; the original
    // if/else-return-true/false and the intermediate local were redundant.
    return modelClass != null && modelClass.isAnnotationPresent(RestrictedBy.class);
}
import json
import random
from collections import defaultdict

# Build caption triplets from the MS-COCO 2014 validation annotations:
# for each image, pick a reference caption and pair it with a caption from a
# different, randomly chosen image.
with open('/home/gabriel/pracegover_projects/pracegover/dataset/ms_coco_annotations/captions_val2014.json') as file:
    data = json.load(file)

# Group the captions by image id. (`annotation` — the original shadowed the
# builtin `input` here.)
captions = defaultdict(list)
for annotation in data['annotations']:
    captions[annotation['image_id']].append(annotation['caption'])

# Keep only the first 10,000 images (dicts preserve insertion order).
captions = {k: v for i, (k, v) in enumerate(captions.items()) if i < 10000}

image_ids = list(captions.keys())

# Plain dicts suffice here: entries are assigned, never appended to, so the
# original defaultdict(lambda: []) default was never used.
triplets = {'HCI': {}, 'HII': {}}

for index, (img, refs) in enumerate(captions.items()):
    # Reference caption for this image; the remaining ones become the pool.
    ref_caption = random.choice(refs)
    refs = set(refs)
    refs.remove(ref_caption)
    refs = list(refs)

    # Draw a caption from some other image.
    random_id = img
    random_caption = None
    while random_id == img:
        random_id = random.choice(image_ids)
        random_caption = random.choice(captions[random_id])

    triplets['HCI'][index] = [refs, ref_caption, random_caption, 1]

    # HII: truncate the reference caption to a random prefix (25%-50%).
    words = ref_caption.split()
    m = len(words)
    end = random.randint(m // 4, m // 2)
    triplets['HII'][index] = [refs, ' '.join(words[:end]), random_caption, 1]

# NOTE(review): this always prints 2 (the two top-level keys); if the number
# of triplets was intended, it should be len(triplets['HCI']) — confirm.
print(len(triplets))

with open('mscoco_triplets_complete.json', 'w') as file:
    json.dump(triplets, file)
/**
 * Convenience method to execute a command on the given remote session, using
 * the given string as part of the error if it fails to run.
 * <p>
 * Fix: the exec channel is now released in a {@code finally} block, so it is
 * no longer leaked when the command fails or connect() throws; the thread's
 * interrupt status is restored instead of being silently swallowed.
 *
 * @param roboRioSession The ssh session of the roboRio
 * @param command The command to execute
 * @param errorString The error string to put in the exception if an error
 *                    occurs. The return code will be appended to the end
 * @throws JSchException If an ssh error occurs
 * @throws IOException Thrown if there is an io error, or if the command
 *                     fails to run
 */
private void executeCommand(Session roboRioSession, String command, String errorString)
        throws JSchException, IOException {
    m_logger.debug("Running command " + command);
    ChannelExec channel = (ChannelExec) roboRioSession.openChannel(EXEC_COMMAND);
    try {
        channel.setCommand(command);
        channel.connect();

        // Poll for completion, at most ~10 seconds (100 * 100 ms).
        int sleepCount = 0;
        do {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                m_logger.warn("Interrupted exception while waiting for command " + command + " to finish", e);
                Thread.currentThread().interrupt();
            }
        } while (!channel.isClosed() && sleepCount++ < 100);

        int res = channel.getExitStatus();
        if (res != SUCCESS) {
            m_logger.debug("Error with command " + command);
            throw new IOException(errorString + " " + res);
        }
    } finally {
        channel.disconnect();
    }
}
import time
import threading

from mcp9600 import MCP9600


class Thermocouple(threading.Thread):
    """Background thread that samples an MCP9600 thermocouple amplifier and
    exposes 3-sample moving averages of the hot- and cold-junction
    temperatures via ``hot_junction_temp`` / ``cold_junction_temp``.
    """

    def __init__(self):
        print("init")
        super().__init__()
        # Default address - assumes breakout doesn't have cut track (Pimoroni)
        self.mcp9600 = MCP9600(i2c_addr=0x66)
        # Set Thermocouple Type
        self.mcp9600.set_thermocouple_type('K')
        # Published averages; None until three samples have been collected.
        self.cold_junction_temp = None
        self.hot_junction_temp = None

    @staticmethod
    def _average_of_last_three(samples):
        """Trim *samples* in place to its 3 most recent readings and return
        their integer mean, or None while fewer than 3 are available."""
        while len(samples) > 3:
            samples.pop(0)
        if len(samples) == 3:
            return int(sum(samples) / 3.0)
        return None

    def run(self):
        # The duplicated trim/average logic of the original is factored into
        # _average_of_last_three; sampling cadence (two 1 s sleeps per loop)
        # and print output are unchanged.
        hot_temp = []
        cold_temp = []
        while True:
            # Get Hot Junction Temperature (thermocouple)
            hot_temp.append(self.mcp9600.get_hot_junction_temperature())
            time.sleep(1)
            # Get Cold Junction Temperature
            cold_temp.append(self.mcp9600.get_cold_junction_temperature())

            hot_avg = self._average_of_last_three(hot_temp)
            if hot_avg is not None:
                self.hot_junction_temp = hot_avg
                print("Hot ", self.hot_junction_temp)

            cold_avg = self._average_of_last_three(cold_temp)
            if cold_avg is not None:
                self.cold_junction_temp = cold_avg
                print("Cold ", self.cold_junction_temp)

            time.sleep(1)
//! Types and constants specific to 32-bit wasi. These are similar to the types
//! in the `host` module, but pointers and `usize` values are replaced with
//! `u32`-sized types.

#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(dead_code)]

use crate::old::snapshot_0::wasi::*;
use wig::witx_wasi32_types;

// Guest-side pointer and size types: always 32 bits, regardless of host width.
pub type uintptr_t = u32;
pub type size_t = u32;

// The remaining wasi32 type definitions are generated from the witx interface
// description by this proc macro.
witx_wasi32_types!("phases/old/snapshot_0/witx/wasi_unstable.witx");
def main_loop(conn, exname):
    """Run one produce/consume cycle against exchange *exname*.

    Sets up a consumer, publishes ``options.messages`` numbered messages
    followed by a "QUIT" sentinel, then drains them via the consumer loop.
    """
    consumer = Consumer(conn, exname)
    consumer.setup()

    channel = conn.channel()
    for msg_index in range(options.messages):
        send_message(channel, exname, msg_index)
    # Sentinel telling the consumer to stop.
    send_message(channel, exname, message="QUIT")
    channel.close()

    return consumer.loop(timeout=options.timeout)
def confidence_propagation_single(tactics_confidence_list, technique_name, technique_confidence_score):
    """Propagate related tactics' confidence into a technique's score.

    For every tactic related to *technique_name* (per the global
    tactic/technique relationship table), a similarity weight
    ``exp(-|technique_score - tactic_score|)`` is computed and the weighted
    tactic score is added to the running total.

    Args:
        tactics_confidence_list: mapping of tactic name -> confidence score.
        technique_name: name of the technique being scored.
        technique_confidence_score: the technique's own confidence score.

    Returns:
        The technique score augmented by the weighted related-tactic scores.
    """
    new_confidence_score = technique_confidence_score
    for tactic in clt.CODE_TACTICS:
        related = clt.TACTICS_TECHNIQUES_RELATIONSHIP_DF.loc[
            clt.TACTICS_TECHNIQUES_RELATIONSHIP_DF[tactic] == technique_name]
        if related.empty:
            continue
        # exp(-x) replaces the original 1/exp(x): same value, one fewer op
        # and no overflow in the denominator for large gaps. (The original
        # also kept an unused counter `i`; removed.)
        weight = np.exp(-abs(technique_confidence_score - tactics_confidence_list[tactic]))
        new_confidence_score = new_confidence_score + weight * tactics_confidence_list[tactic]
    return new_confidence_score
Unlike many others in Bowie’s entourage, Schwab was less than flamboyant in her dress, and remained resolutely in the background, never giving interviews. Some people, including the journalist Lesley Ann Jones, thought she was for a time his lover. Bowie said, “I’m glad to say, sex is not all there is. There really have to be relationships in your life to make it all worthwhile.” In her biography, writer Wendy Leigh argued that Schwab was a loving mother-figure in Bowie’s life, making up for the emotional absence of his own mother, Peggy, who died in 2001. Journalist Paul Du Noyer said she acted almost as a mobile phone for the star, who refused to have one. After Bowie’s death, Tony Zanetta, a friend of the singer, acknowledged the important role played by Schwab, describing her as his “facilitator”. In a posting on Facebook, Zanetta wrote: "Besides thinking of his family today, my thoughts are very much with Corinne Schwab. Coco has been by David Bowie's side for the past 43 years. "She has been the facilitator for everything that has happened in his remarkable career. My sympathies and best thoughts are with her today. I want to publicly acknowledge her for her loyalty and trust and devotion." Robin Clark, another friend, said: "When we met she [Corinne] had only been working for him for a year... total and undying dedication. Love and our thoughts are also with you." But the biggest tribute Bowie paid Schwab was in the 1987 song he wrote about her, Never Let Me Down. He described it as one of his most personal, and it included the lyrics:
package com.ejlchina.okhttps;

import com.ejlchina.data.Array;
import com.ejlchina.data.Mapper;
import com.ejlchina.data.TypeRef;
import okio.ByteString;

import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.Type;
import java.util.List;

/**
 * Conversion interface: exposes a message body in a variety of shapes —
 * streams, raw bytes, strings, generic Mapper/Array views, or deserialized
 * Java beans. (Original javadoc translated from Chinese.)
 */
public interface Toable {

    /**
     * @return the message body as a byte stream
     */
    InputStream toByteStream();

    /**
     * @return the message body as a byte array
     */
    byte[] toBytes();

    /**
     * @return the message body as a ByteString
     */
    ByteString toByteString();

    /**
     * @return the message body as a character stream
     */
    Reader toCharStream();

    /**
     * @return the message body as a string
     */
    String toString();

    /**
     * @return the message body as a Mapper object (for when you don't want
     *         to define a Java bean)
     */
    Mapper toMapper();

    /**
     * @return the message body as an Array (for when you don't want to
     *         define a Java bean)
     */
    Array toArray();

    /**
     * @param <T> target generic type
     * @param type target class
     * @return the JSON body deserialized into a Java bean
     */
    <T> T toBean(Class<T> type);

    /**
     * @param <T> target generic type
     * @param type target type
     * @return the JSON body deserialized into a Java bean
     */
    <T> T toBean(Type type);

    /**
     * @param <T> target generic type
     * @param type target type reference (preserves generics)
     * @return the JSON body deserialized into a Java bean
     */
    <T> T toBean(TypeRef<T> type);

    /**
     * @param <T> target generic type
     * @param type target element class
     * @return the JSON body deserialized into a list of Java beans
     */
    <T> List<T> toList(Class<T> type);
}
package ease

import (
	"math"
	"testing"
)

// TestEasingFunctions drives every easing function through 20 unit time steps
// (t = 1..20) with begin = 0 and change/duration derived from each function's
// golden table, and asserts each sample stays within 0.03 of the recorded
// expected value. The tables below are golden data — do not edit by hand.
func TestEasingFunctions(t *testing.T) {
	// Expected sample values per easing function, 20 samples each; the last
	// entry doubles as the target end value for that function's run.
	testValues := map[string][]float32{
		"InBack":       {-0.07832505, -0.2862844, -0.58335435, -0.9290112, -1.28273125, -1.6039908, -1.85226615, -1.9870336, -1.96776945, -1.75395, -1.30505155, -0.5805504, 0.46007715, 1.8573548, 3.65180625, 5.8839552, 8.59432535, 11.8234404, 15.61182405, 20},
		"InBounce":     {0.309375, 0.2375, 1.096875, 1.2, 0.546875, 1.3875, 3.346875, 4.55, 4.996875, 4.6875, 3.621875, 1.8, 1.471875, 6.3875, 10.546875, 13.95, 16.596875, 18.4875, 19.621875, 20},
		"InCirc":       {0.025015644561821, 0.1002512578676, 0.22628006671481, 0.40408205773458, 0.63508326896291, 0.92121597166109, 1.2650060048048, 1.6696972201766, 2.1394289005082, 2.6794919243112, 3.2967069115099, 4, 4.8013158464293, 5.7171431429143, 6.771243444677, 8, 9.4643462471473, 11.282202112919, 13.755002001602, 20},
		"InCubic":      {0.0025, 0.02, 0.0675, 0.16, 0.3125, 0.54, 0.8575, 1.28, 1.8225, 2.5, 3.3275, 4.32, 5.4925, 6.86, 8.4375, 10.24, 12.2825, 14.58, 17.1475, 20},
		"InElastic":    {0.01381067932005, 0.0390625, 0.027621358640099, -0.0390625, -0.1104854345604, -0.078125, 0.1104854345604, 0.3125, 0.2209708691208, -0.3125, -0.88388347648318, -0.625, 0.88388347648318, 2.5, 1.7677669529664, -2.5, -7.0710678118655, -5, 7.0710678118655, 20},
		"InExpo":       {0.0076213586400995, 0.0190625, 0.035242717280199, 0.058125, 0.090485434560398, 0.13625, 0.2009708691208, 0.2925, 0.42194173824159, 0.605, 0.86388347648318, 1.23, 1.7477669529664, 2.48, 3.5155339059327, 4.98, 7.0510678118655, 9.98, 14.122135623731, 20},
		"InOutBack":    {-0.223541855, -0.75037104, -1.364792985, -1.85111312, -1.993636875, -1.57666968, -0.384516965, 1.79851584, 5.188123305, 10, 14.811876695, 18.20148416, 20.384516965, 21.57666968, 21.993636875, 21.85111312, 21.364792985, 20.75037104, 20.223541855, 20},
		"InOutBounce":  {0.11875, 0.6, 0.69375, 2.275, 2.34375, 0.9, 3.19375, 6.975, 9.24375, 10, 10.75625, 13.025, 16.80625, 19.1, 17.65625, 17.725, 19.30625, 19.4, 19.88125, 20},
		"InOutCirc":    {0.0501256289338, 0.20204102886729, 0.46060798583054, 0.83484861008832, 1.3397459621556, 2, 2.8585715714572, 4, 5.6411010564593, 10, 14.358898943541, 16, 17.141428428543, 18, 18.660254037844, 19.165151389912, 19.539392014169, 19.797958971133, 19.949874371066, 20},
		"InOutCubic":   {0.01, 0.08, 0.27, 0.64, 1.25, 2.16, 3.43, 5.12, 7.29, 10, 12.71, 14.88, 16.57, 17.84, 18.75, 19.36, 19.73, 19.92, 19.99, 20},
		"InOutElastic": {0.01953125, -0.01953125, -0.0390625, 0.15625, -0.15625, -0.3125, 1.25, -1.25, -2.5, 10, 22.5, 21.25, 18.75, 20.3125, 20.15625, 19.84375, 20.0390625, 20.01953125, 19.98046875, 20},
		"InOutExpo":    {0.00953125, 0.0290625, 0.068125, 0.14625, 0.3025, 0.615, 1.24, 2.49, 4.99, 10.005, 15.0075, 17.50875, 18.759375, 19.3846875, 19.69734375, 19.853671875, 19.9318359375, 19.97091796875, 19.990458984375, 20},
		"InOutQuad":    {0.1, 0.4, 0.9, 1.6, 2.5, 3.6, 4.9, 6.4, 8.1, 10, 11.9, 13.6, 15.1, 16.4, 17.5, 18.4, 19.1, 19.6, 19.9, 20},
		"InOutQuart":   {0.001, 0.016, 0.081, 0.256, 0.625, 1.296, 2.401, 4.096, 6.561, 10, 13.439, 15.904, 17.599, 18.704, 19.375, 19.744, 19.919, 19.984, 19.999, 20},
		"InOutQuint":   {0.0001, 0.0032, 0.0243, 0.1024, 0.3125, 0.7776, 1.6807, 3.2768, 5.9049, 10, 14.0951, 16.7232, 18.3193, 19.2224, 19.6875, 19.8976, 19.9757, 19.9968, 19.9999, 20},
		"InOutSine":    {0.12311659404862, 0.48943483704846, 1.0899347581163, 1.9098300562505, 2.9289321881345, 4.1221474770753, 5.4600950026045, 6.9098300562505, 8.4356553495977, 10, 11.564344650402, 13.090169943749, 14.539904997395, 15.877852522925, 17.071067811865, 18.090169943749, 18.910065241884, 19.510565162952, 19.876883405951, 20},
		"InQuad":       {0.05, 0.2, 0.45, 0.8, 1.25, 1.8, 2.45, 3.2, 4.05, 5, 6.05, 7.2, 8.45, 9.8, 11.25, 12.8, 14.45, 16.2, 18.05, 20},
		"InQuart":      {0.000125, 0.002, 0.010125, 0.032, 0.078125, 0.162, 0.300125, 0.512, 0.820125, 1.25, 1.830125, 2.592, 3.570125, 4.802, 6.328125, 8.192, 10.440125, 13.122, 16.290125, 20},
		"InQuint":      {6.25e-06, 0.0002, 0.00151875, 0.0064, 0.01953125, 0.0486, 0.10504375, 0.2048, 0.36905625, 0.625, 1.00656875, 1.5552, 2.32058125, 3.3614, 4.74609375, 6.5536, 8.87410625, 11.8098, 15.47561875, 20},
		"InSine":       {0.061653325337442, 0.24623318809724, 0.55260159204647, 0.97886967409693, 1.5224093497743, 2.1798695162326, 2.9471967129182, 3.8196601125011, 4.7918806879994, 5.857864376269, 7.0110390333963, 8.2442949541505, 9.550028705681, 10.920190005209, 12.346331352698, 13.819660112501, 15.331092722882, 16.871310699195, 18.430818085443, 20},
		"Linear":       {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20},
		"OutBack":      {4.38817595, 8.1765596, 11.40567465, 14.1160448, 16.34819375, 18.1426452, 19.53992285, 20.5805504, 21.30505155, 21.75395, 21.96776945, 21.9870336, 21.85226615, 21.6039908, 21.28273125, 20.9290112, 20.58335435, 20.2862844, 20.07832505, 20},
		"OutBounce":    {0.378125, 1.5125, 3.403125, 6.05, 9.453125, 13.6125, 18.528125, 18.2, 16.378125, 15.3125, 15.003125, 15.45, 16.653125, 18.6125, 19.453125, 18.8, 18.903125, 19.7625, 19.690625, 20},
		"OutCirc":      {6.2449979983984, 8.7177978870813, 10.535653752853, 12, 13.228756555323, 14.282856857086, 15.198684153571, 16, 16.70329308849, 17.320508075689, 17.860571099492, 18.330302779823, 18.734993995195, 19.078784028339, 19.364916731037, 19.595917942265, 19.773719933285, 19.899748742132, 19.974984355438, 20},
		"OutCubic":     {2.8525, 5.42, 7.7175, 9.76, 11.5625, 13.14, 14.5075, 15.68, 16.6725, 17.5, 18.1775, 18.72, 19.1425, 19.46, 19.6875, 19.84, 19.9325, 19.98, 19.9975, 20},
		"OutElastic":   {12.928932188135, 25, 27.071067811865, 22.5, 18.232233047034, 17.5, 19.116116523517, 20.625, 20.883883476483, 20.3125, 19.779029130879, 19.6875, 19.88951456544, 20.078125, 20.11048543456, 20.0390625, 19.97237864136, 19.9609375, 19.98618932068, 20},
		"OutExpo":      {5.8637222406453, 10.01, 12.941861120323, 15.015, 16.480930560161, 17.5175, 18.250465280081, 18.76875, 19.13523264004, 19.394375, 19.57761632002, 19.7071875, 19.79880816001, 19.86359375, 19.909404080005, 19.941796875, 19.964702040003, 19.9808984375, 19.992351020001, 20},
		"OutInBack":    {4.0882798, 7.0580224, 9.0713226, 10.2902752, 10.876975, 10.9935168, 10.8019954, 10.4645056, 10.1431422, 10, 9.8568578, 9.5354944, 9.1980046, 9.0064832, 9.123025, 9.7097248, 10.9286774, 12.9419776, 15.9117202, 20},
		"OutInBounce":  {0.75625, 3.025, 6.80625, 9.1, 7.65625, 7.725, 9.30625, 9.4, 9.88125, 10, 10.11875, 10.6, 10.69375, 12.275, 12.34375, 10.9, 13.19375, 16.975, 19.24375, 20},
		"OutInCirc":    {4.3588989435407, 6, 7.1414284285428, 8, 8.6602540378444, 9.1651513899117, 9.5393920141695, 9.7979589711327, 9.9498743710662, 10, 10.050125628934, 10.202041028867, 10.460607985831, 10.834848610088, 11.339745962156, 12, 12.858571571457, 14, 15.641101056459, 20},
		"OutInCubic":   {2.71, 4.88, 6.57, 7.84, 8.75, 9.36, 9.73, 9.92, 9.99, 10, 10.01, 10.08, 10.27, 10.64, 11.25, 12.16, 13.43, 15.12, 17.29, 20},
		"OutInElastic": {12.5, 11.25, 8.75, 10.3125, 10.15625, 9.84375, 10.0390625, 10.01953125, 9.98046875, 10, 10.01953125, 9.98046875, 9.9609375, 10.15625, 9.84375, 9.6875, 11.25, 8.75, 7.5, 20},
		"OutInExpo":    {5.005, 7.5075, 8.75875, 9.384375, 9.6971875, 9.85359375, 9.931796875, 9.9708984375, 9.99044921875, 10, 10.00953125, 10.0290625, 10.068125, 10.14625, 10.3025, 10.615, 11.24, 12.49, 14.99, 20},
		"OutInQuad":    {1.9, 3.6, 5.1, 6.4, 7.5, 8.4, 9.1, 9.6, 9.9, 10, 10.1, 10.4, 10.9, 11.6, 12.5, 13.6, 14.9, 16.4, 18.1, 20},
		"OutInQuart":   {3.439, 5.904, 7.599, 8.704, 9.375, 9.744, 9.919, 9.984, 9.999, 10, 10.001, 10.016, 10.081, 10.256, 10.625, 11.296, 12.401, 14.096, 16.561, 20},
		"OutInQuint":   {4.0951, 6.7232, 8.3193, 9.2224, 9.6875, 9.8976, 9.9757, 9.9968, 9.9999, 10, 10.0001, 10.0032, 10.0243, 10.1024, 10.3125, 10.7776, 11.6807, 13.2768, 15.9049, 20},
		"OutInSine":    {1.5643446504023, 3.0901699437495, 4.5399049973955, 5.8778525229247, 7.0710678118655, 8.0901699437495, 8.9100652418837, 9.5105651629515, 9.8768834059514, 10, 10.123116594049, 10.489434837048, 11.089934758116, 11.909830056251, 12.928932188135, 14.122147477075, 15.460095002605, 16.909830056251, 18.435655349598, 20},
		"OutQuad":      {1.95, 3.8, 5.55, 7.2, 8.75, 10.2, 11.55, 12.8, 13.95, 15, 15.95, 16.8, 17.55, 18.2, 18.75, 19.2, 19.55, 19.8, 19.95, 20},
		"OutQuart":     {3.709875, 6.878, 9.559875, 11.808, 13.671875, 15.198, 16.429875, 17.408, 18.169875, 18.75, 19.179875, 19.488, 19.699875, 19.838, 19.921875, 19.968, 19.989875, 19.998, 19.999875, 20},
		"OutQuint":     {4.52438125, 8.1902, 11.12589375, 13.4464, 15.25390625, 16.6386, 17.67941875, 18.4448, 18.99343125, 19.375, 19.63094375, 19.7952, 19.89495625, 19.9514, 19.98046875, 19.9936, 19.99848125, 19.9998, 19.99999375, 20},
		"OutSine":      {1.5691819145569, 3.1286893008046, 4.6689072771181, 6.1803398874989, 7.6536686473018, 9.0798099947909, 10.449971294319, 11.755705045849, 12.988960966604, 14.142135623731, 15.208119312001, 16.180339887499, 17.052803287082, 17.820130483767, 18.477590650226, 19.021130325903, 19.447398407954, 19.753766811903, 19.938346674663, 20},
	}

	// Name -> implementation lookup; must contain every key in testValues.
	easingFunctions := map[string]TweenFunc{
		"InBack":       InBack,
		"InBounce":     InBounce,
		"InCirc":       InCirc,
		"InCubic":      InCubic,
		"InElastic":    InElastic,
		"InExpo":       InExpo,
		"InOutBack":    InOutBack,
		"InOutBounce":  InOutBounce,
		"InOutCirc":    InOutCirc,
		"InOutCubic":   InOutCubic,
		"InOutElastic": InOutElastic,
		"InOutExpo":    InOutExpo,
		"InOutQuad":    InOutQuad,
		"InOutQuart":   InOutQuart,
		"InOutQuint":   InOutQuint,
		"InOutSine":    InOutSine,
		"InQuad":       InQuad,
		"InQuart":      InQuart,
		"InQuint":      InQuint,
		"InSine":       InSine,
		"Linear":       Linear,
		"OutBack":      OutBack,
		"OutBounce":    OutBounce,
		"OutCirc":      OutCirc,
		"OutCubic":     OutCubic,
		"OutElastic":   OutElastic,
		"OutExpo":      OutExpo,
		"OutInBack":    OutInBack,
		"OutInBounce":  OutInBounce,
		"OutInCirc":    OutInCirc,
		"OutInCubic":   OutInCubic,
		"OutInElastic": OutInElastic,
		"OutInExpo":    OutInExpo,
		"OutInQuad":    OutInQuad,
		"OutInQuart":   OutInQuart,
		"OutInQuint":   OutInQuint,
		"OutInSine":    OutInSine,
		"OutQuad":      OutQuad,
		"OutQuart":     OutQuart,
		"OutQuint":     OutQuint,
		"OutSine":      OutSine,
	}

	for easingName, values := range testValues {
		easing := easingFunctions[easingName]
		begin := float32(0)
		end := values[len(values)-1]
		change := end - begin
		duration := float32(len(values))
		// Accumulated time; advanced by one unit per sample inside the subtest.
		var time float32
		t.Run(easingName, func(t *testing.T) {
			for i, value := range values {
				time += 1
				current := easing(time, begin, change, duration)
				// 0.03 absolute tolerance absorbs float32 rounding.
				if math.Abs(float64(current-value)) > 0.03 {
					t.Fatalf("failed %s with value %v \nexpected: %v\ngot: %v\ndiff: %v", easingName, i, value, current, math.Abs(float64(current-value)))
				}
			}
		})
	}
}
Refined mass-critical Strichartz estimates for Schrödinger operators We develop refined Strichartz estimates at $L^2$ regularity for a class of time-dependent Schr\"{o}dinger operators. Such refinements begin to characterize the near-optimizers of the Strichartz estimate, and play a pivotal part in the global theory of mass-critical NLS. On one hand, the harmonic analysis is quite subtle in the $L^2$-critical setting due to an enormous group of symmetries, while on the other hand, the spacetime Fourier analysis employed by the existing approaches to the constant-coefficient equation is not adapted to non-translation-invariant situations, especially with potentials as large as those considered in this article. Using phase space techniques, we reduce to proving certain analogues of (adjoint) bilinear Fourier restriction estimates. Then we extend Tao's bilinear restriction estimate for paraboloids to more general Schr\"{o}dinger operators. As a particular application, the resulting inverse Strichartz theorem and profile decompositions constitute a key harmonic analysis input for studying large data solutions to the $L^2$-critical NLS with a harmonic oscillator potential in dimensions $\ge 2$. This article builds on recent work of Killip, Visan, and the author in one space dimension.
package dbRedis

import (
	"fmt"
	"os"

	"github.com/go-redis/redis/v7"
)

// NewRedisClient connects to the Redis instance described by the
// RedisAddress and RedisPassword environment variables (database 0).
// The connection is verified with PING; on failure the client is closed
// and an error wrapping the underlying cause is returned.
func NewRedisClient() (*redis.Client, error) {
	client := redis.NewClient(&redis.Options{
		Addr:     os.Getenv("RedisAddress"),
		Password: os.Getenv("RedisPassword"),
		DB:       0,
	})

	// Ping().Err() surfaces the real network/auth error instead of
	// discarding it, as the old string comparison against "PONG" did.
	if err := client.Ping().Err(); err != nil {
		client.Close() // don't leak the connection pool on failure
		return nil, fmt.Errorf("connect to db error: %w", err)
	}
	return client, nil
}

// FlushDB removes every key from the client's currently selected database.
// The error is intentionally ignored (best-effort), matching prior behavior.
func FlushDB(rc *redis.Client) {
	rc.FlushDB()
}

// RedisKeysStruct names the Redis keys used by the application; the
// comment after each field records the Redis data type stored under it.
type RedisKeysStruct struct {
	UserIdName              string // hash
	UserNameId              string // Not implemented
	UserIdPermissions       string // hash
	UserNotAuthorizedAccess string // set
	UserIDTasks             string // list; NOTE(review): never initialized below — confirm intended
	AllPermissions          string // string
}

// RedisKeys holds the concrete key names. UserIDTasks is deliberately
// left at its zero value to preserve the original behavior.
var RedisKeys = RedisKeysStruct{
	UserIdName:              "user:id:name",
	UserNameId:              "user:name:id",
	UserIdPermissions:       "user:id:permissions",
	UserNotAuthorizedAccess: "user:notauthorizedaccess",
	AllPermissions:          "allPermissions",
}
Intermediates in the catalytic cycle of copper-quinoprotein amine oxidase from Escherichia coli. Investigations on the reduction of copper quinoprotein amine oxidases (EC 1.4.3.6) by substrate indicate that the nature of the reduced enzyme species formed varies, as judged from the spectroscopic data reported in the literature for different enzymes and substrates. The availability of substantial amounts of overproduced, homogeneous Escherichia coli amine oxidase (ECAO) enabled us to investigate this aspect with a number of different approaches: quantitative titration of enzyme with substrate, stopped-flow kinetic spectrophotometry (anaerobic and semianaerobic), EPR spectroscopy of stable intermediates in the catalytic cycle, and conversions with H2O2 as the oxidant. Reduction of ECAO by a variety of substrates led to spectra (UV/Vis, EPR) identical to those that have been ascribed to the semiquinone form of the topaquinone cofactor. The extent of semiquinone formation was enhanced in the presence of KCN, but the properties of the artificially induced semiquinone were different from those of the spontaneously induced one, as shown by the spectroscopic data and the reactivity toward O2 and H2O2. On titrating ECAO at high concentrations with substrate, evidence was obtained that disproportionation takes place of the semiquinone formed, the reaction most probably proceeding via intermolecular electron transfer, leading to a topaquinone- and Cu-containing enzyme species that is able to perform substrate conversion. The latter, as well as OH•, is probably also formed when H2O2 replaces O2 as oxidant, explaining why substrate conversion with concomitant enzyme inactivation occurs under this condition. Formation of the semiquinone was always preceded by that of a hitherto unknown species with an absorbance maximum at 400 nm. 
The structure proposed for this species is a protonated form of the aminoquinol cofactor, the Zwitter ionic structure being stabilized by amino acid residues in the active site having opposite charges. Based on the properties observed and the moment of appearance during conversions, a proposal is made for the sequence in which the three reduced enzyme species convert into each other. Investigations on the reduction of copper quinoprotein amine oxidases (EC 1.4.3.6) by substrate indicate that the nature of the reduced enzyme species formed varies, as judged from the spectroscopic data reported in the literature for different enzymes and substrates. The availability of substantial amounts of overproduced, homogeneous Escherichia coli amine oxidase (ECAO) enabled us to investigate this aspect with a number of different approaches: quantitative titration of enzyme with substrate, stopped-flow kinetic spectrophotometry (anaerobic and semianaerobic), EPR spectroscopy of stable intermediates in the catalytic cycle, and conversions with H 2 O 2 as the oxidant. Reduction of ECAO by a variety of substrates led to spectra (UV/Vis, EPR) identical to those that have been ascribed to the semiquinone form of the topaquinone cofactor. The extent of semiquinone formation was enhanced in the presence of KCN, but the properties of the artificially induced semiquinone were different from those of the spontaneously induced one, as shown by the spectroscopic data and the reactivity toward O 2 and H 2 O 2 . On titrating ECAO at high concentrations with substrate, evidence was obtained that disproportionation takes place of the semiquinone formed, the reaction most probably proceeding via intermolecular electron transfer, leading to a topaquinone-and Cu 1؉ -containing enzyme species that is able to perform substrate conversion. 
The latter, as well as OH⅐, is probably also formed when H 2 O 2 replaces O 2 as oxidant, explaining why substrate conversion with concomitant enzyme inactivation occurs under this condition. Formation of the semiquinone was always preceded by that of a hitherto unknown species with an absorbance maximum at 400 nm. The structure proposed for this species is a protonated form of the aminoquinol cofactor, the Zwitter ionic structure being stabilized by amino acid residues in the active site having opposite charges. Based on the properties observed and the moment of appearance during conversions, a proposal is made for the sequence in which the three reduced enzyme species convert into each other. Copper-quinoprotein amine oxidases (EC 1.4.3.6) consist of two identical subunits, two copper ions and one or two molecules of the cofactor topaquinone (TPQ). 1 The number of TPQs, as determined by titration with hydrazines, varies from enzyme to enzyme (McIntire and Hartman, 1993). As will be reported elsewhere, 2,3 the enzyme investigated here, Escherichia coli amine oxidase (ECAO), contains TPQ in a ratio of 1.0 per enzyme molecule. Distance calculations based on 19 F NMR data of ECAO derivatized with fluorine-substituted phenylhydrazines 3 as well as resonance Raman spectroscopy (Moënne-Loccoz et al., 1995) have indicated that TPQ and Cu 2ϩ are rather far apart from each other in this enzyme and probably also in the other amine oxidases. EPR studies (to be presented elsewhere) have shown that ECAO has only one type of Cu 2ϩ with three nitrogens as ligands, similar to other amine oxidases. The mechanism of action of copper-quinoprotein amine oxidases has recently been reviewed (Klinman and Mu, 1994;Knowles and Dooley, 1994). Briefly, the conversion occurs in the following way (see also the reactions given below, showing the reaction steps as well as the redox couples involved, the latter causing the balances not to fit). 
The amine group of the substrate adds to the C-5 carbonyl group of TPQ, after which the aldehyde is released from the enzyme, yielding the cofactor in its aminoquinol form (TPQH2·NH2) (Reaction 1); as is clearly visible in some enzymes, the latter is able to transfer one electron to the Cu2+ in a reversible way so that the semiquinone form of this cofactor (TPQH·endash·NH; Warncke et al. (1994)) is formed and an equilibrium exists between TPQH2·NH2/Cu2+ and TPQH··NH/Cu1+ (Reaction 2); it has been suggested that O2 reacts with the latter couple, leading to cofactor oxidation and NH3 plus H2O2 release (Reaction 3). RCH2NH2 + TPQ/Cu2+ → RCHO + TPQH2·NH2/Cu2+ REACTION 1. In amino acid conversions with pyridoxal phosphate-dependent enzymes, the occurrence of ketimine and aldimine cofactor–substrate adducts as intermediates is well established. A number of arguments exist to postulate the occurrence of similar species in the reductive half-reaction of the TPQ-dependent reaction cycle. Reductive trapping experiments of enzyme in the presence of amines (Klinman, 1987, 1990) have provided evidence for the formation of a Schiff base adduct. [* The costs of publication of this article were defrayed in part by the payment of page charges. This article must therefore be hereby marked "advertisement" in accordance with 18 U.S.C. Section 1734 solely to indicate this fact. ‡ To whom correspondence should be addressed.] The fact that ammonia remains attached to the cofactor after aldehyde release (Janes and Klinman, 1991) indicates that hydrolysis occurs of the tautomeric, product Schiff base adduct. However, so far stopped-flow spectrophotometry has not provided clear evidence for the postulated intermediates.
On mixing lentil seedling amine oxidase with putrescine or p-dimethylaminomethylbenzylamine (Bellelli et al., 1991), only the semiquinone form of the cofactor was observed, although the conversion of cofactor-substrate adduct into aminoquinol cofactor plus (released) aldehyde was considered to be ratelimiting in the catalytic cycle. On mixing bovine plasma amine oxidase with benzylamines substituted with electron-donating substituents at the paraposition , spectra were observed having an absorption maximum between 400 and 500 nm. However, these were ascribed to a tautomeric (quinonoid) form of the product Schiff base, whereas it was proposed that the 340-nm intermediate observed with unsubstituted benzylamine belongs to the substrate Schiff base . These results suggest that conversion of the product Schiff base occurs so fast that it cannot be detected with common stopped-flow equipment. However, the fact that reductive trapping of the substrate Schiff base and exchange of C-2 hydrogens of the substrate with water hydrogens (Summers et al., 1979;Yu, 1988;Coleman et al., 1989Coleman et al., , 1991 have been found with certain enzyme/substrate combinations indicates that substantial levels of the postulated intermediates should be present during these conversions. This prompted us to look for detectable intermediates in the reductive half-reaction by using adequate concentrations of ECAO with good and moderate substrates like phenylethylamine (PEA), tyramine, and tryptamine as well as with methylamine, which is not a substrate (as judged from the fact that no O 2 consumption is observed in the assay) but is able to induce the semiquinone form of the cofactor (Moënne-Loccoz et al., 1995). It has been reported (Dooley et al., 1991) that on incubating enzyme and substrate anaerobically, the amount of Cu 1ϩ formed exceeds that of TPQH ⅐ ⅐NH. 
An explanation could be that disproportionation occurs according to the following, and that the TPQ/Cu 1ϩ couple formed in this is able to oxidize substrate. In amine oxidases containing 2 TPQs/enzyme molecule, this could occur via intramolecular electron transfer. However, since an ECAO molecule contains only one TPQ, if the proposed disproportionation also applies to this enzyme, this would imply the occurrence of intermolecular electron transfer. Therefore, anaerobic titrations were carried out of ECAO with substrate at conditions presumed to be favorable for disproportionation. The addition of H 2 O 2 , one of the reaction products of the catalytic cycle, to amine oxidase incubated with substrate under anaerobic conditions, leads to hydroxyl radical (OH ⅐ ) production (Castellano et al., 1993). Since H 2 O 2 could substitute for O 2 or just oxidize one of the reduced cofactor forms, the effect of this compound under anaerobic as well as that of catalase under semiaerobic conditions was studied for ECAO. Depending on the amine oxidase used, TPQH ⅐ ⅐NH/Cu 1ϩ is observed or not when adding substrate to enzyme under anaerobic conditions. The formation of semiquinone can be (artificially) induced by adding KCN because cyanide is able to pull the equilibrium (Reaction 2) to the right hand side by binding to Cu 1ϩ (Dooley et al., 1991). This trick is important as it enables spectroscopic studies of the semiquinone to be performed at low temperature (the equilibrium (Reaction 2) shifts to the left hand side on lowering the temperature). However, resonance Raman spectroscopy showed already that the properties of spontaneous and artificially induced semiquinone in ECAO are dissimilar (Moënne-Loccoz et al., 1995). This was further investigated by UV/Vis and EPR spectroscopy. EXPERIMENTAL PROCEDURES Materials-All reagents were of the highest purity commercially available and were used without further purification. 
All solutions were prepared with demineralized water, further deionized by passage through a Waters Milli-Q system to a resistance Ͼ17 M⍀/cm. ECAO was overproduced and purified as described. 2 Enzyme activity was routinely measured using a biological oxygen monitor with 10 mM PEA as the substrate in 0.1 M potassium phosphate, pH 7.0, containing 2 g/ml catalase at 37°C. It was assumed that the O 2 concentration in this mixture (1.6 ml) had a value of 0.199 mM. Enzyme concentrations were determined spectrophotometrically using a value of 1.67. 2 Specific activities were calculated by using an oxygen concentration of 0.199 mM in a 1.6-ml assay. The purified enzyme had a specific activity of 17.6 units/mg using 200 M PEA and 0.199 mM O 2 at 37°C. 1 unit is defined as the amount of enzyme needed to oxidize 1 mol of substrate/min. UV/Vis Spectrophotometry of Substrate-reduced Enzyme-A cuvette equipped with a rubber septum was filled with 500 l of 50 mM MOPS, pH 7.5, and flushed with argon for 5 min. Subsequently, 10 l, 269 M ECAO was added with a syringe through the septum, and the solution was flushed again for 1 min with argon. To reduce the enzyme fully, 1 l of 0.1 M substrate (PEA, tryptamine, or methylamine) in water was added with a gas-tight syringe (the substrate stock solutions were bubbled with a stream of nitrogen gas before application). The solutions were stirred with a small magnetic bar, and a continuous stream of argon was led over them. Optical absorbance spectra were measured, prior to and after the addition of substrate, with a Hewlett-Packard 8452A photodiode array spectrophotometer. In anaerobic titration experiments with PEA, 490 l of ECAO (375.8 M in 50 mM MOPS buffer, pH 7.5) was titrated with aliquots of 1 l of PEA (25 mM). Spectra were taken 2 min after the addition had taken place. The Effect of Additives and pH on Substrate-reduced Enzyme-Enzyme was fully reduced with substrate, as described above. 
Subsequently, additives (1 l of 1 M KCN or 1 l of 0.9 M H 2 O 2 ) were added anaerobically with a gas-tight syringe to the cuvette. Rapid Scanning Stopped-flow Spectrophotometry-Anaerobic rapid scanning stopped-flow experiments were carried out at 10, 20, and 37°C in 0.1 M potassium phosphate, pH 7.0. ECAO (79 M) was mixed with equal volumes of PEA (395 M). Anaerobiosis was obtained by flushing the enzyme and substrate solutions with argon, after which (final concentrations) 10 g/ml glucose oxidase (2 units), 2.5 g/ml catalase, and 4 mM glucose were added before the solutions were transferred to the syringes. Semiaerobic rapid scanning stopped-flow experiments were carried out at 20°C in the way described above except that the solutions did not contain glucose oxidase and glucose, and the concentrations of enzyme and substrate were somewhat different (67 M ECAO, 656 M PEA or tyramine). Furthermore, experiments were carried out in the absence and presence of catalase (33 g/ml). Data acquisition was performed with a Scientific PQ/SF-53 preparative quench/stopped-flow with an EG&G Princeton Applied Research 1024-element photodiode array detector (model 1461). Forty absorption spectra were recorded in each experiment with an automated subtraction of a reference spectrum. The reference spectrum was defined as the absorption of the buffer with substrate and, if applicable, with glucose oxidase, catalase, and glucose. Spectra were taken from 350.1 to 605.9 nm with a 2-nm resolution and with a 6 -180-ms interval. X-Band EPR Spectroscopy-EPR spectra were recorded at room temperature on a Varian E9 EPR spectrometer operating at 9.23 GHz. Spectra were taken in a quartz flat cell (2 ϫ 5 ϫ 45 mm) with 378 M ECAO in 50 mM potassium phosphate buffer, pH 7. Spectra were obtained with PEA (3.7 mM) and with combinations of KCN (6 mM) and PEA (3.7 mM), and PEA (18 mM) with H 2 O 2 (16 mM). Spectra were obtained 6 min after the mixing of substrate and additives. 
All EPR experiments were performed under anaerobic conditions. Enzyme and substrate solutions were flushed with argon, and the flat cell was filled in a nitrogen atmosphere glove box and sealed with Teflon stoppers. Fig. 1 shows the absorption spectrum of ECAO (in all panels indi-cated with 0), having a shape that is typical for the oxidized form of copper-quinoprotein amine oxidases. Ten seconds after adding excess PEA or tryptamine under anaerobic condition in a cuvette, a steady state was already obtained since the spectrum taken at that moment did not change further (Fig. 1, A and B). The spectra showed the disappearance of the broad 480-nm band, which has been attributed to TPQ in the oxidized enzyme ) (Scheme 1A), and the appearance of twin peaks at 440 and 468 nm, as well as a shoulder at 360 nm, characteristics also found in other amine oxidases and ascribed (Dooley et al., 1991) to the semiquinone in the TPQH ⅐ ⅐NH/Cu 1ϩ couple (Scheme 1E). On performing a similar experiment with methylamine ( Fig. 1C), only a slight decrease of the absorbances of the 480-nm band occurred, and those in the 300 -420-nm region were raised (especially around 312 and 380 nm, as judged from a difference spectrum). In the next 2.5 min, only a steady decrease at 480 nm with a concomitant increase at 312 nm was observed. A steady state was obtained, lasting for 3.5 min, after which semiquinone formation took place, a process that was completed in 3 min. Anaerobic Reduction of ECAO with Excess Substrate- Although slight differences exist between the final spectra resulting from enzyme treated with each substrate (additional absorbance above 500 nm in the case of methylamine and around 400 nm in the case of tryptamine), the spectra ascribed FIG. 1. UV/Vis absorption spectra of ECAO anaerobically reduced with PEA, tryptamine, and methylamine. Each panel shows the initial spectrum (spectrum 0) of untreated 5.3 M ECAO in 50 mM MOPS buffer, pH 7.5. 
The amines were added anaerobically to a concentration of 196 M. Spectra were taken after 10 s for PEA (spectrum 1, panel A) and tryptamine (spectrum 1, panel B) and after 10 s (spectrum 1), 2.5 min (spectrum 2), and 9 min (spectrum 3) for methylamine (panel C). SCHEME 1.Redox and protonated forms of the cofactors in ECAO. Protonation of TPQ and TPQH ⅐ ⅐NH and the protonation state of groups in the active site, as proposed by Hartmann and Klinman (1991) and Warncke et al. (1994). The species correspond with the formula used in the text, in the following way: oxidized enzyme (species A), TPQ/Cu 2ϩ ; tautomeric Schiff base (species B); the aminoquinol form of the enzyme (species C); 400-nm species (species D), TPQH 2 ⅐NH 3 ϩ /Cu 2ϩ ; semiquinone (species E), TPQH ⅐ ⅐NH/Cu 1ϩ . to the semiquinone are similar in shape and height. This suggests that the semiquinone was identical and induced at the same level in all cases, although the time required for its formation was very long with methylamine. Admittance of O 2 immediately converted the spectra into that of oxidized enzyme. Anaerobic Titration of ECAO with PEA-To avoid interference of remaining O 2 with the results of anaerobic titrations of ECAO with PEA, a very high concentration of enzyme was used (184.1 nmol in 0.49 ml of 50 mM MOPS buffer, pH 7.5), and the cuvette was flushed for a long time with argon. During the addition of the first four aliquots of PEA (100 nmol, Fig. 2A), a decrease of the absorbance at 480 nm occurred (10% of the total absorbance, Fig. 2B, monitored at 500 nm to avoid contribution of semiquinone formation to the absorbance value as much as possible) with no increase in absorbance in the spectral region presented and no signs for semiquinone formation. Subsequently, a substantial decrease of A 500 nm occurred with a concomitant increase of A 350 nm . 
The titration was completed after the addition of 11 aliquots of substrate (275 nmol of PEA), as judged from the observation that further additions did not lead to spectral changes. Based on the amount of enzyme used and the fact that ECAO contains only 1.0 molecule of hydrazinetitratable TPQ/enzyme molecule, 2 it appears that 1.5 molecules of PEA are converted per TPQ molecule. The observation that a decrease of the A 500 nm value occurred during the addition of approximately the first 1 ⁄3 of total PEA required (4 of the 11 aliquots) means that enzyme reduction took place and not consumption of remaining O 2 . Using the absorbances in Figs. 1 and 2, it is estimated that only 34% of the amount of semiquinone expected (based on the experiments shown in Fig. 1) was finally formed in the anaerobic titration (note that the experiments in Fig. 1 were performed with a 70 times lower enzyme concentration). Factors Affecting the Semiquinone Level-Absorption spectra of ECAO measured in buffers with a pH ranging from 5 to 10 were virtually the same. However, when excess PEA was added to these samples under anaerobic conditions, the final spectra observed differed substantially (Fig. 3A). At pH 5, significantly more absorbance was present around 400 nm than at pH 7.5, whereas at pH 10 a shapeless spectrum with low absorbances was obtained. Since the spectrum of the semiquinone as such is not affected by pH, as judged from the similar values for the maxima of the twin peaks, it seems that optimal semiquinone formation occurs at pH 7.5 (Fig. 3C, taking the contribution of the 400-nm species (see below) at 468 nm into account), a lower amount is present at pH 5, and virtually nothing is present at pH 10. Difference spectra (Fig. 3B) suggest the presence of a species with a maximum at 400 nm at pH 5, and one with a maximum around 310 nm at pH 10. The presence of 0.1 M KCl had no effect on the shape or height of the semiquinone spectrum. 
However, when ECAO was mixed with PEA plus KCN (2 mM final concentration) the spectrum obtained (Fig. 4) showed a 36% increase in height and a blue shift of the maxima (440 3 436 nm; 468 3 462 nm), as compared with that of the semiquinone in the absence of KCN. The same spectrum was obtained when the semiquinone was first induced by PEA, after which KCN was added. The oxidative half-reaction appeared to be blocked because admittance of O 2 did not affect the KCN-generated semiquinone spectrum. On adding H 2 O 2 to PEA-reduced ECAO, the spectrum of the semiquinone disappeared, whereas a new band appeared at 400 nm in a time span of 8 min (Fig. 5A). In the next minute, the latter disappeared, and the spectrum of oxidized enzyme came up. Virtually the same results were obtained with tryptamine as the substrate (Fig. 5B), except that the absorbances in the 300 -350-nm region were rather high (which could originate from the tryptaldehyde formed). On the other hand, addition of H 2 O 2 caused the immediate disappearance of the methylamine-induced semiquinone with concomitant formation of the oxidized enzyme, without any signs of an intermediate 400-nm species. Fig. 5C shows the difference spectra of the final spectra obtained of conversions taking place in the presence of H 2 O 2 minus the spectrum of oxidized enzyme. The spectra show a negative band at 480 nm in the case of tryptamine and PEA. The H 2 O 2 -treated enzyme was still active because subsequent addition of PEA or tryptamine immediately produced the 400-nm species (but hardly semiquinone), and after the conversion went to completion, the enzyme attained the oxidized state again. However, difference spectra taken after the second round in the way indicated above showed that even a further decrease of the 480-nm absorbance had taken place. Since the spectrum of oxidized enzyme was not affected by H 2 O 2 , the decrease observed is related to turnover of the enzyme. 
Rapid Scanning, Stopped-flow Spectrophotometry-ECAO was anaerobically mixed with PEA at 10, 20, and 37°C in a stopped-flow apparatus, and the reactions were monitored with a photodiode array spectrophotometer. Within the time span between mixing and scanning (about 6 ms), even at 10°C complete disappearance of the 480-nm band had occurred, the 400-nm intermediate had already attained its highest level, and semiquinone formation had already started. This is con-cluded from the difference spectra, obtained by subtracting the subsequent scans from the first one (Fig. 6A), showing that the 400-nm species disappears with concomitant formation of the semiquinone. From the time required for the reductive halfreaction to go to completion (1.2, 0.6, and 0.2 s at 10, 20, and 37°C, respectively) and from the spectra taken from the final state (Fig. 6B), it appears that the velocity and level of semiquinone formation attained are highest at 37°C (note that the reliability of the spectrophotometer decreases progressively below 400 nm so that it is unclear whether the 360-nm shoulder of the semiquinone, as observed in Fig. 1, is present or not). On rapid mixing of ECAO with tyramine under conditions in which no precautions were taken to prevent entrance of O 2 in the device, causing a semiaerobic condition, a similar sequence of reactions was observed but with some deviations due to the fact that also the oxidative half-reaction took place, leading to a complete conversion of the substrate added. Fig. 7A shows that just after the first 6 ms, some absorbance at 480 nm (about 20%) is still present, the spectrum of the 400 nm species is dominating, and no indications for the presence of the semiquinone species are observed. In the next phase (between 6 ms and 0.6 s, Fig. 7B), the 480-nm absorbance decreases and the level of the 400-nm species increases, again without any indication for the presence of the semiquinone species. After that (between 0.6 and 17 s, Fig. 
7C), the level of the 400-nm species dropped and semiquinone formation took place. The phases in the conversion were also detected by inspecting the traces presented in Fig. 7D, upper part, being indicative (but not conclusive) for the species involved. They suggest that 1.5 s after mixing, virtually steady state levels of the species are attained and that after about 60 s, the substrate is nearly converted, as shown by the decrease in the A 404 nm and the increase of the A 500 nm , respectively. Difference spectra in the time span from 60 to about 100 s show that the concentrations of the 400-nm species and the semiquinone decrease, whereas that of oxidized enzyme (with a maximum ostensibly shifted to the right, due to the presence of semiquinone of which the concentration decreases concomitantly with the increase of the oxidized enzyme during this phase of the conversion) increases (Fig. 7E, upper part). In the presence of catalase, the time span for the complete conversion of tyramine under the semiaerobic condition was given of the spectrum at pH 5 minus that at pH 7.5 (spectrum 1) and that at pH 10 minus that at pH 7.5 (spectrum 2). The variation in species formation as a function of pH is shown by the A 468 nm and A 404 nm lines presented in panel C. enormously reduced, although the absorbance traces suggested that the same sequence of reaction steps took place (Fig. 7D, lower part). As shown in Fig. 7E, lower part, in the final phase, between 2 and 3 s after mixing, the 400-nm species is directly converted into oxidized enzyme, and the semiquinone is not detected. X-band EPR Spectroscopy-The final state obtained after mixing high concentrations of enzyme with excess substrate anaerobically in the absence and presence of the additives mentioned above was also monitored with X-band EPR at room temperature to detect TPQH ⅐ ⅐NH and Cu 2ϩ . Fig. 
8A shows that Cu 2ϩ is present in all cases but that the amount was substantially lower when PEA was added and even more in the presence of KCN or H 2 O 2 as additives ( Table I). The signal of an organic free radical was present in PEA-reduced ECAO, of which the intensity increased when KCN was present (Table I). On the other hand, the signal was virtually absent in the presence of H 2 O 2 (the small signal seen showed a distortion of the first hyperfine line and of the g perpendicular region). As shown in Fig. 8B for PEA-reduced enzyme, the characteristic FIG. 5. The effect of H 2 O 2 on the UV/Vis spectra of PEA-reduced ECAO. ECAO (5.3 M in 50 mM MOPS buffer, pH 7.5) was reduced with 196 M PEA in the usual way. The solution was anaerobically brought to a concentration of 1.8 mM H 2 O 2 . Difference spectra (panel A) are presented (taking the spectrum of the solution before the addition of H 2 O 2 as a reference) for 10 s, 3 min, and 8 min after the addition of H 2 O 2 had taken place. Panel B shows the difference spectra after 10 s, 2 min, and 6 min for tryptamine-reduced ECAO treated with H 2 O 2 . Panel C shows the difference spectra of spectra of samples in which ECAO had finally attained the oxidized state (9 min after H 2 O 2 addition) minus the spectrum of untreated ECAO, for methylamine (dashed line), tryptamine (dotted line), and PEA (continuous line). FIG. 6. Rapid scanning, stopped-flow spectrophotometry of ECAO mixed with PEA under anaerobic conditions. Anaerobic solutions of ECAO and PEA were prepared as indicated under "Experimental Procedures." The experiment was carried out at 10°C. Difference spectra of the spectrum taken at a certain time minus the first one (taken 6 ms after mixing) are presented in panel A. The first five spectra were taken at intervals of 30 ms, the others at 60-ms intervals. 
The spectra of the mixtures after it was sure that equilibrium was attained (4 min) are given in panel B for experiments carried out at 10 (spectrum 1), 20 (spectrum 2), and 37°C (spectrum 3). hyperfine structure ascribed to TPQH· ·NH (Dooley et al., 1990, 1991) is present, and a g value of 2.004 ± 0.0005 was calculated. The addition of KCN not only lowered the Cu²⁺ content further and raised that of TPQH· ·NH (Table I), but it also broadened the hyperfine lines (Fig. 8B). Furthermore, the power saturation profiles of the PEA-induced semiquinone in the absence and presence of KCN (Fig. 8C) showed a significant difference of 44 mW at half-saturation. DISCUSSION On incubating ECAO anaerobically with the substrates PEA or tyramine or with the non-substrate methylamine for the appropriate time, a stable absorption spectrum was obtained in which the contribution of that of the semiquinone, TPQH· ·NH, was clearly visible. Further evidence for formation of the semiquinone was provided by the EPR spectra taken of PEA-reduced ECAO, showing its characteristic features. Thus, FIG. 7. Rapid scanning, stopped-flow spectrophotometry of ECAO mixed with tyramine under semiaerobic conditions. The solutions of ECAO and tyramine were prepared as indicated under "Experimental Procedures." The experiment was carried out at 20°C. Panel A shows the spectra of the mixture taken 6 (spectrum 1) and 12 ms (spectrum 2) after mixing. Panel B shows difference spectra of the spectra taken at time intervals of 120 ms after mixing minus spectrum 1 from panel A. Panel C shows also difference spectra taken at time intervals of 120 ms but from 600 ms after mixing and with the spectrum taken at that time (spectrum 5, panel B) as a reference. Panel D shows the course of the reaction as presented by the A 404 nm , A 468 nm , and A 500 nm traces for experiments in the absence (upper part) and presence (lower part) of catalase. 
Panel E, upper part, shows the difference spectra of the events taking place between 60 and 110 s of the reaction taking place in the absence of catalase (see panel D, upper part). Panel E, lower part, shows that of the events taking place between 2.0 and 2.8 s in the presence of catalase (see panel D, lower part). ECAO is an enzyme for which the equilibrium constant of Reaction 2 has a value enabling detection of the semiquinone. On adding KCN to the mixture, the properties of the semiquinone were affected, as judged from the induction of a blue shift in the absorption maxima and the power saturation behavior in the EPR spectrometer. The ability of KCN to act as a promotor of semiquinone formation has been ascribed (Dooley et al., 1991) to the affinity of cyanide for Cu 1ϩ , shifting the equilibrium (Reaction 2) to the right hand side. Although the complexation of the reduced inorganic cofactor might affect the properties of the semiquinone form of the organic cofactor, in principle, cyanide being a nucleophile, the organic cofactor could also be a candidate for reacting with cyanide. A precedent already exists since the quinone cofactor pyrroloquinoline quinone forms an adduct with cyanide at the C-5 carbonyl position, even in the presence of an excess of water (Dekker et al., 1982). However, since the absorption maxima of the semiquinone already present shifted immediately upon addition of KCN and reaction with TPQH ⅐ ⅐NH would require replacement of the NH 2 group by cyanide, the latter possibility seems unlikely. It is tentatively concluded, therefore, that cyanide affects the properties of TPQH ⅐ ⅐NH by binding to Cu 1ϩ . Anyhow, in view of the use of KCN as an additive in experiments to induce sufficient amounts of semiquinone at the low temperature required for certain spectroscopic techniques (e.g. ESEEM studies (Warncke et al., 1994)), it should be realized in these cases that the presence of cyanide could affect the results. 
It has been suggested that the TPQH ⅐ ⅐NH/Cu 1ϩ is the primary species that reacts with O 2 in the oxidative half-reaction (Dooley et al., 1991). Apparently, the affinity of this couple for O 2 is so high that its spectrum is not observed in aerobic conversions. However, with the semiaerobic conditions used in Fig. 7, it was detected (Fig. 7D, upper part; Fig. 7E, upper part) in the phase where the level of the 400-nm species and the semiquinone steadily decreased and that of the oxidized enzyme increased. In line with this, in the presence of catalase, invoking a higher O 2 tension, the semiquinone is not observed in the final phase (Fig. 7E, lower part). Similarly, the semiquinone disappears immediately on adding H 2 O 2 , but binding of cyanide to it prevents its conversion with H 2 O 2 and even with O 2 . All these observations confirm the idea that the TPQH ⅐ ⅐NH/Cu 1ϩ couple plays a role in the oxidative half part of the cycle, but it cannot yet be decided whether it is the primary species with which O 2 reacts. The results described here clearly show the presence of a 400-nm species under certain circumstances. Since it was obadditions. The effects of KCN are presented in panel B. Conditions were similar to those in panel A, except that the modulation amplitude was 1 G and the power was 2 mW. The quantitative results of the experiments presented in panels A and B are presented in Table I. Panel C shows the microwave saturation behavior of ECAO semiquinone in the absence and presence of KCN (note that 0 dB ϭ 200 mW). FIG. 8. X-band EPR spectroscopy of ECAO at room temperature, with and without the presence of substrate and additives. The EPR spectra were obtained with approximately 90 l of ECAO (378 M ECAO in 50 mM potassium phosphate buffer, pH 7). The spectra were measured at room temperature with the following settings: microwave power, 200 mW; frequency, 9.515 GHz; modulation amplitude, 10 G. 
Panel A shows the effect of anaerobic substrate and additive served in substrate-reduced enzyme preparations at anaerobic conditions, especially at pH 5, and enzyme-substrate or enzyme-product adducts will be absent when the equilibrium situation is reached, the 400-nm species must be a protonated form of the reduced enzyme for which a structure is proposed in Scheme 1 (species D, indicated with the formula TPQH 2 ⅐NH 3 ϩ ). To stabilize the Zwitter ionic structure, active site amino acid residues of opposite charge may be involved. From its proposed place in the chain of reactions, it is clear that semiquinone (species E) formation is always preceded by that of species D, as indicated by the stopped-flow results, and that species C (TPQH 2 ⅐NH 2 ), having a maximum around 310 nm, is present at high pH. Why has the 400-nm species not been observed in experiments with other amine oxidases? Certain amine oxidases, e.g. bovine plasma amine oxidase , do not show semiquinone formation upon reduction of enzyme under anaerobic conditions, whereas others show high levels of semiquinone under these conditions, e.g. lentil seedling amine oxidase (Bellelli et al., 1991). Thus it could be imagined that, depending on the type of amine oxidase, formation of one of the three species of reduced enzyme proposed is favored, in that view ECAO being an enzyme stabilizing species D. Furthermore, it should also be realized that conditions like pH, temperature, and enzyme concentration affect the equilibria, which can be decisive as to whether the 400-nm species is observed or not, as illustrated here by the effect of these variables on the level of semiquinone detected in substrate-reduced ECAO. The titration experiments at high enzyme concentrations showed that far more substrate is consumed than expected, based on the assumption that only one TPQ is involved in the conversion. 
It must be concluded, therefore, that Cu²⁺ also participates in the reaction, in line with what was found by EPR spectroscopy of PEA-reduced ECAO in another experiment conducted at lower enzyme concentration (Table I). Since direct reaction of substrate with Cu²⁺ seems unrealistic, the disproportionation of the semiquinone as proposed in Reaction 4, providing TPQ/Cu¹⁺, which converts substrate, becomes realistic. Accepting this, some observations of enzyme at high concentration with substrate are understandable: in the first one-third of the titration experiment, the TPQ content decreases but no semiquinone (or 400-nm species) is formed because the conditions for disproportionation are favorable in this phase of the conversion; the semiquinone content at the end of the titration is only 34% of that expected, as compared with the results of titrations with a much more diluted enzyme preparation. As a consequence of the disproportionation proposed, more than one equivalent aldehyde should be produced, and part of the NH₃ should be released under anaerobic conditions, disagreeing with the findings for bovine plasma amine oxidase (Janes and Klinman, 1991). However, in view of the absence of semiquinone in this enzyme upon substrate addition, it could be reasoned that disproportionation did not occur under the conditions used for product analysis. To substantiate the view presented here, we will carry out product analysis of ECAO under conditions optimal for disproportionation. It has been reported (Castellano et al., 1993) that anaerobic addition of H₂O₂ to substrate-reduced amine oxidases leads to OH· production. Most probably, this also happens with ECAO since destruction of the enzyme occurred, observed as a decrease of the 480-nm band with PEA and tryptamine, but not with the non-substrate methylamine (Fig. 5C). Formation of OH· has been proposed (Castellano et al., 1993) to occur in the following reactions. 
However, based on arguments presented below, the following reaction could also be feasible. Since the semiquinone disappeared on H₂O₂ addition (but not the KCN-induced one), as observed with UV/Vis as well as EPR spectroscopy, but the 400-nm species did not, the semiquinone seems a good candidate for the reaction with H₂O₂, the latter perhaps mimicking O₂, as has been observed for several other oxidases. The reaction proposed is in line with the finding that substrate conversion takes place (via the TPQ/Cu¹⁺ formed in the reaction) and that addition of H₂O₂ to PEA-reduced ECAO increased the level of Cu¹⁺ (Table I). However, in case TPQH· ·NH/Cu¹⁺ does not react with H₂O₂, the conversion with H₂O₂ should be dead-ended. Since slow conversion takes place, either H₂O₂ can act as electron acceptor or a small amount of O₂ generated from decomposition of H₂O₂ is responsible for that. The slow conversion occurring under semiaerobic conditions (Fig. 7D, top part) might be due to the H₂O₂ formed, since an enormous stimulation of the rate was observed in the presence of catalase (Fig. 7D, bottom part). The presence of catalase should also prevent enzyme inactivation, in line with the observation made for pea seedling amine oxidase (Mann, 1955). Taken together, the results for ECAO suggest that H₂O₂ formation can be a disturbing factor for conversions at high substrate concentrations but not for initial rate measurements in assays carried out under normal aerobic conditions.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Requires the third-party `requests` package: pip install requests
"""Fetch the Bitstamp BTC/USD ticker and print the daily high plus the full payload."""
import requests

# Without a timeout, requests can block indefinitely on a stalled connection.
r = requests.get('https://www.bitstamp.net/api/ticker/', timeout=10)
r.raise_for_status()  # fail loudly on HTTP errors instead of parsing an error body
ticker = r.json()  # renamed from `json` to avoid shadowing the stdlib module name
print(ticker.get("high"))
print(ticker)
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.conf.urls import patterns, url

# Page views: home and project management.
urlpatterns = patterns(
    '',
    url(r'^$', 'home_application.views.home', name='home'),
    url(r'^project-list/$', 'home_application.views.project_list'),
    url(r'^project-list/edit$', 'home_application.views.project_edit'),
    url(r'^project-list/del$', 'home_application.views.project_delete'),
)

# Group management pages.
# All entries use url() consistently (bare tuples were a deprecated form).
urlpatterns += patterns(
    '',
    url(r'^group-list/$', 'home_application.views.group_list'),
    url(r'^group-list/edit$', 'home_application.views.group_edit'),
    url(r'^group-list/del$', 'home_application.views.group_delete'),
    url(r'^group-list/ip/delete$', 'home_application.views.group_list_ipdel'),
)

# Cache maintenance and user-authorization pages.
urlpatterns += patterns(
    '',
    url(r'^cache/other$', 'home_application.views.clear_cache'),
    url(r'^user/authorize$', 'home_application.views.user_authorize'),
    url(r'^user/authorize/edit$', 'home_application.views.user_authorize_edit'),
)

# JSON API endpoints (v1).
urlpatterns += patterns(
    '',
    url(r'^api/v1/group/$', 'home_application.api.group_api'),
    url(r'^api/v1/group/push$', 'home_application.api.group_push_api'),
    url(r'^api/v1/ip/$', 'home_application.api.ip_api'),
    url(r'^api/v1/project/$', 'home_application.api.project_api'),
    url(r'^api/v1/project/bond$', 'home_application.api.project_bond_api'),
    url(r'^api/v1/project/push$', 'home_application.api.project_push_api'),
    url(r'^api/v1/cache/qiniu$', 'home_application.api.qiniu_cache_api'),
    url(r'^api/v1/cache/yun$', 'home_application.api.yuncdn_cache_api'),
    url(r'^api/v1/user/group/$', 'home_application.api.role_group_api'),
    url(r'^api/v1/user/group/del$', 'home_application.api.role_group_del_api'),
    url(r'^api/v1/user/group/edit$', 'home_application.api.role_group_save_api'),
    url(r'^api/v1/user/group/permission$', 'home_application.api.role_group_permission_api'),
)
def key_from_path(self, path="", base_dir=""):
    """Map a file *path* under *base_dir* to a key under ``self.rootdir``.

    Only a leading *base_dir* prefix is stripped; the original used
    ``str.replace``, which removed every occurrence of ``base_dir``
    anywhere in the path, and then crashed with ``IndexError`` when the
    stripped path was empty (e.g. ``path == base_dir``).
    """
    if base_dir and path.startswith(base_dir):
        path = path[len(base_dir):]
    # Drop a leading separator so os.path.join treats it as relative.
    if path.startswith(os.path.sep):
        path = path[1:]
    return os.path.join(self.rootdir, path)
/**
 * Report an HTTP status code on the response, routing client (4xx) and
 * server (5xx) error codes through the error handler.
 *
 * @param statusCode The status code to report.
 */
public void status(int statusCode) {
    if (statusCode >= 400 && statusCode < 600) {
        // 4xx/5xx are error classes; delegate to the error path.
        error(statusCode);
    } else {
        response.setStatus(statusCode);
    }
}