content
stringlengths
10
4.9M
// validTimestamp validates if the MessageBird-Request-Timestamp is a valid // date and if the request is older than the validator Period. func (v *Validator) validTimestamp(ts string) bool { t, err := stringToTime(ts) if err != nil { return false } diff := time.Now().Add(ValidityWindow / 2).Sub(t) return diff < ValidityWindow && diff > 0 }
/*
 * Copyright 2016 Scalified <http://www.scalified.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.scalified.viewmover.movers;

import android.view.View;
import android.view.animation.Animation;
import android.view.animation.Interpolator;
import android.view.animation.TranslateAnimation;
import com.scalified.viewmover.configuration.MovingParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Abstract class, which contains the base view movement logic
 * <p>
 * Is extended by subclasses, which implements specific movement logic
 *
 * @author shell
 * @version 1.0.0
 * @since 1.0.0
 */
public abstract class ViewMover {

	/**
	 * Logger
	 */
	private static final Logger LOGGER = LoggerFactory.getLogger(ViewMover.class);

	/**
	 * {@link android.view.View}, which is to be moved
	 */
	private final View view;

	/**
	 * Overrides default constructor
	 *
	 * @param view {@link android.view.View}, which is to be moved
	 */
	ViewMover(View view) {
		this.view = view;
	}

	/**
	 * Is called to calculate the end X point of the view's left bound
	 * <p>
	 * Used to check whether there is enough space inside parent container to move the view
	 * to the left
	 *
	 * @param xAxisDelta X-axis delta in actual pixels
	 * @return end X point of the view's left bound
	 */
	abstract int calculateEndLeftBound(float xAxisDelta);

	/**
	 * Is called to calculate the end X point of the view's right bound
	 * <p>
	 * Used to check whether there is enough space inside parent container to move the view
	 * to the right
	 *
	 * @param xAxisDelta X-axis delta in actual pixels
	 * @return end X point of the view's right bound
	 */
	abstract int calculateEndRightBound(float xAxisDelta);

	/**
	 * Is called to calculate the end Y point of the view's top bound
	 * <p>
	 * Used to check whether there is enough space inside parent container to move the view
	 * to the top
	 *
	 * @param yAxisDelta Y-axis delta in actual pixels
	 * @return end Y point of the view's top bound
	 */
	abstract int calculateEndTopBound(float yAxisDelta);

	/**
	 * Is called to calculate the end Y point of the view's bottom bound
	 * <p>
	 * Used to check whether there is enough space inside parent container to move the view
	 * to the bottom
	 *
	 * @param yAxisDelta Y-axis delta in actual pixels
	 * @return end Y point of the view's bottom bound
	 */
	abstract int calculateEndBottomBound(float yAxisDelta);

	/**
	 * Is called when move animation completes
	 * <p>
	 * Used to change the view position within its parent container
	 *
	 * @param xAxisDelta X-axis delta in actual pixels
	 * @param yAxisDelta Y-axis delta in actual pixels
	 */
	abstract void changeViewPosition(float xAxisDelta, float yAxisDelta);

	/**
	 * Returns the view, which is to be moved
	 *
	 * @return view to be moved
	 */
	View getView() {
		return view;
	}

	/**
	 * Returns the parent container of the view, which is to be moved
	 *
	 * @return parent container of the view to be moved
	 */
	View getParentView() {
		return (View) view.getParent();
	}

	/**
	 * Moves the view based on the {@link MovingParams}
	 * <p>
	 * The move is skipped entirely if a previous move animation is still running,
	 * or if, after clamping against the parent bounds, both deltas are zero
	 *
	 * @param params params of the move action
	 */
	public void move(MovingParams params) {
		if (isPreviousAnimationCompleted()) {
			MovingParams verifiedParams = getVerifiedMovingParams(params);
			if (isMoveNonZero(verifiedParams)) {
				final Animation moveAnimation = createAnimation(verifiedParams);
				LOGGER.trace("View is about to be moved at: delta X-axis = {}, delta Y-axis = {}",
						verifiedParams.getXAxisDelta(), verifiedParams.getYAxisDelta());
				view.startAnimation(moveAnimation);
			}
		}
	}

	/**
	 * Checks whether previous animation on the view completed
	 *
	 * @return true if previous animation on the view completed, otherwise false
	 */
	boolean isPreviousAnimationCompleted() {
		Animation previousAnimation = view.getAnimation();
		boolean previousAnimationCompleted = previousAnimation == null || previousAnimation.hasEnded();
		if (!previousAnimationCompleted) {
			LOGGER.warn("Unable to move the view. View is being currently moving");
		}
		return previousAnimationCompleted;
	}

	/**
	 * Checks whether at least one of the X-axis and Y-axis deltas of the moving
	 * details is not {@code zero}
	 *
	 * @param details moving details, which needs to be checked
	 * @return true, if any of the X-axis or Y-axis delta of the moving details is not
	 *         {@code zero}, otherwise false
	 */
	boolean isMoveNonZero(MovingParams details) {
		boolean moveNonZero = details.getXAxisDelta() != 0.0f
				|| details.getYAxisDelta() != 0.0f;
		if (!moveNonZero) {
			LOGGER.warn("Zero movement detected. No movement will be performed");
		}
		return moveNonZero;
	}

	/**
	 * Creates an updated copy of the {@link MovingParams}
	 * with X-axis and Y-axis deltas updated based on calculations returned from
	 * {@link #updateXAxisDelta(MovingParams)} and
	 * {@link #updateYAxisDelta(MovingParams)}
	 *
	 * @param params moving params, which needs to be updated
	 * @return a copy of {@code params} with each axis delta zeroed out when there is
	 *         not enough space to move along that axis
	 */
	private MovingParams getVerifiedMovingParams(final MovingParams params) {
		MovingParams mParams = new MovingParams(params);
		updateXAxisDelta(mParams);
		updateYAxisDelta(mParams);
		LOGGER.trace("Updated moving details values: X-axis from {} to {}, Y-axis from {} to {}",
				params.getXAxisDelta(), mParams.getXAxisDelta(),
				params.getYAxisDelta(), mParams.getYAxisDelta());
		return mParams;
	}

	/**
	 * Updates the X-axis delta in moving details based on checking whether
	 * there is enough space left to move the view horizontally
	 *
	 * @param details moving details, which X-axis delta needs to be updated in
	 */
	private void updateXAxisDelta(MovingParams details) {
		if (!hasHorizontalSpaceToMove(details.getXAxisDelta())) {
			LOGGER.warn("Unable to move the view horizontally. No horizontal space left to move");
			details.setXAxisDelta(0.0f);
		}
	}

	/**
	 * Updates the Y-axis delta in moving details based on checking whether
	 * there is enough space left to move the view vertically
	 *
	 * @param details moving details, which Y-axis delta needs to be updated in
	 */
	private void updateYAxisDelta(MovingParams details) {
		if (!hasVerticalSpaceToMove(details.getYAxisDelta())) {
			LOGGER.warn("Unable to move the view vertically. No vertical space left to move");
			details.setYAxisDelta(0.0f);
		}
	}

	/**
	 * Checks whether there is enough space left to move the view horizontally within
	 * its parent container
	 * <p>
	 * Calls {@link #calculateEndLeftBound(float)} and {@link #calculateEndRightBound(float)}
	 * to calculate the resulting X coordinate of the view's left and right bounds
	 *
	 * @param xAxisDelta X-axis delta in actual pixels
	 * @return true if there is enough space to move the view horizontally, otherwise false
	 */
	private boolean hasHorizontalSpaceToMove(float xAxisDelta) {
		int parentWidth = getParentView().getWidth();
		LOGGER.trace("Parent view width is: {}", parentWidth);
		int endLeftBound = calculateEndLeftBound(xAxisDelta);
		int endRightBound = calculateEndRightBound(xAxisDelta);
		LOGGER.trace("Calculated end bounds: left = {}, right = {}", endLeftBound, endRightBound);
		return endLeftBound >= 0 && endRightBound <= parentWidth;
	}

	/**
	 * Checks whether there is enough space left to move the view vertically within
	 * its parent container
	 * <p>
	 * Calls {@link #calculateEndTopBound(float)} and {@link #calculateEndBottomBound(float)}
	 * to calculate the resulting Y coordinate of the view's top and bottom bounds
	 *
	 * @param yAxisDelta Y-axis delta in actual pixels
	 * @return true if there is enough space to move the view vertically, otherwise false
	 */
	private boolean hasVerticalSpaceToMove(float yAxisDelta) {
		int parentHeight = getParentView().getHeight();
		LOGGER.trace("Parent view height is: {}", parentHeight);
		int endTopBound = calculateEndTopBound(yAxisDelta);
		int endBottomBound = calculateEndBottomBound(yAxisDelta);
		LOGGER.trace("Calculated end bounds: top = {}, bottom = {}", endTopBound, endBottomBound);
		return endTopBound >= 0 && endBottomBound <= parentHeight;
	}

	/**
	 * Creates the moving animation
	 * <p>
	 * Configures the moving animation based on moving params
	 *
	 * @param params params, which is used to configure the moving animation
	 * @return moving animation
	 */
	private Animation createAnimation(MovingParams params) {
		Animation animation = new TranslateAnimation(0, params.getXAxisDelta(), 0, params.getYAxisDelta());
		animation.setFillEnabled(true);
		animation.setFillBefore(false);
		animation.setDuration(params.getAnimationDuration());
		Interpolator interpolator = params.getAnimationInterpolator();
		if (interpolator != null) {
			animation.setInterpolator(interpolator);
		}
		animation.setAnimationListener(new MoveAnimationListener(params));
		return animation;
	}

	/**
	 * Move animation listener class
	 * <p>
	 * Used to listen the animation and call the {@link #changeViewPosition(float, float)}
	 * when animation completes. Also forwards all callbacks to the external listener
	 * supplied via {@link MovingParams#getAnimationListener()}, if any
	 */
	private class MoveAnimationListener implements Animation.AnimationListener {

		/**
		 * Moving parameters
		 */
		private final MovingParams params;

		/**
		 * An external animation listener
		 */
		private final Animation.AnimationListener animationListener;

		/**
		 * Creates an instance of the
		 * {@link com.scalified.viewmover.movers.ViewMover.MoveAnimationListener}
		 *
		 * @param params moving params
		 */
		private MoveAnimationListener(MovingParams params) {
			this.params = params;
			this.animationListener = params.getAnimationListener();
		}

		@Override
		public void onAnimationStart(Animation animation) {
			if (animationListener != null) {
				animationListener.onAnimationStart(animation);
			}
		}

		@Override
		public void onAnimationRepeat(Animation animation) {
			if (animationListener != null) {
				animationListener.onAnimationRepeat(animation);
			}
		}

		/**
		 * Is called when animation completes
		 * <p>
		 * Calls the {@link #changeViewPosition(float, float)} giving the subclasses
		 * the ability to change the position of the view based on their logic
		 *
		 * @param animation moving animation
		 */
		@Override
		public void onAnimationEnd(Animation animation) {
			changeViewPosition(params.getXAxisDelta(), params.getYAxisDelta());
			if (animationListener != null) {
				animationListener.onAnimationEnd(animation);
			}
		}
	}
}
import fs from "fs/promises"; import { exec } from "child_process" import { settings } from "cluster"; import { PackerManager } from "./packer" export class TerraformManager { constructor() { } setup = (config: any, templates:any): Promise<any> => { return new Promise(async (resolve, reject) => { let terraformConfig = config; // //In Terraform Ordner gehen //read file names console.log("now this:") let terraformFiles = await fs.readdir("./src/terraform"); if (!terraformFiles) { //throw error } console.log(terraformFiles) // sort out the files that are not relevant (variables/preseed) let definitonFiles = []; //Checken ob das so passt? for(let filename of terraformFiles){ if (filename.includes("json")) { if (!filename.includes("/variables.json")) { definitonFiles.push(filename) } } } if (!definitonFiles) { //Throw error because no config files are given/ is empty } // Variables.tf File erstellen fs.writeFile(`./src/terraform/terraform.tfvars.json`, JSON.stringify(terraformConfig)) // Terraform init console.log("Initialize Terraform") await this.init(); // Terraform plan console.log("Plan Terraform") await this.plan() /** * TODO * make an output.tf or build something that makes this work * * In Konfig kann man eventuell einfach festlegen, welche Variablen zusätzlich zur IP ausgegeben werden. 
* Erzeugen der Der Output.TF auf Basis der Eingaben * */ await this.getIp(); // Terraform apply console.log("Apply Terraform") await this.apply() resolve(true); }) } init = () =>{ return new Promise(async (resolve, reject) => { exec(`terraform init`, {cwd: 'src/terraform'}, (error, stdout, stderr) => { // Set new variables.json if there is an error -> corrected file if (error !== null) { console.log('exec error: ' + error); reject(); } else { if(stdout == ""){ console.log( "✅") } console.log('stdout: ' + stdout) console.log('stderr: ' + stderr); resolve(true); } }); }) } plan = () =>{ return new Promise(async (resolve, reject) => { exec(`terraform plan`, {cwd: 'src/terraform'}, (error, stdout, stderr) => { // Set new variables.json if there is an error -> corrected file if (error !== null) { console.log('exec error: ' + error); reject(); } else { if(stdout == ""){ console.log( "✅") } console.log('stdout: ' + stdout) console.log('stderr: ' + stderr); resolve(true); } }); }) } apply = () =>{ return new Promise(async (resolve, reject) => { exec(`terraform apply -auto-approve`, {cwd: 'src/terraform'}, (error, stdout, stderr) => { //Do not auto approve! -> ask user! 
// Set new variables.json if there is an error -> corrected file console.log('stdout: ' + stdout) console.log('stderr: ' + stderr); if (error !== null) { console.log('exec error: ' + error); reject(); } else { if(stdout == ""){ console.log( "✅") } console.log('stdout: ' + stdout) console.log('stderr: ' + stderr); resolve(true); } }); }) } /** * Destroy Infrastructure */ destroy = () =>{ return new Promise(async (resolve, reject) => { exec(`terraform destroy -auto-approve`, {cwd: 'src/terraform'}, (error, stdout, stderr) => { // Set new variables.json if there is an error -> corrected file if (error !== null) { console.log('exec error: ' + error); reject(); } else { if(stdout == ""){ console.log( "✅") } console.log('stdout: ' + stdout) console.log('stderr: ' + stderr); resolve(true); } }); }) } /** * Get IP Address the TF way */ getIp = ():Promise<void> =>{ return new Promise(async (resolve, reject) => { const inventory = JSON.parse(await fs.readFile("src/terraform/terraform.tfstate", "utf-8")); //copy out all names with respective ip addresses let instances = []; for(let resource of inventory.resources){ if(resource.mode == "managed" && resource.type == "vsphere_virtual_machine"){ // We need the name/type/ip let type = resource.name; for(let singleInstance in resource.instances){ let instObj = JSON.parse(singleInstance); let name = instObj.attributes.name; let ip = instObj.attributes.default_ip_address instances.push({type: type, name: name, ip: ip}) } } } //create new ansible inventory let ansibleInventory = ""; let index = 0; let newGroup = true; for(let singleInstance of instances){ if(index == 0 || newGroup){ ansibleInventory = `[${singleInstance.type}] `; newGroup = false; } index++; ansibleInventory = ansibleInventory + singleInstance.ip + `ansible_ssh_user=${"%%"+ singleInstance.name + "-USER%%"} ansible_ssh_pass=${"%%" + singleInstance.name + "-PASS%%"} `; if(singleInstance.type != instances[index].type){ newGroup = true; } } fs.writeFile('src/ansible', 
ansibleInventory, "utf8"); resolve(); }) } }
package de.espend.idea.php.toolbox.matcher.twig;

import com.intellij.openapi.fileTypes.FileType;
import com.jetbrains.twig.TwigFileType;
import de.espend.idea.php.toolbox.dict.json.JsonSignature;
import de.espend.idea.php.toolbox.dict.matcher.LanguageMatcherParameter;
import de.espend.idea.php.toolbox.extension.LanguageRegistrarMatcherInterface;
import de.espend.idea.php.toolbox.utils.TwigUtil;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;

import java.util.HashSet;
import java.util.Set;

/**
 * Registrar matcher that accepts PSI elements located inside a Twig print
 * block ({{ ... }}) calling one of the functions named by the registered
 * JSON signatures.
 *
 * @author <NAME> <<EMAIL>>
 */
public class TwigBlockRegistrarMatcher implements LanguageRegistrarMatcherInterface {

    /**
     * Collects the non-blank function names from the parameter's signatures
     * and matches the element against the Twig print-block function pattern.
     *
     * @param parameter carries the element under test and its signatures
     * @return true if the element is inside a print block calling one of the functions
     */
    @Override
    public boolean matches(@NotNull LanguageMatcherParameter parameter) {
        Set<String> functions = new HashSet<>();
        for (JsonSignature signature : parameter.getSignatures()) {
            // Signatures without a function name cannot match a function call
            if (StringUtils.isBlank(signature.getFunction())) {
                continue;
            }
            functions.add(signature.getFunction());
        }
        // toArray(new String[0]) is the preferred idiom over pre-sizing the array
        return TwigUtil.getPrintBlockFunctionPattern(functions.toArray(new String[0]))
                .accepts(parameter.getElement());
    }

    /**
     * This matcher only applies to Twig files.
     */
    @Override
    public boolean supports(@NotNull FileType fileType) {
        return fileType == TwigFileType.INSTANCE;
    }
}
By Karam al-Masri Cigarettes stuffed with grape leaves instead of tobacco, gardens on bombed-out rooftops, and batteries powered by rusted bicycles: in Syria's besieged eastern Aleppo, necessity is the mother of invention. More than 250,000 people have been under a government siege in the rebel-held side of the northern city since July, without access to aid, food, fuel, medicine or even cigarettes. The blockade has sparked severe shortages and exorbitant prices for the few basic goods available, forcing residents to find innovative ways to cope. "We've been forced back into the Stone Age," said Khaled Kurdiyah, who lives in Aleppo's eastern district of Karam al-Jabal. Kurdiyah is the mastermind behind "the can" - a metal container outfitted with a fan to create a highly controlled wood fire that acts as a substitute for a gas stove. "I punched a hole in a used ghee can that we were going to throw away and fixed a fan on it," he explains to AFP. The 25-year-old breaks up a few pieces of wood and tosses them into the dented container, lighting a fire and crowning it with a teapot. "This way, we can direct the flames from the wood in a certain direction to create an even bigger fire while rationing our firewood," he said. Pedalling power Like many basic goods, gasoline and diesel are increasingly valuable in Aleppo's east, where state-run electricity is mostly cut off. Some residents have developed a system to melt scraps of plastic into fuel, which is then used to run electricity generators. But the process can produce unexpected explosions and be deadly. So Abu Rahmo has developed a cleaner form of energy, using the pedalling power of residents. In his workshop in the Ansari neighbourhood, the 48-year-old mechanic welds a dynamo - the small generator used to charge car batteries - onto the back of an old bicycle. "We have neither electricity nor generators... So I take the dynamos out of cars and fix them onto bicycles to charge car batteries," he said. 
The batteries can then be used to turn on lights, charge phones, and "even power washing machines," Abu Rahmo said. The balding Aleppan sells one bicycle every few days at a price of about 10,000 Syrian pounds ($20). Once the sale has been confirmed, he carries the contraption to the buyer's house for a demonstration of how it works and any final adjustments. A lanky teenager slips his sandaled feet into the pedals of a recently sold bicycle, pumping until the attached light bulb flickers on. 'Worth more than gold' Just a few streets away in the Kalasseh district, 28-year-old Amir Sendeh unlocks a metal door, disappearing into a small outdoor courtyard. He checks on a handful of scrawny chickens - invaluable in a place like Aleppo - before shuffling up a flight of stairs to his roof. There, white foam boxes are organised in neat rows, some of them lush with bright green sprouts. "I bought some seeds and planted them on the roof of my house," says Sendeh, caressing the plants. Since government forces surrounded Aleppo in July, food items have become hard to find or prohibitively expensive, or both. The price of sugar has gone up nearly tenfold to 3,500 ($7) pounds per kg, and salad ingredients like parsley or tomatoes are at least five times more costly than before the siege. So some residents of Aleppo have planted small rooftop gardens to harvest their own food. "Right now, I've got parsley, radishes, and soon I'll have some spinach and chard," Sendeh said proudly. In moments of calm, residents of east Aleppo sit along bombed-out streets and deftly roll cigarettes, but instead of tobacco, they stuff them with dried, shredded grape leaves. "A pack of cigarettes is worth more than gold these days, and the price goes up every day," said Ahmad Oweija, 43. Residents now refer to real cigarettes, stuffed with tobacco, as "foreign". They sell for about 2,000 Syrian pounds ($4) each, he said. Before the siege, that was the price for a whole pack of 20. 
Oweija sells hand-rolled cigarettes filled with vine leaves that he picks, dries out, then grinds up in the Bustan al-Qasr district. If he has real tobacco to spare, he sprinkles a small amount in each cigarette - just for taste. With real cigarettes so rare and costly, they even serve as their own form of currency, Oweija claims. "I know people who have bought houses and cars for a few packs of foreign cigarettes."
, Fla. (AP) — Yankees starting pitcher Chien-Ming Wang has had season-ending arthroscopic surgery on his injured right shoulder. Noted orthopedist Dr. James Andrews performed the procedure Wednesday to repair what Yankees manager Joe Girardi said was a tear in the capsule. The team is awaiting reports from the surgery before setting a potential timetable for Wang's return next year. After winning 19 games in 2006 and 2007, Wang missed the final 3½ months last season after injuring his right foot while running the bases in Houston. He was 1-6 with a 9.64 ERA in 12 games this season, after missing time from April 19 to May 21 with a hip injury. Also, the Yankees obtained right-hander Jason Hirsh from Colorado for a player to be named, and pitcher Brett Tomko, designated for assignment on July 22, was released.
use super::lexer::Token;
use super::span::{Span, Spanned, Spanning};
pub use ast::{Expression, Grammar, Production};
use error::Error;
use nom::{
    branch::alt,
    combinator::{cut, map, opt},
    multi::many1,
    sequence::{pair, preceded, separated_pair, terminated, tuple},
    IResult,
};
use tokens::*;
use utils::*;

pub mod ast;
pub mod error;
#[cfg(test)]
mod tests;
mod tokens;
mod utils;

// EBNF grammar parser built from nom combinators over a token stream.
// Precedence, loosest to tightest: alternative (|) > sequence (,) > term
// (exception '-') > factor (repetition 'n *') > primary.

/// Parses `( alternative )`; the result keeps the inner expression node but
/// widens its span to cover both group symbols.
fn grouped(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        tuple((start_group_symbol, alternative, cut(end_group_symbol))),
        |(open, expr, close)| expr.node.spanning(Span::combine(&open.span, &close.span)),
    )(i)
}

/// Parses `{ alternative }` into `Expression::Repeated`.
fn repeated(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        tuple((start_repeat_symbol, alternative, cut(end_repeat_symbol))),
        |(open, node, close)| {
            Expression::Repeated(Box::new(node)).spanning(Span::combine(&open.span, &close.span))
        },
    )(i)
}

/// Parses `[ alternative ]` into `Expression::Optional`.
fn optional(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        tuple((start_option_symbol, alternative, cut(end_option_symbol))),
        |(open, node, close)| {
            Expression::Optional(Box::new(node)).spanning(Span::combine(&open.span, &close.span))
        },
    )(i)
}

/// Parses a primary optionally prefixed by a repetition count `n *`.
/// A count of 0 collapses directly to `Expression::Empty`; with no count the
/// primary is returned unchanged.
fn factor(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        pair(
            opt(terminated(integer, cut(repetition_symbol))),
            alt((
                optional,
                repeated,
                grouped,
                nonterminal,
                terminal,
                special,
                empty,
            )),
        ),
        |(repetition, node)| match (repetition, node) {
            // `0 * x` is semantically empty
            (Some(count @ Spanned { node: 0, .. }), node) => {
                let span = Span::combine(&count.span, &node.span);
                Expression::Empty.spanning(span)
            }
            (Some(count), node) => {
                let span = Span::combine(&count.span, &node.span);
                Expression::Factor {
                    count,
                    primary: Box::new(node),
                }
                .spanning(span)
            }
            (None, node) => node,
        },
    )(i)
}

/// Parses `factor [- factor]`; a trailing exception becomes
/// `Expression::Exception { subject, restriction }`.
fn term(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        pair(factor, opt(preceded(exception_symbol, cut(factor)))),
        |(primary, exception)| match exception {
            None => primary,
            Some(ex) => {
                let span = Span::combine(&primary.span, &ex.span);
                Expression::Exception {
                    subject: Box::new(primary),
                    restriction: Box::new(ex),
                }
                .spanning(span)
            }
        },
    )(i)
}

/// Parses one or more terms separated by the concatenation symbol; a single
/// term passes through unchanged, two or more fold into `Expression::Sequence`.
fn sequence(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map(
        separated_list1(concatenation_symbol, term),
        |nodes| match nodes.len() {
            1 => nodes[0].clone(),
            _ => Expression::Sequence {
                first: Box::new(nodes[0].clone()),
                second: Box::new(nodes[1].clone()),
                rest: nodes[2..].to_vec(),
            }
            .spanning(Span::combine(&nodes[0].span, &nodes[nodes.len() - 1].span)),
        },
    )(i)
}

/// Parses one or more sequences separated by the definition separator (|).
/// A failing separated list is re-labelled as `Error::DefinitionExpected`
/// for a friendlier diagnostic.
fn alternative(i: Tokens) -> IResult<Tokens, Spanned<Expression>, Spanned<Error>> {
    map_err(
        map(
            separated_list1(definition_separator, sequence),
            |nodes| match nodes.len() {
                1 => nodes[0].clone(),
                _ => Expression::Alternative {
                    first: Box::new(nodes[0].clone()),
                    second: Box::new(nodes[1].clone()),
                    rest: nodes[2..].to_vec(),
                }
                .spanning(Span::combine(&nodes[0].span, &nodes[nodes.len() - 1].span)),
            },
        ),
        |e| match e {
            Spanned {
                node: Error::Nom(nom::error::ErrorKind::SeparatedList),
                span,
            } => Error::DefinitionExpected.spanning(span),
            e => e,
        },
    )(i)
}

/// Parses `identifier = alternative ;` into a `Production`; `cut` commits to
/// this branch once a non-EOF token is seen, so errors inside it are failures.
fn production(i: Tokens) -> IResult<Tokens, Spanned<Production>, Spanned<Error>> {
    map(
        non_eof(cut(pair(
            separated_pair(identifier, definition_symbol, alternative),
            terminator_symbol,
        ))),
        |((identifier, definitions), terminator)| {
            let span = Span::combine(&identifier.span, &terminator.span);
            Production {
                lhs: identifier,
                rhs: definitions,
            }
            .spanning(span)
        },
    )(i)
}

/// Parses one or more productions into a `Grammar` spanning first to last.
fn syntax(i: Tokens) -> IResult<Tokens, Spanned<Grammar>, Spanned<Error>> {
    map(many1(production), |productions| {
        let span = Span::combine(
            &productions[0].span,
            &productions[productions.len() - 1].span,
        );
        Grammar { productions }.spanning(span)
    })(i)
}

/// Entry point: parses a full token slice into a grammar, flattening nom's
/// Error/Failure wrappers into a single `Spanned<Error>`.
pub(super) fn parse(tokens: &[Spanned<Token>]) -> Result<Spanned<Grammar>, Spanned<Error>> {
    match syntax(Tokens::new(&tokens)) {
        Ok((_, grammar)) => Ok(grammar),
        Err(nom::Err::Failure(inner)) => Err(inner),
        Err(nom::Err::Error(inner)) => Err(inner),
        // Incomplete cannot occur on a complete (non-streaming) input
        _ => unreachable!(),
    }
}
def is_valid_replay(replay_file_path):
    """Check whether a file is a usable 1v1 ladder StarCraft II replay.

    Rejects files that are not .SC2Replay, fail to load via sc2reader,
    are not ladder games (unless from spawningtool), lack winner info,
    involve an AI player, run under 5 minutes, or are not 1v1.

    Args:
        replay_file_path: Path to the candidate replay file.

    Returns:
        bool: True if the replay passes every check, False otherwise.
    """
    # Only accept files with the SC2Replay extension
    if not replay_file_path.endswith("SC2Replay"):
        return False
    try:
        replay = sc2reader.load_replay(replay_file_path)
        # Surface plugin problems but do not reject the replay for them
        if any(v != (0, {}) for v in replay.plugin_result.values()):
            print(replay_file_path, replay.plugin_result)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate
        print(replay_file_path, "cannot load using sc2reader due to an internal error:")
        traceback.print_exc()
        return False
    if not replay.is_ladder and "spawningtool" not in replay_file_path:
        print(replay_file_path + " is not a ladder game")
        return False
    if replay.winner is None:
        print(replay.filename, "has no winner information")
        return False
    try:
        # Controller attributes may be missing for malformed replays
        if replay.attributes[1]["Controller"] == "Computer" or replay.attributes[2]["Controller"] == "Computer":
            print(replay.filename, "is a player vs. AI game")
            return False
    except Exception:
        traceback.print_exc()
        return False
    if replay.length.seconds < 300:
        print(replay.filename, "is shorter than 5 minutes")
        return False
    if len(replay.players) != 2:
        print(replay.filename, "is not a 1v1 game")
        return False
    print("replay verified")
    return True
/**
 * Builds a nested tree that follows the same file structure as the system, with the
 * base directory of the current repository as the root. Subsequent calls to this method
 * will update the items in place
 * @param repoFiles the files to add to the tree
 * @param root the root of the tree
 */
@Override
protected void addTreeItemsToRoot(List<RepoFile> repoFiles, TreeItem<RepoFile> root) {
	// Bucket the incoming files by their depth in the repository so parents
	// (shallower depth) are processed before their children
	Map<Integer, List<RepoFile>> filesAtDepthMap = new HashMap<>();
	int maxDepth = 0;
	for (RepoFile repoFile : repoFiles) {
		int depthInRepo = repoFile.getLevelInRepository();
		if (depthInRepo > maxDepth) maxDepth = depthInRepo;
		if (!filesAtDepthMap.containsKey(depthInRepo)) {
			List<RepoFile> list = new LinkedList<>();
			list.add(repoFile);
			filesAtDepthMap.put(depthInRepo, list);
		} else {
			filesAtDepthMap.get(depthInRepo).add(repoFile);
		}
	}

	// Start by assuming every existing item is stale; items touched below are
	// removed from this set, and whatever remains is pruned at the end
	Set<TreeItem<RepoFile>> itemsToRemove = new HashSet<>();
	itemsToRemove.addAll(itemMap.values());

	// Process depth levels shallow-to-deep so a parent item exists in itemMap
	// before its children look it up
	for (int i = 0; i < maxDepth + 1; i++) {
		List<RepoFile> filesAtDepth = filesAtDepthMap.get(i);
		if (filesAtDepth != null) {
			for (RepoFile repoFile : filesAtDepth) {
				Path pathToFile = repoFile.getFilePath();
				if (itemMap.containsKey(pathToFile)) {
					TreeItem<RepoFile> oldItem = itemMap.get(pathToFile);
					if (oldItem.getValue().equals(repoFile)) {
						// Unchanged file: keep the existing item
						itemsToRemove.remove(oldItem);
					} else {
						// Same path but changed state: swap in a fresh item while
						// preserving the old item's expansion state and children
						CheckBoxTreeItem<RepoFile> newItem = new CheckBoxTreeItem<>(repoFile, repoFile.diffButton);
						TreeItem<RepoFile> parent = oldItem.getParent();
						newItem.setExpanded(oldItem.isExpanded());
						newItem.getChildren().setAll(oldItem.getChildren());
						parent.getChildren().set(parent.getChildren().indexOf(oldItem), newItem);
						itemsToRemove.remove(oldItem);
						itemMap.put(pathToFile, newItem);
					}
				} else {
					// New file: walk up its ancestors until a known parent item (or
					// the repo root) is found, then attach the new item under it
					CheckBoxTreeItem<RepoFile> newItem = new CheckBoxTreeItem<>(repoFile, repoFile.diffButton);
					Path pathToParent = pathToFile.getParent();
					boolean foundParent = false;
					while (pathToParent != null && !root.getValue().getFilePath().equals(pathToParent)) {
						if (itemMap.containsKey(pathToParent)) {
							TreeItem<RepoFile> parent = itemMap.get(pathToParent);
							// Mutate the scene graph on the JavaFX application thread
							Platform.runLater(() -> parent.getChildren().add(newItem));
							foundParent = true;
							break;
						}
						pathToParent = pathToParent.getParent();
					}
					if (!foundParent) {
						root.getChildren().add(newItem);
					}
					itemMap.put(pathToFile, newItem);
					itemsToRemove.remove(newItem);
				}
			}
		}
	}

	// Prune items whose files no longer appear in repoFiles; UI detach happens
	// on the FX thread, the map entry is removed immediately
	for (TreeItem<RepoFile> item : itemsToRemove) {
		Platform.runLater(() -> {
			if (item.getParent() != null) item.getParent().getChildren().remove(item);
		});
		itemMap.remove(item.getValue().getFilePath());
	}
}
# -*- coding: utf-8 -*-
# pylint: disable=C0302
"""
.. invisible:
     _   _ _____ _     _____ _____
    | | | |  ___| |   |  ___/  ___|
    | | | | |__ | |   | |__ \ `--.
    | | | |  __|| |   |  __| `--. \
    \ \_/ / |___| |___| |___/\__/ /
     \___/\____/\_____|____/\____/

Created on June 11, 2015

Ensemble of machine learning algorithms - top level workflow.

███████████████████████████████████████████████████████████████████████████████

Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.

███████████████████████████████████████████████████████████████████████████████
"""


import os
from tempfile import NamedTemporaryFile
from zope.interface import implementer

from veles.config import root
from veles.ensemble.base_workflow import EnsembleWorkflowBase, \
    EnsembleModelManagerBase
from veles.plotting_units import AutoHistogramPlotter
from veles.units import IUnit


@implementer(IUnit)
class EnsembleModelManager(EnsembleModelManagerBase):
    # Trains `size` models one at a time as subprocesses, then evaluates each
    # in --test mode, collecting per-model results and fitness values.

    def __init__(self, workflow, **kwargs):
        super(EnsembleModelManager, self).__init__(workflow, **kwargs)
        self.size = kwargs["size"]
        self._train_ratio = kwargs["train_ratio"]
        self._fitnesses = []
        self.plotters_are_disabled = kwargs.get(
            "plotters_are_disabled", root.common.ensemble.disable.plotting)

    @property
    def size(self):
        # Ensemble size equals the number of result slots.
        # NOTE(review): getter reads `self.results` while the setter writes
        # `self._results` — presumably `results` is a property on
        # EnsembleModelManagerBase exposing `_results`; confirm in base class.
        return len(self.results)

    @size.setter
    def size(self, value):
        if not isinstance(value, int):
            raise TypeError("size must be an integer (got %s)" % type(value))
        if value < 1:
            raise ValueError("size must be > 0 (got %d)" % value)
        # Slice-assign to mutate the existing list in place, preserving any
        # external references to it
        self._results[:] = [None] * value

    @property
    def train_ratio(self):
        # Fraction of the data used for training, forwarded to each model run
        return self._train_ratio

    @property
    def fitnesses(self):
        # EvaluationFitness value of each successfully trained model, in order
        return self._fitnesses

    @property
    def plotters_are_disabled(self):
        return self._plotters_are_disabled

    @plotters_are_disabled.setter
    def plotters_are_disabled(self, value):
        self._plotters_are_disabled = value

    def initialize(self, **kwargs):
        super(EnsembleModelManager, self).initialize(**kwargs)
        # Each slave trains exactly one model; the master aggregates
        if self.is_slave:
            self.size = 1
        if self.testing:
            raise ValueError(
                "Ensemble training is incompatibe with --test mode. Use "
                "--ensemble-test instead.")

    def run(self):
        # Next free result slot = number of models finished so far
        index = sum(1 for r in self.results if r is not None)
        # Temp file receives the child process's JSON result via --result-file
        with NamedTemporaryFile(
                prefix="veles-ensemble-", suffix=".json", mode="r") as fin:
            argv = ["--result-file", fin.name, "--stealth", "--train-ratio",
                    str(self._train_ratio), "--log-id",
                    self.launcher.log_id] + self._filtered_argv_ + \
                ["root.common.ensemble.model_index=%d" % self._model_index,
                 "root.common.ensemble.size=%d" % self.size,
                 "root.common.disable.publishing=True"]
            if self.plotters_are_disabled:
                argv.append("root.common.disable.plotting=True")
            try:
                self.info("Training model %d / %d (#%d)...\n%s",
                          index + 1, self.size, self._model_index, "-" * 80)
                train_result = self._exec(argv, fin, "train")
                if train_result is None:
                    # Child failed; _model_index still advances (finally block)
                    return
                try:
                    id_ = train_result["id"]
                    log_id = train_result["log_id"]
                    snapshot = train_result["Snapshot"]
                except KeyError:
                    self.error("Model #%d did not return a valid result",
                               self._model_index)
                    return
                self.info("Evaluating model %d / %d (#%d)...\n%s",
                          index + 1, self.size, self._model_index, "-" * 80)
                # Re-run the same model in test mode from its saved snapshot
                argv = ["--test", "--snapshot", self._to_snapshot_arg(
                    id_, log_id, snapshot)] + argv
                fin.seek(0, os.SEEK_SET)
                test_result = self._exec(argv, fin, "test")
                if test_result is None:
                    return
                # Merge train and test results into one record per model
                self.results[index] = train_result
                self.results[index].update(test_result)
                self._fitnesses.append(train_result["EvaluationFitness"])
            finally:
                self._model_index += 1


class EnsembleModelWorkflow(EnsembleWorkflowBase):
    # Wires an EnsembleModelManager into the base ensemble workflow and adds a
    # fitness histogram between the manager and the repeater.
    KWATTRS = set(EnsembleModelManager.KWATTRS)
    MANAGER_UNIT = EnsembleModelManager

    def __init__(self, workflow, **kwargs):
        super(EnsembleModelWorkflow, self).__init__(workflow, **kwargs)
        self.histogram = AutoHistogramPlotter(self)
        self.histogram.input = self.ensemble.fitnesses
        self.histogram.link_from(self.ensemble)
        # Re-route the loop: repeater now runs after the histogram instead of
        # directly after the ensemble manager
        self.repeater.unlink_from(self.ensemble)
        self.repeater.link_from(self.histogram)


def run(load, main, **kwargs):
    # Standard veles entry point: load the workflow, then run the main loop
    load(EnsembleModelWorkflow, **kwargs)
    main()
package com.example.mac.bugfree.activity; import android.Manifest; import android.annotation.TargetApi; import android.content.ContentUris; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Build; import android.os.Environment; import android.provider.DocumentsContract; import android.provider.MediaStore; import android.support.annotation.NonNull; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.support.v4.content.FileProvider; import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.location.Location; import android.location.LocationManager; import android.os.Build; import android.preference.PreferenceManager; import android.support.annotation.NonNull; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.Toolbar; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.CheckBox; import android.widget.DatePicker; import android.widget.EditText; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.Spinner; import android.widget.TextView; import android.widget.TimePicker; import android.widget.Toast; import com.example.mac.bugfree.BuildConfig; import com.example.mac.bugfree.controller.ElasticsearchImageController; import com.example.mac.bugfree.controller.ElasticsearchImageOfflineController; import 
com.example.mac.bugfree.controller.ElasticsearchUserController; import com.example.mac.bugfree.module.Image; import com.example.mac.bugfree.module.ImageForElasticSearch; import com.example.mac.bugfree.module.MoodEvent; import com.example.mac.bugfree.module.MoodEventList; import com.example.mac.bugfree.exception.MoodStateNotAvailableException; import com.example.mac.bugfree.R; import com.example.mac.bugfree.module.User; import com.example.mac.bugfree.util.CurrentLocation; import com.example.mac.bugfree.util.InternetConnectionChecker; import com.example.mac.bugfree.util.LoadFile; import com.example.mac.bugfree.util.SaveFile; import org.osmdroid.util.GeoPoint; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import static com.example.mac.bugfree.R.id.expanded_menu; import static com.example.mac.bugfree.R.id.image; import static com.example.mac.bugfree.R.id.timePicker; import static java.util.Date.parse; /** * This class allow users to create a new mood event * * @author <NAME> */ public class CreateEditMoodActivity extends AppCompatActivity { final private int REQUEST_CODE_ASK_PERMISSIONS = 123; /** * The constant TAKE_PHOTO. */ public static final int TAKE_PHOTO = 1; /** * The constant CHOOSE_PHOTO. */ public static final int CHOOSE_PHOTO = 2; /** * The constant REQ_CODE_CHILD. */ public final static int REQ_CODE_CHILD = 233; private String current_user, mood_state , social_situation, reason, imagepath; private Date date = null; /** * The Set year. */ public int set_year = 0, /** * The Set month. */ set_month = 0, /** * The Set day. */ set_day = 0, /** * The Set hour. */ set_hour, /** * The Set minute. 
*/ set_minute; private String test; private EditText create_edit_reason; private ImageView pic_preview, home_tab, earth_tab; private CheckBox current_time_checkbox, currentLocationCheckbox; /** * The Date of record. */ public GregorianCalendar dateOfRecord; private DatePicker simpleDatePicker; private TimePicker simpleTimePicker; private Uri imageFileUri; private GeoPoint currentLocation; private ImageForElasticSearch imageForElasticSearch = null; /** * onCreate begins from here * set the spinners, pickers and EditText, store them locally whenever something type in or spinner item been selected */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_create_edit_mood); ArrayAdapter<CharSequence> adapter1; ArrayAdapter<CharSequence> adapter2; create_edit_reason = (EditText)findViewById(R.id.create_edit_reason); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar_create_edit); setSupportActionBar(toolbar); home_tab = (ImageView) findViewById(R.id.home_tab_add); earth_tab = (ImageView) findViewById(R.id.earth_tab_add); Spinner social_situation_spinner= (Spinner)findViewById(R.id.social_situation); Spinner mood_state_spinner= (Spinner)findViewById(R.id.mood_state_spinner); pic_preview = (ImageView)findViewById(R.id.pic_preview); current_time_checkbox = (CheckBox)findViewById(R.id.current_time); simpleDatePicker = (DatePicker)findViewById(R.id.datePicker); simpleTimePicker = (TimePicker)findViewById(timePicker); simpleTimePicker.setIs24HourView(true); current_time_checkbox.setChecked(true); currentLocationCheckbox = (CheckBox) findViewById(R.id.current_location); if(current_time_checkbox.isChecked()){ simpleDatePicker.setEnabled(false); simpleTimePicker.setEnabled(false); } home_tab.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { setResult(RESULT_OK); finish(); } }); earth_tab.setOnClickListener(new View.OnClickListener() { @Override public void 
onClick(View v) { InternetConnectionChecker checker = new InternetConnectionChecker(); Context context = getApplicationContext(); final boolean isOnline = checker.isOnline(context); if(isOnline) { setResult(RESULT_OK); finish(); } else{ Toast.makeText(getApplicationContext(), "Map is not available when this device is offline.", Toast.LENGTH_LONG).show(); } } }); pic_preview.setImageResource(R.drawable.umood); adapter1 = ArrayAdapter.createFromResource(this,R.array.mood_states_array,android.R.layout.simple_spinner_item); adapter1.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mood_state_spinner.setAdapter(adapter1); mood_state_spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener(){ @Override public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) { if(i>0){ mood_state = adapterView.getItemAtPosition(i).toString(); Toast.makeText(getApplicationContext(),mood_state+" is selected.",Toast.LENGTH_SHORT).show(); } else{ mood_state = null; } } @Override public void onNothingSelected(AdapterView<?> adapterView) { } }); adapter2 = ArrayAdapter.createFromResource(this,R.array.social_situation_array,android.R.layout.simple_spinner_item); adapter2.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); social_situation_spinner.setAdapter(adapter2); social_situation_spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) { if (i>0) { social_situation = adapterView.getItemAtPosition(i).toString(); Toast.makeText(getApplicationContext(), social_situation + " is selected.", Toast.LENGTH_SHORT).show(); } else{ social_situation=null; } } @Override public void onNothingSelected(AdapterView<?> adapterView) { } }); create_edit_reason.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { } @Override public void 
onTextChanged(CharSequence charSequence, int i, int i1, int i2) { if (create_edit_reason.getText().toString().split("\\s+").length>3){ create_edit_reason.setError("Only the first 3 words will be recorded"); } else { reason = create_edit_reason.getText().toString(); } } @Override public void afterTextChanged(Editable editable) { } }); current_time_checkbox.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { simpleDatePicker.setEnabled(!current_time_checkbox.isChecked()); if(Build.VERSION.SDK_INT>=23) simpleTimePicker.setEnabled(!current_time_checkbox.isChecked()); } }); currentLocationCheckbox.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { InternetConnectionChecker checker = new InternetConnectionChecker(); Context context = getApplicationContext(); final boolean isOnline = checker.isOnline(context); if(!isOnline) { currentLocationCheckbox.setChecked(false); Toast.makeText(getApplicationContext(), "Location is not available when this device is offline.", Toast.LENGTH_LONG).show(); } // if(isOnline) { // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { // permissionLocationRequest(); // } // } add_location(); } }); Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(System.currentTimeMillis()); simpleDatePicker.init(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH), new DatePicker.OnDateChangedListener() { @Override public void onDateChanged(DatePicker datePicker, int year, int month, int dayOfMonth) { set_year = simpleDatePicker.getYear(); set_month = simpleDatePicker.getMonth(); set_day = simpleDatePicker.getDayOfMonth(); } }); /** * API need to be greater than or equal to 23 to use the getHour() and getMinute() */ simpleTimePicker.setOnTimeChangedListener(new TimePicker.OnTimeChangedListener() { public void onTimeChanged(TimePicker view, int hourOfDay, int minute) { set_hour = simpleTimePicker.getHour(); set_minute = 
simpleTimePicker.getMinute(); } }); } /** * Set the tool bar * tick on right up corner */ public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_create_edit_mood, menu); return true; } /** * whenever the right up corner's tick is clicked * get the real_time() use as the "ID" * call the setMoodEvent function */ @Override public boolean onOptionsItemSelected(MenuItem item) { //handle presses on the action bar items switch (item.getItemId()) { case R.id.action_add_tick: SharedPreferences pref = getSharedPreferences("data", MODE_PRIVATE); current_user = pref.getString("currentUser", ""); if (mood_state == null) { Toast.makeText(getApplicationContext(), "Choose a mood state", Toast.LENGTH_SHORT).show(); break; } else { if (current_time_checkbox.isChecked()) { dateOfRecord = real_time(); } else { dateOfRecord = new GregorianCalendar(set_year, set_month + 1, set_day, set_hour, set_minute); } try { setMoodEvent(current_user, mood_state, social_situation, reason, imageForElasticSearch,currentLocation); } catch (MoodStateNotAvailableException e) { Log.i("Error", "(MoodState is Not Available"); } setResult(RESULT_OK); finish(); } return true; case R.id.expanded_menu_camera: if (Build.VERSION.SDK_INT >= 23) { if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { requestPermissions(new String[]{Manifest.permission.CAMERA}, 12345); } else { takeAPhoto(); } } else { takeAPhoto(); } return true; case R.id.expanded_menu_gallery: if (ContextCompat.checkSelfPermission(CreateEditMoodActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { ActivityCompat.requestPermissions(CreateEditMoodActivity.this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1); } else { openAlbum(); } return true; } return super.onOptionsItemSelected(item); } /** * Using GPS to add current location. 
*/ public void add_location(){ if (currentLocationCheckbox.isChecked()) { try { CurrentLocation locationListener = new CurrentLocation(); LocationManager locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE); locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, locationListener); Location location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER); if( location != null ) { int latitude = (int) (location.getLatitude() * 1E6); int longitude = (int) (location.getLongitude() * 1E6); currentLocation = new GeoPoint(latitude, longitude); } } catch (SecurityException e) { e.printStackTrace(); } } else { currentLocation = null; } } /** * pass the data in * * @param current_user the current user * @param mood_state the mood state * @param social_situation the social situation * @param reason set the mood event and push it to online server * @param imageForElasticSearch the image for elastic search * @param currLocation the curr location * @throws MoodStateNotAvailableException the mood state not available exception */ public void setMoodEvent(String current_user, String mood_state, String social_situation, String reason, ImageForElasticSearch imageForElasticSearch, GeoPoint currLocation) throws MoodStateNotAvailableException{ User user = new User(); // When the moodEvent has been created, check for internet connection. // If online, sync to Elastic search and save locally. 
// If offline, save locally InternetConnectionChecker checker = new InternetConnectionChecker(); Context context = getApplicationContext(); final boolean isOnline = checker.isOnline(context); if(isOnline) { String query = current_user; ElasticsearchUserController.GetUserTask getUserTask = new ElasticsearchUserController.GetUserTask(); getUserTask.execute(query); try { user = getUserTask.get(); } catch (Exception e) { Log.i("Error", "Failed to get the User out of the async object"); } } else{ LoadFile load = new LoadFile(); user = load.loadUser(context); SharedPreferences.Editor editor = getSharedPreferences("data", MODE_PRIVATE).edit(); editor.putBoolean("hasBeenOffline", true); editor.apply(); } MoodEvent moodEvent = new MoodEvent(mood_state, current_user); moodEvent.setSocialSituation(social_situation); moodEvent.setTriggerText(reason); GregorianCalendar realT = real_time(); moodEvent.setRealtime(realT); moodEvent.setDateOfRecord(dateOfRecord); if (currLocation != null) { moodEvent.setLocation(currLocation); } if (imageForElasticSearch != null) { String uniqueID = realT.getTime().toString().replaceAll("\\s", "") + current_user; uniqueID = uniqueID.replaceAll(":",""); String OriginID = moodEvent.getPicId(); moodEvent.setPicId(uniqueID); if (isOnline){ uploadImage(imageForElasticSearch, uniqueID); ElasticsearchImageOfflineController elasticsearchImageOfflineController = new ElasticsearchImageOfflineController(); elasticsearchImageOfflineController.AddImageTask(context,imageForElasticSearch.getImageBase64(),uniqueID,null); }else { ElasticsearchImageOfflineController elasticsearchImageOfflineController = new ElasticsearchImageOfflineController(); elasticsearchImageOfflineController.AddImageTask(context,imageForElasticSearch.getImageBase64(),uniqueID,null); } } MoodEventList moodEventList = user.getMoodEventList(); moodEventList.addMoodEvent(moodEvent); if(isOnline) { ElasticsearchUserController.AddUserTask addUserTask = new ElasticsearchUserController.AddUserTask(); 
addUserTask.execute(user); SaveFile s = new SaveFile(context, user); } else{ SaveFile s = new SaveFile(context, user); SharedPreferences.Editor editor = getSharedPreferences("data", MODE_PRIVATE).edit(); editor.putBoolean("hasBeenOffline", true); editor.apply(); } } /** * This class allow the user to get the real time * * @return time gregorian calendar */ public GregorianCalendar real_time(){ GregorianCalendar time; GregorianCalendar current = new GregorianCalendar(); date = current.getTime(); SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.S"); fmt.applyPattern("yyyy MM dd HH mm ss"); try { date = fmt.parse(date.toString()); } catch (ParseException e) { Log.i("error message", ""); } test = fmt.format(date); String[] splited = test.split("\\s+"); int year = 0, month = 0, day = 0, hour = 0, minute = 0, second = 0; try { year = Integer.parseInt(splited[0]); } catch (NumberFormatException nfe) { Log.i("Error message","NumberFormatException"); } try { month = Integer.parseInt(splited[1]); } catch (NumberFormatException nfe) { Log.i("Error message","NumberFormatException"); } try { day = Integer.parseInt(splited[2]); } catch (NumberFormatException nfe) { Log.i("Error message","NumberFormatException"); } try { hour = Integer.parseInt(splited[3]); } catch (NumberFormatException nfe) { Log.i("Error message","NumberFormatException"); } try { minute = Integer.parseInt(splited[4]); } catch (NumberFormatException nfe) { Log.i("Error message","NumberFormatException"); } try { second = Integer.parseInt(splited[5]); } catch (NumberFormatException nfe){ Log.i("Error message","NumberFormatException"); } time = new GregorianCalendar(year, month, day, hour, minute, second); return time; } /** * Enter the camera to take photo */ private void takeAPhoto() { File folder = new File(getExternalCacheDir(), "output_img.jpg"); try { if (folder.exists()){ folder.delete(); } folder.createNewFile(); } catch (IOException e) { e.printStackTrace(); } if (Build.VERSION.SDK_INT 
>= 24) { imageFileUri = FileProvider.getUriForFile(CreateEditMoodActivity.this, "com.example.mac.bugfree.fileprovider", folder); } else { imageFileUri = Uri.fromFile(folder); } Intent intent = new Intent("android.media.action.IMAGE_CAPTURE"); intent.putExtra(MediaStore.EXTRA_OUTPUT, imageFileUri); startActivityForResult(intent, TAKE_PHOTO); } @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { switch (requestCode) { case 1: if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { openAlbum(); } else { Toast.makeText(this, "You denied the permission", Toast.LENGTH_SHORT).show(); } break; case 12345: if (grantResults[0] == PackageManager.PERMISSION_GRANTED) { takeAPhoto(); // Now user should be able to use camera } else { // Your app will not have this permission. Turn off all functions // that require this permission or it will force close like your // original question Toast.makeText(this, "You denied the permission", Toast.LENGTH_SHORT).show(); } break; default: } } protected void onActivityResult(int requestCode, int resultCode, Intent data) { switch (requestCode) { case TAKE_PHOTO: if (resultCode == RESULT_OK){ try { Bitmap bitmap = BitmapFactory. 
decodeStream(getContentResolver().openInputStream(imageFileUri)); pic_preview.setImageBitmap(bitmap); Image image = new Image(bitmap); imageForElasticSearch = new ImageForElasticSearch(image.getImageBase64()); } catch (FileNotFoundException e) { e.printStackTrace(); } } break; case CHOOSE_PHOTO: if (resultCode == RESULT_OK) { if (Build.VERSION.SDK_INT >= 19) { handleImageOnKitKat(data); } else { handleImageBeforeKitKat(data); } } break; // when go back to this intent case REQ_CODE_CHILD: if (resultCode == RESULT_OK){ // get the point Double lat = data.getDoubleExtra("chosenLocationLat",0); Double lon = data.getDoubleExtra("chosenLocationLon",0); String mess = data.getStringExtra("flag"); if (mess == null){ currentLocation = new GeoPoint(lat, lon); } else { currentLocation = null; } } break; default: break; } } @TargetApi(19) private void handleImageOnKitKat(Intent data) { String imagePath = null; Uri uri = data.getData(); if (DocumentsContract.isDocumentUri(this,uri)) { String docId = DocumentsContract.getDocumentId(uri); if ("com.android.providers.media.documents".equals(uri.getAuthority())) { String id = docId.split(":")[1]; String selection = MediaStore.Images.Media._ID + "=" +id; imagePath = getImagePath(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, selection); } else if ("com.android.providers.downloads.documents".equals(uri.getAuthority())) { Uri contentUri = ContentUris.withAppendedId(Uri. 
parse("content://downloads/public_downloads"), Long.valueOf(docId)); imagePath = getImagePath(contentUri, null); } } else if ("content".equalsIgnoreCase(uri.getScheme())) { imagePath = getImagePath(uri, null); } else if ("file".equalsIgnoreCase(uri.getScheme())) { imagePath = uri.getPath(); } displayImage(imagePath); } private void handleImageBeforeKitKat(Intent data) { Uri uri = data.getData(); String imagePath = getImagePath(uri, null); displayImage(imagePath); } /** * get the image path of album or camera * * @param uri uri * @param selection selected position * @return path */ private String getImagePath(Uri uri, String selection) { String path = null; Cursor cursor = getContentResolver().query(uri, null, selection, null, null); if (cursor != null) { if (cursor.moveToFirst()) { path = cursor.getString(cursor.getColumnIndex(MediaStore.Images.Media.DATA)); } cursor.close(); } return path; } /** * Get the image path and set the picture preview as selected images * @param imagePath imagepath */ private void displayImage(String imagePath) { if (imagePath != null) { Bitmap bitmap = BitmapFactory.decodeFile(imagePath); Image image = new Image(bitmap); imageForElasticSearch = new ImageForElasticSearch(image.getImageBase64()); pic_preview.setImageBitmap(bitmap); } else { Toast.makeText(this, "failed to get image", Toast.LENGTH_SHORT).show(); } } /** * allow users to open the album and select photos from gallery */ private void openAlbum(){ Intent intent = new Intent("android.intent.action.GET_CONTENT"); intent.setType("image/*"); startActivityForResult(intent, CHOOSE_PHOTO); } @Override protected void onStart() { super.onStart(); InternetConnectionChecker checker = new InternetConnectionChecker(); Context context = getApplicationContext(); final boolean isOnline = checker.isOnline(context); if(isOnline) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { permissionLocationRequest(); } } } /** * Grand the gps permission */ private void permissionLocationRequest() { 
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { int hasLocationPermission = checkSelfPermission(Manifest.permission.ACCESS_FINE_LOCATION); if (hasLocationPermission != PackageManager.PERMISSION_GRANTED) { if(!shouldShowRequestPermissionRationale(Manifest.permission.ACCESS_FINE_LOCATION)) { showMessageOKCancel("You need to allow access to Location", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { requestPermissions(new String[] {Manifest.permission.ACCESS_FINE_LOCATION}, REQUEST_CODE_ASK_PERMISSIONS); } }); } } } } private void showMessageOKCancel(String message, DialogInterface.OnClickListener okListener) { new AlertDialog.Builder(CreateEditMoodActivity.this) .setMessage(message) .setPositiveButton("OK", okListener) .setNegativeButton("Cancel", null) .create() .show(); } /** * set the image and push it to online server * @param ifes * @param uniqueId */ private void uploadImage (ImageForElasticSearch ifes, String uniqueId){ ifes.setUniqueId(uniqueId); ElasticsearchImageController.AddImageTask addImageTask = new ElasticsearchImageController.AddImageTask(); addImageTask.execute(ifes); } /** * When the Location TextView is chosen, if it is online then change to the map, * if it is offline then send the warning message * * @param v the view */ public void chooseLocation(View v) { InternetConnectionChecker checker = new InternetConnectionChecker(); Context context = getApplicationContext(); final boolean isOnline = checker.isOnline(context); if(isOnline) { if (currentLocationCheckbox.isChecked()) { Toast.makeText(getApplicationContext(), "Sorry, You have already chosen CURRENT LOCATION.", Toast.LENGTH_LONG).show(); } else { Intent child = new Intent(getApplicationContext(), ChooseLocationOnMapActivity.class); startActivityForResult(child, REQ_CODE_CHILD); } } else{ Toast.makeText(getApplicationContext(), "Map is not available when this device is offline.", Toast.LENGTH_LONG).show(); } } }
Political Participation in Britain: a Research Agenda for a New Study Historically speaking, the theme of political participation and the set of issues connected with it are as old as politics itself, because they touch on some of the most central and perennial questions of political life – who decides, where are the boundaries of community and citizenship to be drawn, who benefits, how will decisions be made? However, beyond this, participation has from time to time become a particularly central and salient issue in British politics. In the seventeenth century the issues revolved around the ‘claims of the gentry and merchant classes to play a larger part in the making of government policy’. In the nineteenth and early twentieth centuries, the issue moved on to representation of the non-propertied classes – the town worker, the rural worker – and, latterly, universal suffrage.
def _OutputUrlsForCommandLine(urls, file_handle=None): file_handle = file_handle or sys.stdout def _StartsWithHttp(url): return url.startswith('https://') or url.startswith('http://') urls = [u if _StartsWithHttp(u) else 'https://%s' % u for u in urls] file_handle.write('Affected bugs: %s\n' % ' '.join(urls))
<filename>src/app/store/dummy/dummy.actions.ts
import { createAction, props } from '@ngrx/store';
import { DummyInterface } from './dummy.interface';

/**
 * Dispatched to start loading the Dummy entity.
 */
export const Load = createAction('[DUMMY] LOAD');

/**
 * Dispatched when loading the Dummy entity fails.
 */
export const LoadFail = createAction(
  '[DUMMY] LOAD FAIL',
  props<{
    /**
     * Human-readable description of why the load failed.
     */
    errorMessage: string;
  }>(),
);

/**
 * Dispatched when the Dummy entity has been loaded successfully.
 */
export const LoadSuccess = createAction(
  '[DUMMY] LOAD SUCCESS',
  props<{
    /**
     * The loaded Dummy entity, consumed by the reducer.
     */
    entity: DummyInterface;
  }>(),
);

/**
 * Dispatched to clear a previously stored error message.
 */
export const ClearError = createAction('[DUMMY] CLEAR ERROR');
/**
 * Workaround for WELD-2245 which prevents the use of @ApplicationScoped for EntityManagerFactory directly.
 *
 * <p>Holds a single application-wide {@link EntityManagerFactory} and produces
 * request-scoped {@link EntityManager} instances from it.
 */
@ApplicationScoped
public class EntityManagerFactoryHolder {

    // Application-wide factory; created once at startup, closed on shutdown.
    private EntityManagerFactory emf;

    /** Creates the factory for the persistence unit named "default". */
    @PostConstruct
    public void init() {
        this.emf = Persistence.createEntityManagerFactory("default", null);
    }

    /** Closes the factory on application shutdown, if it is still open. */
    @PreDestroy
    public void destroy() {
        if (emf.isOpen()) {
            emf.close();
        }
    }

    /**
     * Produces a fresh {@link EntityManager} per request.
     * Disposal is handled by {@link #dispose(EntityManager)}.
     */
    @Produces
    @RequestScoped
    public EntityManager getEntityManager() {
        return emf.createEntityManager();
    }

    /** Closes a produced {@link EntityManager} at the end of its request scope. */
    public void dispose(@Disposes EntityManager entityManager) {
        if (entityManager.isOpen()) {
            entityManager.close();
        }
    }

    /** Exposes the underlying factory for injection points that need it directly. */
    @Produces
    public EntityManagerFactory getEntityManagerFactory() {
        return emf;
    }
}
export * from './components/tag/Tag';
/* * Public Domain Software * * I (<NAME>) am the author of the source code in this file. * I have placed the source code in this file in the public domain. * * For further information see: http://creativecommons.org/publicdomain/zero/1.0/ */ /* Package errorutil contains common error objects and functions. */ package errorutil import ( "bytes" ) /* AssertOk will panic on any non-nil error parameter. */ func AssertOk(err error) { if err != nil { panic(err.Error()) } } /* AssertTrue will panic if the given condition is negative. */ func AssertTrue(condition bool, errString string) { if !condition { panic(errString) } } /* CompositeError can collect multiple errors in a single error object. */ type CompositeError struct { Errors []error } /* NewCompositeError creates a new composite error object. */ func NewCompositeError() *CompositeError { return &CompositeError{make([]error, 0)} } /* Add adds an error. */ func (ce *CompositeError) Add(e error) { ce.Errors = append(ce.Errors, e) } /* HasErrors returns true if any error have been collected. */ func (ce *CompositeError) HasErrors() bool { return len(ce.Errors) > 0 } /* Error returns all collected errors as a string. */ func (ce *CompositeError) Error() string { var buf bytes.Buffer for i, e := range ce.Errors { buf.WriteString(e.Error()) if i < len(ce.Errors)-1 { buf.WriteString("; ") } } return buf.String() }
/**
 * Created by macbury on 26.10.14.
 *
 * Incremental traversal of a ray through a uniform voxel grid: after
 * begin(), each call to next() advances to the next voxel the ray enters and
 * get() returns its integer grid index. The stepping compares the per-axis
 * parametric distances in {@code max} and advances along the axis whose
 * boundary is crossed first.
 */
public class VoxelIntersector {
    /** Edge length of one voxel on each axis. */
    private final Vector3 size = new Vector3();
    /** World-space offset of the grid origin. */
    private final Vector3 off = new Vector3();
    /** Ray origin in world space. */
    private final Vector3 pos = new Vector3();
    /** Normalized ray direction. */
    private final Vector3 dir = new Vector3();
    /** Grid index of the voxel currently visited. */
    private final Vector3i index = new Vector3i();
    /** Per-axis parametric step between successive voxel boundaries. */
    private final Vector3 delta = new Vector3();
    /** Per-axis step direction: -1, 0 or +1. */
    private final Vector3i sign = new Vector3i();
    /** Per-axis parametric distance to the next voxel boundary. */
    private final Vector3 max = new Vector3();
    /** Maximum number of voxels next() will yield. */
    private int limit;
    /** Number of voxels visited so far. */
    private int plotted;

    public VoxelIntersector(Vector3 voxelSize) {
        size.set(voxelSize);
    }

    /**
     * Starts a new traversal.
     *
     * @param offset    grid origin offset in world units
     * @param origin    ray start position (integer world coordinates)
     * @param direction ray direction; normalized internally
     * @param limit     maximum number of voxels next() will yield
     */
    public void begin(Vector3i offset, Vector3i origin, Vector3 direction, int limit) {
        off.set( offset.x, offset.y, offset.z );
        this.limit = limit;
        pos.set( origin.x, origin.y, origin.z );
        dir.set( direction ).nor();
        delta.set(dir.x / size.x, dir.y / size.y, dir.z / size.z);
        sign.x = (dir.x > 0) ? 1 : (dir.x < 0 ? -1 : 0);
        sign.y = (dir.y > 0) ? 1 : (dir.y < 0 ? -1 : 0);
        sign.z = (dir.z > 0) ? 1 : (dir.z < 0 ? -1 : 0);
        reset();
    }

    /**
     * Advances to the next voxel along the ray; the first call keeps the
     * starting voxel. Returns false once more than {@code limit} voxels have
     * been visited.
     *
     * NOTE(review): both axis comparisons use strict '&lt;', so when two axes
     * tie (e.g. mx == mz &lt; my) neither fires and Y is stepped instead.
     * Also, when a direction component is 0 its {@code max} entry is
     * Infinity and sign*Infinity = 0*Infinity = NaN, which makes every
     * comparison involving that axis false — confirm that axis-aligned rays
     * step as intended.
     */
    public boolean next() {
        if (plotted++ > 0) {
            float mx = sign.x * max.x;
            float my = sign.y * max.y;
            float mz = sign.z * max.z;
            if (mx < my && mx < mz) {
                max.x += delta.x;
                index.x += sign.x;
            } else if (mz < my && mz < mx) {
                max.z += delta.z;
                index.z += sign.z;
            } else {
                max.y += delta.y;
                index.y += sign.y;
            }
        }
        return (plotted <= limit);
    }

    /** Returns the grid index of the voxel reached by the last next() call. */
    public Vector3i get(){
        return index;
    }

    /** Computes the starting voxel index and the initial boundary distances. */
    private void reset() {
        plotted = 0;
        index.x = (int)Math.floor((pos.x - off.x) / size.x);
        index.y = (int)Math.floor((pos.y - off.y) / size.y);
        index.z = (int)Math.floor((pos.z - off.z) / size.z);
        // World-space coordinates of the current voxel's lower corner.
        float ax = index.x * size.x + off.x;
        float ay = index.y * size.y + off.y;
        float az = index.z * size.z + off.z;
        // Distance from the ray origin to the first boundary on each axis.
        max.x = (sign.x > 0) ? ax + size.x - pos.x : pos.x - ax;
        max.y = (sign.y > 0) ? ay + size.y - pos.y : pos.y - ay;
        max.z = (sign.z > 0) ? az + size.z - pos.z : pos.z - az;
        // Convert world distances to ray-parameter distances; division by a
        // zero direction component intentionally yields Infinity.
        max.set( max.x / dir.x, max.y / dir.y, max.z / dir.z );
    }

    /** Aborts the traversal so that the next next() call returns false. */
    public void end() {
        plotted = limit + 1;
    }
}
/**
 * @date: 2019/1/21 11:23
 * @author: Chunjiang Mao
 * @classname: Keyboard
 * @describe: Event names and height holder for soft-keyboard visibility changes.
 */
public class Keyboard {

    /**
     * Soft keyboard hidden.
     */
    public static final String EVENT_KEYBOARD_HIDE = "keyboardHide";

    /**
     * Soft keyboard shown.
     */
    public static final String EVENT_KEYBOARD_SHOW = "keyboardShow";

    // Keyboard height (presumably in pixels — TODO confirm at the call site).
    private int height;

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }
}
import sys;input=sys.stdin.readline

# Number of test cases.
T, = map(int, input().split())
for _ in range(T):
    # N commands; each command (t, x) retargets the moving point to x at time t.
    N, = map(int, input().split())
    c = 0      # current position
    R = 0      # answer: number of commands counted as satisfied
    suc = 1    # 1 while the previous target has been reached (new commands accepted)
    moku = 0   # current target position
    for i in range(N+1):
        if i < N:
            t, x = map(int, input().split())
        else:
            # Sentinel far in the future so the final segment is fully simulated.
            t, x = 10**18, 0
        if i > 0:
            # Advance from time bt to t at velocity hou (-1, 0 or +1).
            nc = c + hou*(t-bt)
            # Stop at the target if it lies within this segment's sweep.
            if min(nc,c) <= moku <= max(nc, c):
                nc = moku
            suc = (nc==moku)
            # NOTE(review): command i-1 counts as satisfied if its position b
            # was swept over during this segment — confirm against problem statement.
            if min(nc,c) <= b <= max(nc, c):
                R += 1
            # print(t, x, c, nc, moku)
            c = nc
        if suc:
            # Accept the new command: retarget and set the movement direction.
            moku = x
            hou = 1 if moku-c > 0 else 0
            if x-c<0: hou = -1
            b = x    # position checked for command i during the next segment
            bt = t   # time command i was issued
    print(R)
//visualize a functional object evaluation - draws a line class myFuncVisObj extends myBaseDistVisObj{ public myFuncVisObj(myDistFuncHistVis _owner, int[][] _clrs) { super(_owner, _clrs); } @Override protected void _drawCurve(IRenderInterface pa, float offset) { pa.drawEllipse2D(dispVals[0][0], dispVals[0][1], 5.0f,5.0f); for (int idx = 1; idx <dispVals.length;++idx) { //draw point pa.drawEllipse2D(dispVals[idx][0], dispVals[idx][1], 5.0f,5.0f); //draw line between points pa.drawLine(dispVals[idx-1][0], dispVals[idx-1][1], 0, dispVals[idx][0], dispVals[idx][1], 0); } drawAxes(pa, 0); }//_drawCurve }
/**
 * Constructs input vectors which have a fixed size and at position i contain the index of item i.
 * When the sampling strategy is {@code DATA_SAMPL_SUBSAMPLE}, additionally emits one prefix
 * vector per position, containing the indices of items [0, i).
 *
 * @param samplingStrategy original data or subsample a session
 * @param maxSize vector size; items beyond this position are ignored
 * @return one vector per session (plus prefix vectors when subsampling)
 */
public List<double[]> fetchTrainContSessionVectors(String samplingStrategy, int maxSize) {
    Map<String, List<String>> trainSessionSequence = mapTrainSessionSequence();
    List<double[]> sessionVectors = new ArrayList<>();
    trainSessionSequence.keySet().stream().forEach(
            (sessionId) -> {
                List<String> itemSequence = trainSessionSequence.get(sessionId);
                double[] originalVector = new double[maxSize];
                IntStream.range(0, itemSequence.size()).forEach(
                        (sequenceIndex) -> {
                            if (sequenceIndex >= maxSize) {
                                // Vector is full; skip the rest of the session.
                                return;
                            }
                            String item = itemSequence.get(sequenceIndex);
                            // NOTE(review): unboxing throws NPE if an item is absent from
                            // popItemIndicies — TODO confirm all session items are indexed.
                            Double index = popItemIndicies.get(item);
                            originalVector[sequenceIndex] = index;
                            if (samplingStrategy.equals(ConfConstants.DATA_SAMPL_SUBSAMPLE)) {
                                // Prefix vector holding items [0, sequenceIndex).
                                double[] subVector = new double[maxSize];
                                IntStream.range(0, sequenceIndex).forEach(
                                        (subSequenceIndex) -> {
                                            String subItem = itemSequence.get(subSequenceIndex);
                                            Double subIndex = popItemIndicies.get(subItem);
                                            // BUGFIX: was subVector[sequenceIndex], which wrote every
                                            // prefix item into the same slot (last one winning) and
                                            // left the rest of the vector zeroed.
                                            subVector[subSequenceIndex] = subIndex;
                                        });
                                sessionVectors.add(subVector);
                            }
                        });
                sessionVectors.add(originalVector);
            });
    return sessionVectors;
}
<reponame>HiltonRoscoe/exchangerxml /* * $Id: BreakpointProperties.java,v 1.1 2004/03/25 18:56:35 edankert Exp $ * * Copyright (C) 2002, Cladonia Ltd. All rights reserved. * * This software is the proprietary information of Cladonia Ltd. * Use is subject to license terms. */ package com.cladonia.xngreditor.scenario; import com.cladonia.xml.XElement; import com.cladonia.xml.properties.Properties; /** * Handles the properties for a Transformation scenario. * * @version $Revision: 1.1 $, $Date: 2004/03/25 18:56:35 $ * @author Dogsbay */ public class BreakpointProperties extends Properties { public static final String BREAKPOINT = "breakpoint"; private static final String URL = "url"; private static final String LINE = "line"; private static final String ENABLED = "enabled"; /** * Constructor for the scenario properties. * * @param element the element that contains the properties, * for the scenario-type. */ public BreakpointProperties( XElement element) { super( element); } /** * Constructor for the scenario properties. * * @param props the higher level properties object. */ public BreakpointProperties( Properties props) { super( props.getElement()); } /** * Constructor for the scenario properties, creates a copy. * * @param props the original to copy. */ public BreakpointProperties( BreakpointProperties original) { super( new XElement( BREAKPOINT)); setURL( original.getURL()); setLine( original.getLine()); } /** * Constructor for the scenario properties, creates a copy. * * @param props the original to copy. */ public BreakpointProperties( String url, int line, boolean enabled) { super( new XElement( BREAKPOINT)); setURL( url); setLine( line); setEnabled( enabled); } /** * Constructor for a new scenario properties object. */ public BreakpointProperties() { super( new XElement( BREAKPOINT)); } /** * Return the url. * * @return the url. */ public String getURL() { return getText( URL); } /** * Set the url. * * @param url the breakpoint url. 
*/ public void setURL( String url) { set( URL, url); } /** * Return the line number. * * @return the line number. */ public int getLine() { return getInteger( LINE); } /** * Set the line number. * * @param line the line number. */ public void setLine( int line) { set( LINE, line); } /** * Return wether this breakpoint is enabled. * * @return true when this breakpoint is enabled. */ public boolean isEnabled() { return getBoolean( ENABLED, true); } /** * Set wether this breakpoint is enabled. * * @param enabled the enables the breakpoint. */ public void setEnabled( boolean enabled) { set( ENABLED, enabled); } }
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) 2020-2021 JetBrains s.r.o. * Licensed under the MIT License. See License.txt in the project root for license information. */ package com.microsoft.intellij.serviceexplorer.azure.storage; import com.intellij.openapi.project.Project; import com.microsoft.intellij.forms.CreateQueueForm; import com.microsoft.tooling.msservices.helpers.Name; import com.microsoft.tooling.msservices.serviceexplorer.AzureActionEnum; import com.microsoft.tooling.msservices.serviceexplorer.NodeActionEvent; import com.microsoft.tooling.msservices.serviceexplorer.NodeActionListener; import com.microsoft.tooling.msservices.serviceexplorer.azure.storage.QueueModule; @Name("New Queue") public class CreateQueueAction extends NodeActionListener { private QueueModule queueModule; public CreateQueueAction(QueueModule queueModule) { this.queueModule = queueModule; } @Override public void actionPerformed(NodeActionEvent e) { CreateQueueForm form = new CreateQueueForm((Project) queueModule.getProject()); form.setStorageAccount(queueModule.getStorageAccount()); form.setOnCreate(() -> { queueModule.removeAllChildNodes(); queueModule.load(false); }); form.show(); } @Override public AzureActionEnum getAction() { return AzureActionEnum.CREATE; } }
/** * Implementation of the Sardine interface. This is where the meat of the Sardine library lives. * * @author jonstevens */ public class SardineImpl implements Sardine { private static final Logger log = Logger.getLogger(DavResource.class.getName()); private static final String UTF_8 = "UTF-8"; /** * HTTP client implementation */ protected CloseableHttpClient client; /** * HTTP client configuration */ private HttpClientBuilder builder; /** * Local context with authentication cache. Make sure the same context is used to execute * logically related requests. */ protected HttpClientContext context = HttpClientContext.create(); /** * Access resources with no authentication */ public SardineImpl() { this.builder = this.configure(null, null); this.client = this.builder.build(); } /** * Access resources with Bearer authorization */ public SardineImpl(String bearerAuth) { Header bearerHeader = new BasicHeader("Authorization", "Bearer " + bearerAuth); this.builder = this.configure(null, null).setDefaultHeaders(Collections.singletonList(bearerHeader)); this.client = this.builder.build(); } /** * Supports standard authentication mechanisms * * @param username Use in authentication header credentials * @param password Use in authentication header credentials */ public SardineImpl(String username, String password) { this.builder = this.configure(null, this.createDefaultCredentialsProvider(username, password, null, null)); this.client = this.builder.build(); } /** * @param username Use in authentication header credentials * @param password Use in authentication header credentials * @param selector Proxy configuration */ public SardineImpl(String username, String password, ProxySelector selector) { this.builder = this.configure(selector, this.createDefaultCredentialsProvider(username, password, null, null)); this.client = this.builder.build(); } /** * @param builder Custom client configuration */ public SardineImpl(HttpClientBuilder builder) { this.builder = builder; 
this.client = this.builder.build(); } /** * @param builder Custom client configuration * @param username Use in authentication header credentials * @param password Use in authentication header credentials */ public SardineImpl(HttpClientBuilder builder, String username, String password) { this.builder = builder; this.setCredentials(username, password); this.client = this.builder.build(); } /** * Add credentials to any scope. Supports Basic, Digest and NTLM authentication methods. * * @param username Use in authentication header credentials * @param password Use in authentication header credentials */ @Override public void setCredentials(String username, String password) { this.setCredentials(username, password, "", ""); } /** * @param username Use in authentication header credentials * @param password Use in authentication header credentials * @param domain NTLM authentication * @param workstation NTLM authentication */ @Override public void setCredentials(String username, String password, String domain, String workstation) { this.setCredentials(this.createDefaultCredentialsProvider(username, password, domain, workstation)); } public void setCredentials(CredentialsProvider provider) { this.context.setCredentialsProvider(provider); this.context.setAttribute(HttpClientContext.TARGET_AUTH_STATE, new AuthState()); } private CredentialsProvider createDefaultCredentialsProvider(String username, String password, String domain, String workstation) { CredentialsProvider provider = new BasicCredentialsProvider(); if (username != null) { provider.setCredentials( new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.NTLM), new NTCredentials(username, password, workstation, domain)); provider.setCredentials( new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.BASIC), new UsernamePasswordCredentials(username, password)); provider.setCredentials( new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, 
AuthScope.ANY_REALM, AuthSchemes.DIGEST), new UsernamePasswordCredentials(username, password)); provider.setCredentials( new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO), new NTCredentials(username, password, workstation, domain)); provider.setCredentials( new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.KERBEROS), new UsernamePasswordCredentials(username, password)); } return provider; } /** * Adds handling of GZIP compression to the client. */ @Override public void enableCompression() { this.builder.addInterceptorLast(new RequestAcceptEncoding()); this.builder.addInterceptorLast(new ResponseContentEncoding()); this.client = this.builder.build(); } /** * Disable GZIP compression header. */ @Override public void disableCompression() { this.builder.disableContentCompression(); this.client = this.builder.build(); } /** * Ignores cookies by always returning the IgnoreSpecFactory regardless of the cookieSpec value being looked up. 
*/ @Override public void ignoreCookies() { this.builder.setDefaultCookieSpecRegistry(new Lookup<CookieSpecProvider>() { @Override public CookieSpecProvider lookup(String name) { return new IgnoreSpecProvider(); } }); this.client = this.builder.build(); } @Override public void enablePreemptiveAuthentication(String hostname) { enablePreemptiveAuthentication(hostname, -1, -1); } @Override public void enablePreemptiveAuthentication(URL url) { final String host = url.getHost(); final int port = url.getPort(); final String protocol = url.getProtocol(); final int httpPort; final int httpsPort; if ("https".equals(protocol)) { httpsPort = port; httpPort = -1; } else if ("http".equals(protocol)) { httpPort = port; httpsPort = -1; } else { throw new IllegalArgumentException("Unsupported protocol " + protocol); } enablePreemptiveAuthentication(host, httpPort, httpsPort); } @Override public void enablePreemptiveAuthentication(String hostname, int httpPort, int httpsPort) { enablePreemptiveAuthentication(hostname, httpPort, httpsPort, Consts.ISO_8859_1); } public void enablePreemptiveAuthentication(String hostname, int httpPort, int httpsPort, Charset credentialsCharset) { AuthCache cache = this.context.getAuthCache(); if (cache == null) { // Add AuthCache to the execution context cache = new BasicAuthCache(); this.context.setAuthCache(cache); } // Generate Basic preemptive scheme object and stick it to the local execution context BasicScheme basicAuth = new BasicScheme(credentialsCharset); // Configure HttpClient to authenticate preemptively by prepopulating the authentication data cache. 
cache.put(new HttpHost(hostname, httpPort, "http"), basicAuth); cache.put(new HttpHost(hostname, httpsPort, "https"), basicAuth); } @Override public void disablePreemptiveAuthentication() { this.context.removeAttribute(HttpClientContext.AUTH_CACHE); } @Override public List<DavResource> getResources(String url) throws IOException { return this.list(url); } @Override public List<DavResource> list(String url) throws IOException { return this.list(url, 1); } @Override public List<DavResource> list(String url, int depth) throws IOException { return list(url, depth, true); } @Override public List<DavResource> list(String url, int depth, boolean allProp) throws IOException { if (allProp) { Propfind body = new Propfind(); body.setAllprop(new Allprop()); return propfind(url, depth, body); } else { return list(url, depth, Collections.<QName>emptySet()); } } @Override public List<DavResource> list(String url, int depth, java.util.Set<QName> props) throws IOException { Propfind body = new Propfind(); Prop prop = new Prop(); ObjectFactory objectFactory = new ObjectFactory(); prop.setGetcontentlength(objectFactory.createGetcontentlength()); prop.setGetlastmodified(objectFactory.createGetlastmodified()); prop.setCreationdate(objectFactory.createCreationdate()); prop.setDisplayname(objectFactory.createDisplayname()); prop.setGetcontenttype(objectFactory.createGetcontenttype()); prop.setResourcetype(objectFactory.createResourcetype()); prop.setGetetag(objectFactory.createGetetag()); addCustomProperties(prop, props); body.setProp(prop); return propfind(url, depth, body); } @Override public List<DavResource> propfind(String url, int depth, java.util.Set<QName> props) throws IOException { Propfind body = new Propfind(); Prop prop = new Prop(); addCustomProperties(prop, props); body.setProp(prop); return propfind(url, depth, body); } private void addCustomProperties(Prop prop, java.util.Set<QName> props) { List<Element> any = prop.getAny(); for (QName entry : props) { Element element = 
SardineUtil.createElement(entry); any.add(element); } } protected List<DavResource> propfind(String url, int depth, Propfind body) throws IOException { HttpPropFind entity = new HttpPropFind(url); entity.setDepth(depth < 0 ? "infinity" : Integer.toString(depth)); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); List<DavResource> resources = new ArrayList<DavResource>(responses.size()); for (Response response : responses) { try { resources.add(new DavResource(response)); } catch (URISyntaxException e) { log.warning(String.format("Ignore resource with invalid URI %s", response.getHref().get(0))); } } return resources; } public <T> T report(String url, int depth, SardineReport<T> report) throws IOException { HttpReport entity = new HttpReport(url); entity.setDepth(depth < 0 ? "infinity" : Integer.toString(depth)); entity.setEntity(new StringEntity(report.toXml(), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); return report.fromMultistatus(multistatus); } public List<DavResource> search(String url, String language, String query) throws IOException { HttpEntityEnclosingRequestBase search = new HttpSearch(url); SearchRequest searchBody = new SearchRequest(language, query); String body = SardineUtil.toXml(searchBody); search.setEntity(new StringEntity(body, UTF_8)); Multistatus multistatus = this.execute(search, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); List<DavResource> resources = new ArrayList<DavResource>(responses.size()); for (Response response : responses) { try { resources.add(new DavResource(response)); } catch (URISyntaxException e) { log.warning(String.format("Ignore resource with invalid URI %s", response.getHref().get(0))); } } return resources; } @Override public void setCustomProps(String url, Map<String, String> 
set, List<String> remove) throws IOException { this.patch(url, SardineUtil.toQName(set), SardineUtil.toQName(remove)); } @Override public List<DavResource> patch(String url, Map<QName, String> setProps) throws IOException { return this.patch(url, setProps, Collections.<QName>emptyList()); } /** * Creates a {@link com.github.sardine.model.Propertyupdate} element containing all properties to set from setProps and all properties to * remove from removeProps. Note this method will use a {@link com.github.sardine.util.SardineUtil#CUSTOM_NAMESPACE_URI} as * namespace and {@link com.github.sardine.util.SardineUtil#CUSTOM_NAMESPACE_PREFIX} as prefix. */ @Override public List<DavResource> patch(String url, Map<QName, String> setProps, List<QName> removeProps) throws IOException { List<Element> setPropsElements = new ArrayList<Element>(); for (Entry<QName, String> entry : setProps.entrySet()) { Element element = SardineUtil.createElement(entry.getKey()); element.setTextContent(entry.getValue()); setPropsElements.add(element); } return this.patch(url, setPropsElements, removeProps); } /** * Creates a {@link com.github.sardine.model.Propertyupdate} element containing all properties to set from setProps and all properties to * remove from removeProps. Note this method will use a {@link com.github.sardine.util.SardineUtil#CUSTOM_NAMESPACE_URI} as * namespace and {@link com.github.sardine.util.SardineUtil#CUSTOM_NAMESPACE_PREFIX} as prefix. */ @Override public List<DavResource> patch(String url, List<Element> setProps, List<QName> removeProps) throws IOException { HttpPropPatch entity = new HttpPropPatch(url); // Build WebDAV <code>PROPPATCH</code> entity. 
Propertyupdate body = new Propertyupdate(); // Add properties { Set set = new Set(); body.getRemoveOrSet().add(set); Prop prop = new Prop(); // Returns a reference to the live list List<Element> any = prop.getAny(); for (Element element : setProps) { any.add(element); } set.setProp(prop); } // Remove properties { if (!removeProps.isEmpty()) { Remove remove = new Remove(); body.getRemoveOrSet().add(remove); Prop prop = new Prop(); // Returns a reference to the live list List<Element> any = prop.getAny(); for (QName entry : removeProps) { Element element = SardineUtil.createElement(entry); any.add(element); } remove.setProp(prop); } } entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); List<DavResource> resources = new ArrayList<DavResource>(responses.size()); for (Response response : responses) { try { resources.add(new DavResource(response)); } catch (URISyntaxException e) { log.warning(String.format("Ignore resource with invalid URI %s", response.getHref().get(0))); } } return resources; } @Override public String lock(String url) throws IOException { HttpLock entity = new HttpLock(url); Lockinfo body = new Lockinfo(); Lockscope scopeType = new Lockscope(); scopeType.setExclusive(new Exclusive()); body.setLockscope(scopeType); Locktype lockType = new Locktype(); lockType.setWrite(new Write()); body.setLocktype(lockType); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); // Return the lock token return this.execute(entity, new LockResponseHandler()); } @Override public String refreshLock(String url, String token, String file) throws IOException { HttpLock entity = new HttpLock(url); entity.setHeader("If", "<" + file + "> (<" + token + ">)"); return this.execute(entity, new LockResponseHandler()); } @Override public void unlock(String url, String token) throws IOException { HttpUnlock entity = new 
HttpUnlock(url, token); Lockinfo body = new Lockinfo(); Lockscope scopeType = new Lockscope(); scopeType.setExclusive(new Exclusive()); body.setLockscope(scopeType); Locktype lockType = new Locktype(); lockType.setWrite(new Write()); body.setLocktype(lockType); this.execute(entity, new VoidResponseHandler()); } @Override public void setAcl(String url, List<DavAce> aces) throws IOException { HttpAcl entity = new HttpAcl(url); // Build WebDAV <code>ACL</code> entity. Acl body = new Acl(); body.setAce(new ArrayList<Ace>()); for (DavAce davAce : aces) { // protected and inherited acl must not be part of ACL http request if (davAce.getInherited() != null || davAce.isProtected()) { continue; } Ace ace = davAce.toModel(); body.getAce().add(ace); } entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); this.execute(entity, new VoidResponseHandler()); } @Override public DavAcl getAcl(String url) throws IOException { HttpPropFind entity = new HttpPropFind(url); entity.setDepth("0"); Propfind body = new Propfind(); Prop prop = new Prop(); prop.setOwner(new Owner()); prop.setGroup(new Group()); prop.setAcl(new Acl()); body.setProp(prop); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); if (responses.isEmpty()) { return null; } else { return new DavAcl(responses.get(0)); } } @Override public DavQuota getQuota(String url) throws IOException { HttpPropFind entity = new HttpPropFind(url); entity.setDepth("0"); Propfind body = new Propfind(); Prop prop = new Prop(); prop.setQuotaAvailableBytes(new QuotaAvailableBytes()); prop.setQuotaUsedBytes(new QuotaUsedBytes()); body.setProp(prop); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); if (responses.isEmpty()) { 
return null; } else { return new DavQuota(responses.get(0)); } } @Override public List<DavPrincipal> getPrincipals(String url) throws IOException { HttpPropFind entity = new HttpPropFind(url); entity.setDepth("1"); Propfind body = new Propfind(); Prop prop = new Prop(); prop.setDisplayname(new Displayname()); prop.setResourcetype(new Resourcetype()); prop.setPrincipalURL(new PrincipalURL()); body.setProp(prop); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); if (responses.isEmpty()) { return null; } else { List<DavPrincipal> collections = new ArrayList<DavPrincipal>(); for (Response r : responses) { if (r.getPropstat() != null) { for (Propstat propstat : r.getPropstat()) { if (propstat.getProp() != null && propstat.getProp().getResourcetype() != null && propstat.getProp().getResourcetype().getPrincipal() != null) { collections.add(new DavPrincipal(DavPrincipal.PrincipalType.HREF, r.getHref().get(0), propstat.getProp().getDisplayname().getContent().get(0))); } } } } return collections; } } @Override public List<String> getPrincipalCollectionSet(String url) throws IOException { HttpPropFind entity = new HttpPropFind(url); entity.setDepth("0"); Propfind body = new Propfind(); Prop prop = new Prop(); prop.setPrincipalCollectionSet(new PrincipalCollectionSet()); body.setProp(prop); entity.setEntity(new StringEntity(SardineUtil.toXml(body), UTF_8)); Multistatus multistatus = this.execute(entity, new MultiStatusResponseHandler()); List<Response> responses = multistatus.getResponse(); if (responses.isEmpty()) { return null; } else { List<String> collections = new ArrayList<String>(); for (Response r : responses) { if (r.getPropstat() != null) { for (Propstat propstat : r.getPropstat()) { if (propstat.getProp() != null && propstat.getProp().getPrincipalCollectionSet() != null && 
propstat.getProp().getPrincipalCollectionSet().getHref() != null) { collections.addAll(propstat.getProp().getPrincipalCollectionSet().getHref()); } } } } return collections; } } @Override public ContentLengthInputStream get(String url) throws IOException { return this.get(url, Collections.<String, String>emptyMap()); } @Override public ContentLengthInputStream get(String url, Map<String, String> headers) throws IOException { List<Header> list = new ArrayList<Header>(); for (Map.Entry<String, String> h : headers.entrySet()) { list.add(new BasicHeader(h.getKey(), h.getValue())); } return this.get(url, list); } public ContentLengthInputStream get(String url, List<Header> headers) throws IOException { HttpGet get = new HttpGet(url); for (Header header : headers) { get.addHeader(header); } // Must use #execute without handler, otherwise the entity is consumed // already after the handler exits. HttpResponse response = this.execute(get); VoidResponseHandler handler = new VoidResponseHandler(); try { handler.handleResponse(response); // Will abort the read when closed before EOF. 
return new ContentLengthInputStream(new HttpMethodReleaseInputStream(response), response.getEntity().getContentLength()); } catch (IOException ex) { get.abort(); throw ex; } } @Override public void put(String url, byte[] data) throws IOException { this.put(url, data, null); } @Override public void put(String url, byte[] data, String contentType) throws IOException { ByteArrayEntity entity = new ByteArrayEntity(data); this.put(url, entity, contentType, true); } @Override public void put(String url, InputStream dataStream) throws IOException { this.put(url, dataStream, (String) null); } @Override public void put(String url, InputStream dataStream, String contentType) throws IOException { this.put(url, dataStream, contentType, true); } @Override public void put(String url, InputStream dataStream, String contentType, boolean expectContinue) throws IOException { // A length of -1 means "go until end of stream" put(url, dataStream, contentType, expectContinue, -1); } @Override public void put(String url, InputStream dataStream, String contentType, boolean expectContinue, long contentLength) throws IOException { InputStreamEntity entity = new InputStreamEntity(dataStream, contentLength); this.put(url, entity, contentType, expectContinue); } @Override public void put(String url, InputStream dataStream, Map<String, String> headers) throws IOException { List<Header> list = new ArrayList<Header>(); for (Map.Entry<String, String> h : headers.entrySet()) { list.add(new BasicHeader(h.getKey(), h.getValue())); } this.put(url, dataStream, list); } public void put(String url, InputStream dataStream, List<Header> headers) throws IOException { // A length of -1 means "go until end of stream" InputStreamEntity entity = new InputStreamEntity(dataStream, -1); this.put(url, entity, headers); } /** * Upload the entity using <code>PUT</code> * * @param url Resource * @param entity The entity to read from * @param contentType Content Type header * @param expectContinue Add <code>Expect: 
continue</code> header */ public void put(String url, HttpEntity entity, String contentType, boolean expectContinue) throws IOException { List<Header> headers = new ArrayList<Header>(); if (contentType != null) { headers.add(new BasicHeader(HttpHeaders.CONTENT_TYPE, contentType)); } if (expectContinue) { headers.add(new BasicHeader(HTTP.EXPECT_DIRECTIVE, HTTP.EXPECT_CONTINUE)); } this.put(url, entity, headers); } /** * Upload the entity using <code>PUT</code> * * @param url Resource * @param entity The entity to read from * @param headers Headers to add to request */ public void put(String url, HttpEntity entity, List<Header> headers) throws IOException { this.put(url, entity, headers, new VoidResponseHandler()); } public <T> T put(String url, HttpEntity entity, List<Header> headers, ResponseHandler<T> handler) throws IOException { HttpPut put = new HttpPut(url); put.setEntity(entity); for (Header header : headers) { put.addHeader(header); } if (entity.getContentType() == null && !put.containsHeader(HttpHeaders.CONTENT_TYPE)) { put.addHeader(HttpHeaders.CONTENT_TYPE, HTTP.DEF_CONTENT_CHARSET.name()); } try { return this.execute(put, handler); } catch (HttpResponseException e) { if (e.getStatusCode() == HttpStatus.SC_EXPECTATION_FAILED) { // Retry with the Expect header removed put.removeHeaders(HTTP.EXPECT_DIRECTIVE); if (entity.isRepeatable()) { return this.execute(put, handler); } } throw e; } } @Override public void put(String url, File localFile, String contentType) throws IOException { //don't use ExpectContinue for repetable FileEntity, some web server (IIS for exmaple) may return 400 bad request after retry put(url, localFile, contentType, false); } @Override public void put(String url, File localFile, String contentType, boolean expectContinue) throws IOException { FileEntity content = new FileEntity(localFile); this.put(url, content, contentType, expectContinue); } @Override public void delete(String url) throws IOException { HttpDelete delete = new 
HttpDelete(url); this.execute(delete, new VoidResponseHandler()); } @Override public void move(String sourceUrl, String destinationUrl) throws IOException { move(sourceUrl, destinationUrl, true); } @Override public void move(String sourceUrl, String destinationUrl, boolean overwrite) throws IOException { HttpMove move = new HttpMove(sourceUrl, destinationUrl, overwrite); this.execute(move, new VoidResponseHandler()); } @Override public void copy(String sourceUrl, String destinationUrl) throws IOException { copy(sourceUrl, destinationUrl, true); } @Override public void copy(String sourceUrl, String destinationUrl, boolean overwrite) throws IOException { HttpCopy copy = new HttpCopy(sourceUrl, destinationUrl, overwrite); this.execute(copy, new VoidResponseHandler()); } @Override public void createDirectory(String url) throws IOException { HttpMkCol mkcol = new HttpMkCol(url); this.execute(mkcol, new VoidResponseHandler()); } @Override public boolean exists(String url) throws IOException { HttpHead head = new HttpHead(url); return this.execute(head, new ExistsResponseHandler()); } /** * Validate the response using the response handler. Aborts the request if there is an exception. * * @param <T> Return type * @param request Request to execute * @param responseHandler Determines the return type. * @return parsed response */ protected <T> T execute(HttpRequestBase request, ResponseHandler<T> responseHandler) throws IOException { return execute(context, request, responseHandler); } /** * No validation of the response. Aborts the request if there is an exception. 
* * @param request Request to execute * @return The response to check the reply status code */ protected HttpResponse execute(HttpRequestBase request) throws IOException { return execute(context, request, null); } /** * Common method as single entry point responsible fo request execution * @param context clientContext to be used when executing request * @param request Request to execute * @param responseHandler can be null if you need raw HttpResponse or not null response handler for result handling. * @param <T> will return raw HttpResponse when responseHandler is null or value reslved using provided ResponseHandler instance * @return value resolved using response handler or raw HttpResponse when responseHandler is null */ protected <T> T execute(HttpClientContext context, HttpRequestBase request, ResponseHandler<T> responseHandler) throws IOException { HttpContext requestLocalContext = new BasicHttpContext(context); try { if (responseHandler != null) { return this.client.execute(request, responseHandler, requestLocalContext); } else { return (T) this.client.execute(request, requestLocalContext); } } catch (HttpResponseException e) { // Don't abort if we get this exception, caller may want to repeat request. throw e; } catch (IOException e) { request.abort(); throw e; } finally { context.setAttribute(HttpClientContext.USER_TOKEN, requestLocalContext.getAttribute(HttpClientContext.USER_TOKEN)); } } @Override public void shutdown() throws IOException { this.client.close(); } /** * Creates a client with all of the defaults. 
* * @param selector Proxy configuration or null * @param credentials Authentication credentials or null */ protected HttpClientBuilder configure(ProxySelector selector, CredentialsProvider credentials) { Registry<ConnectionSocketFactory> schemeRegistry = this.createDefaultSchemeRegistry(); HttpClientConnectionManager cm = this.createDefaultConnectionManager(schemeRegistry); String version = Version.getSpecification(); if (version == null) { version = VersionInfo.UNAVAILABLE; } return HttpClients.custom() .setUserAgent("Sardine/" + version) .setDefaultCredentialsProvider(credentials) .setRedirectStrategy(this.createDefaultRedirectStrategy()) .setDefaultRequestConfig(RequestConfig.custom() // Only selectively enable this for PUT but not all entity enclosing methods .setExpectContinueEnabled(false).build()) .setConnectionManager(cm) .setRoutePlanner(this.createDefaultRoutePlanner(this.createDefaultSchemePortResolver(), selector)); } protected DefaultSchemePortResolver createDefaultSchemePortResolver() { return new DefaultSchemePortResolver(); } protected SardineRedirectStrategy createDefaultRedirectStrategy() { return new SardineRedirectStrategy(); } /** * Creates a new registry for default ports with socket factories. */ protected Registry<ConnectionSocketFactory> createDefaultSchemeRegistry() { return RegistryBuilder.<ConnectionSocketFactory>create() .register("http", this.createDefaultSocketFactory()) .register("https", this.createDefaultSecureSocketFactory()) .build(); } /** * @return Default socket factory */ protected ConnectionSocketFactory createDefaultSocketFactory() { return PlainConnectionSocketFactory.getSocketFactory(); } /** * @return Default SSL socket factory */ protected ConnectionSocketFactory createDefaultSecureSocketFactory() { return SSLConnectionSocketFactory.getSocketFactory(); } /** * Use fail fast connection manager when connections are not released properly. 
* * @param schemeRegistry Protocol registry * @return Default connection manager */ protected HttpClientConnectionManager createDefaultConnectionManager(Registry<ConnectionSocketFactory> schemeRegistry) { return new PoolingHttpClientConnectionManager(schemeRegistry); } /** * Override to provide proxy configuration * * @param resolver Protocol registry * @param selector Proxy configuration * @return ProxySelectorRoutePlanner configured with schemeRegistry and selector */ protected HttpRoutePlanner createDefaultRoutePlanner(SchemePortResolver resolver, ProxySelector selector) { return new SystemDefaultRoutePlanner(resolver, selector); } }
package com.r307.arbitrader.config;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration for notifications to other services like email, Slack and Discord.
 *
 * <p>Bound from the {@code notifications.*} properties. The nested holder classes are
 * declared {@code static}, as recommended for {@link ConfigurationProperties} binding;
 * a non-static inner class carries a hidden reference to the enclosing instance
 * for no benefit and can break property binding.
 */
@ConfigurationProperties("notifications")
@Configuration
public class NotificationConfiguration {
    private Slack slack = new Slack();
    private Logs logs = new Logs();
    private Mail mail = new Mail();
    private Discord discord = new Discord();

    public Slack getSlack() {
        return slack;
    }

    public void setSlack(Slack slack) {
        this.slack = slack;
    }

    public Logs getLogs() {
        return logs;
    }

    public void setLogs(Logs logs) {
        this.logs = logs;
    }

    public Mail getMail() {
        return mail;
    }

    public void setMail(Mail mail) {
        this.mail = mail;
    }

    public Discord getDiscord() {
        return discord;
    }

    public void setDiscord(Discord discord) {
        this.discord = discord;
    }

    /** Slack notification settings ({@code notifications.slack.*}). */
    public static class Slack {
        private String accessToken;
        private String botAccessToken;
        private String channel;

        public String getAccessToken() {
            return accessToken;
        }

        public void setAccessToken(String accessToken) {
            this.accessToken = accessToken;
        }

        public String getBotAccessToken() {
            return botAccessToken;
        }

        public void setBotAccessToken(String botAccessToken) {
            this.botAccessToken = botAccessToken;
        }

        public String getChannel() {
            return channel;
        }

        public void setChannel(String channel) {
            this.channel = channel;
        }
    }

    /** Logging thresholds ({@code notifications.logs.*}). */
    public static class Logs {
        // Milliseconds after which a ticker update is logged as "slow".
        private Integer slowTickerWarning = 3000;

        public Integer getSlowTickerWarning() {
            return slowTickerWarning;
        }

        public void setSlowTickerWarning(Integer slowTickerWarning) {
            this.slowTickerWarning = slowTickerWarning;
        }
    }

    /** Email notification settings ({@code notifications.mail.*}). */
    public static class Mail {
        private Boolean active;
        private String from;
        private String to;

        public Boolean getActive() {
            return active;
        }

        public void setActive(Boolean active) {
            this.active = active;
        }

        public String getFrom() {
            return from;
        }

        public void setFrom(String from) {
            this.from = from;
        }

        public String getTo() {
            return to;
        }

        public void setTo(String to) {
            this.to = to;
        }
    }

    /** Discord webhook settings ({@code notifications.discord.*}). */
    public static class Discord {
        private String webhookId;
        private String webhookToken;

        public String getWebhookId() {
            return webhookId;
        }

        public void setWebhookId(String webhookId) {
            this.webhookId = webhookId;
        }

        public String getWebhookToken() {
            return webhookToken;
        }

        public void setWebhookToken(String webhookToken) {
            this.webhookToken = webhookToken;
        }
    }
}
def render_extra_variables_templates(
    self,
    logger,
    context,
    template_filepaths=None,
    filepath_dir=None,
    filename_prefix=EXTRA_VARS_FILENAME_PREFIX,
    filename_suffix=EXTRA_VARS_FILENAME_SUFFIX
):
    """
    Render extra-variables templates into temporary files on disk.

    Each template is rendered with the given ``context`` and written into its
    own temporary file. The files are created with ``delete=False``, i.e. they
    are deliberately left on disk for later consumption — the caller is
    responsible for cleaning them up.

    :param logger: logger passed through to the template renderer.
    :param context: mapping of variables, expanded as keyword arguments for
        the template rendering.
    :param template_filepaths: template files to render; defaults to
        ``self.extra_variables_template_files``.
    :param filepath_dir: directory in which the files are created; defaults
        to the current working directory.
    :param filename_prefix: prefix of the generated temporary file names.
    :param filename_suffix: suffix of the generated temporary file names.
    :returns: list of filepaths of the rendered files, one per template.
    """

    template_filepaths = template_filepaths or self.extra_variables_template_files
    filepath_dir = filepath_dir or os.getcwd()

    filepaths = []

    for template in self._extra_variables_templates(template_filepaths):
        with tempfile.NamedTemporaryFile(
            prefix=filename_prefix,
            suffix=filename_suffix,
            dir=filepath_dir,
            delete=False
        ) as f:
            f.write(
                gluetool.utils.render_template(template, logger=logger, **context)
            )

            # Make sure the content hits the disk before anyone else reads the file.
            f.flush()

            # 0644: owner read/write, everyone else read-only.
            os.chmod(f.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)

            filepaths.append(f.name)

    return filepaths
Friends of a farmer who killed himself following a long dispute with the industry furious at being handed Lifeline leaflets with gas company branding Gas company funding of suicide prevention counselling in Queensland’s gasfields has sparked fresh controversy after a government department distributed a brochure for the service to friends of a farmer who took his life after a long battle with the industry. Family friends of the late George Bender, whose suicide in October brought national attention to rural community tensions with gas companies, were furious to receive the Queensland Gas Company-branded leaflet at a meeting with the state’s chief health officer, Jeanette Young, last month in Chinchilla. Queensland mining chief says activists hijacked death of outspoken farmer Read more Staff from the department of communities gave out the leaflet, in which the charity Lifeline and QGC in prominent lettering “proudly introduce the Western Downs counseling project”, at a meeting Young held with locals impacted by the coal seam gas industry. Uniting Church minister Graham Slaughter said the distribution of the leaflet was “inappropriate and insensitive in the context in which it was used”. “I believe it was inappropriate because it was left with people who were very close with George Bender, who are affected by the Linc Energy business, and who are also affected by coal seam gas,” he told Guardian Australia. Slaughter said he found the partnership between QGC and Lifeline Darling Downs “disturbing” and was concerned it could undermine the credibility of the otherwise well-regarded charity. “If this was Lifeline on the Gold Coast handing something out, perhaps people wouldn’t think twice. But to do it here, that’s like waving a red rag at a bull as far as I was concerned.” The Department of Communities issued a statement apologising “for any offence or distress caused to local people”. 
Malcolm Turnbull will 'look into' suicide of anti-CSG farmer George Bender Read more “It will not be distributing this brochure in future,” a spokeswoman said. She said departmental staff had handed it out during Young’s visit “as it was the only one they had with information about this local counselling service”. The chief executive of Lifeline Darling Downs, Derek Tuffield, said the service had in fact ended in March, when QGC’s funding of $1.3m under its “social investment partnership” program from 2011 had run out. “It’s a shame someone had an old brochure and circulated it because the service doesn’t function at the moment,” he said. However, Tuffield said he would give “serious consideration” to accepting more QGC funding “because my role is to respond to community needs”. “But in saying that, the sensitivity now around the anger [towards the industry], that would certainly have to be factored into how that was done,” he said. “I’m very aware of the anger in the community that sits around this since George’s very tragic suicide. “It’s a very sensitive topic at the moment but at the time we struck the sponsorship up in 2011, coal seam gas was in its infancy.” Tuffield said no state or federal funding had been available when Lifeline’s partnership with QGC began, at a time when the impact of industry expansion in the area was “starting to show about domestic violence, about excess alcohol, about excess gambling, about financial pressure, about increased rent for houses and so on”. Gas company sponsorship has been a contentious issue in Queensland, where police were last year forced to defend the use of the Santos logo on vehicles that appeared on the Darling Downs. Anti-CSG activists complained that this raised conflict of interest questions when police attended protest sites. Gas company sponsorship extending to the media industry also led to an incongruous scenario at this year’s Walkley awards for journalism. 
Linc Energy, which has declared more than $260,000 in donations to the Walkley board through the Media, Entertainment and Arts Alliance (MEAA) since 2010, has been prohibited from carrying out mining in an area of the Darling Downs by the Queensland government while being prosecuted over the contamination of hundreds of square kilometres of farmland around its experimental plant in the area. This includes George Bender’s property. ABC reporters Mark Willacy and Mark Solomons won the Walkley for radio news and current affairs for their coverage of the Linc issue – at an event of which Linc was a major sponsor. Willacy told Guardian Australia: “I think there’s a positive reflection on the union in that it obviously didn’t influence at all the awarding of that particular Walkley.” “In fact, it maybe highlights that the union’s Walkleys board is independent in its functions, as it should be. And it’s up to the union who they accept as sponsors.” Queensland police defend use of vehicles branded with Santos logo Read more The chief executive of the MEAA, Paul Murphy, said he could not comment on sponsorship arrangements for the Walkleys, as they were run by a board independent from the union. Tuffield said he accepted that potential damage to the credibility of Lifeline’s counselling program through association with a gas company “was obviously part of the risk initially for us”. “I’d hate to think it damages us in any way because it was done obviously with the best intentions for clients,” he said. “I have 300-odd families that we responded to with ongoing counselling quite often to support them getting through a whole range of issues, and a number of those issues had nothing to do with CSG, more with relationships and domestic violence and financial stress.
“Do I turn my back on that and say it’s not my problem when I’ve got this offer of funding?” “For non profits at the moment, if you’re not partnering with corporates or other agencies, you’re not going to survive into the future because state and federal funding is reducing year by year.” Tuffield said Lifeline currently had a counsellor based in the gasfields town of Dalby funded by the Royal Flying Doctor Service. • For information and support in Australia, call Lifeline on 13 11 14, Mensline on 1300 789 978 or Beyond Blue on 1300 22 4636.
California Assemblyman Nathan Fletcher quits GOP POLITICS FILE - In this Jan. 30, 2012 file photo, Assemblyman Nathan Fletcher, R-San Diego, speaks during the Assembly session at the Capitol in Sacramento, Calif. Fletcher says he's leaving the Republican Party to become an independent barely two months before voters in the nation's eighth-largest city decide whether to elect him mayor. (AP Photo/Rich Pedroncelli, File) less FILE - In this Jan. 30, 2012 file photo, Assemblyman Nathan Fletcher, R-San Diego, speaks during the Assembly session at the Capitol in Sacramento, Calif. Fletcher says he's leaving the Republican Party to ... more Photo: Rich Pedroncelli, Associated Press Photo: Rich Pedroncelli, Associated Press Image 1 of / 1 Caption Close California Assemblyman Nathan Fletcher quits GOP 1 / 1 Back to Gallery California Assemblyman Nathan Fletcher, a San Diego mayoral candidate whom the state GOP has considered one of its most promising future leaders, said Wednesday that he is fed up with the "petty games" of hyper-partisan politics and has abandoned the Republican Party to become an independent. Fletcher, 35 and a decorated Iraq war veteran, becomes the only independent lawmaker in Sacramento. He told The Chronicle in an interview that he and his wife - Mindy Tucker Fletcher, the 2000 presidential campaign spokeswoman for George W. Bush - re-registered as "decline to state" voters on Wednesday. The decision, he said, follows "a long track record of frustration with the partisan environment" hobbling politics in California, a climate that "doesn't allow a focus on solutions and doesn't allow you to work with the other side if you think they're worthy." It's a bold move for Fletcher, who with Republican Whip Rep. Kevin McCarthy of Bakersfield had been viewed as among a handful of potential Republican candidates for high-profile statewide races, including U.S. Senate and congressional seats. 
The move also represents an ominous sign for the California Republican Party, which has seen its voter rolls decline as it tacks increasingly to the right in the solidly blue state. Fletcher, who earned the wrath of some Republicans by working with Gov. Jerry Brown and other Democrats on key tax issues, said he has "always been willing to step up and do what my conscience told me was right." Since his election to the Assembly in 2008, Fletcher set himself apart from other GOP lawmakers by supporting gay rights measures, such as San Francisco Democratic state Sen. Mark Leno's FAIR Education Act that requires school textbooks to recount the contributions of lesbian, gay, bisexual and transgender people; and with his emotional 2010 speech on the Assembly floor about his opposition, as a veteran, to the military's now-defunct "don't ask, don't tell" ban on openly gay service members. His experience in the armed services played a large part in his decision to leave the GOP, Fletcher said this week. "Going to war changes you," he said. "You feel a sense of obligation and to say, 'I need to make my life count,' " he said. Saying he has developed "a low tolerance" for the kind of infighting that goes on in the halls of Sacramento, Fletcher added, "I did not fight a war to come back and play games." Problems in GOP Some political observers said Fletcher's move is a sign of increasing problems for the state GOP. Republican registration lags 13 points behind Democratic in California, where independents represent more than 1 in 5 voters. The GOP does not hold a single statewide office, and the governor's office and both houses of the Legislature are held by Democrats. "The GOP in California has believed that voters will come around to them," said Adam Mendelsohn, a veteran GOP strategist and former communications director for moderate Republican Gov. Arnold Schwarzenegger. 
But increasingly, he said, political developments in the state suggest that "the only way the GOP achieves any relevance is if they come around to the voters." But GOP conservative activist Jon Fleischman, whose popular Flashreport.org website has been vociferous in its criticism of Fletcher's moderate politics, said Wednesday of the assemblyman's defection: "If this is a divorce, then he filed the papers." Fleischman said Fletcher sought the endorsement of his party in the San Diego mayoral race two weeks ago and "someone else got it" - conservative Republican Carl DeMaio. The San Diego councilman is openly gay, Fleischman said. No matter how Fletcher "wants to frame it, a lot of people are going to look at it as sour grapes," Fleischman added. Future implications The move to become an independent doesn't preclude Fletcher from seeking higher office someday - and could help him build a base. Pollsters say the state's growing bloc of independent voters view partisan politics unfavorably and that the state's new top-two primary system - which advances the two candidates with the most votes, regardless of political party, to the general election - could further sideline the GOP. One party strategist who has known Fletcher for years said the legislator was warned about the implications that becoming an independent would have on his political future. "It was discussed that if you do this, you will be taking yourself off the list" for U.S. Sen. Barbara Boxer's seat "or other congressional seats," said the strategist, who spoke on condition of anonymity. Speaker not surprised Assembly Speaker John Pérez, D-Los Angeles, who said he knows Fletcher well and has enjoyed working with him, was not surprised by the assemblyman's decision. Pérez said Fletcher has "consistently been willing to work to find common ground not just with me but with all of his colleagues." 
Several Sacramento watchers said that while Fletcher's decision to go independent depletes the ranks of moderate Republicans in the capital, it is unlikely to shake up the political calculus there. Fletcher had appeared increasingly estranged from staunch GOP legislative conservatives and rarely went to his party's caucus meetings. On Wednesday, Fletcher said his focus was on his mayoral campaign. "I care a great deal for my city," he said. "Republican voters and Democratic voters should know nothing has changed in terms of my positions and my principles. Now, I'll be an independent for them."
Novel MYO5B mutation in microvillous inclusion disease of Syrian ancestry Microvillus inclusion disease (MVID) is a rare autosomal recessive condition characterized by a lack of microvilli on the surface of enterocytes, resulting in severe, life-threatening diarrhea that could lead to mortality within the first year of life. We identify two unrelated families, each with one child presenting with severe MVID from birth. Using trio whole-exome sequencing, we observed that the two families share a novel nonsense variant (Glu1589*) in the MYO5B gene, a type Vb myosin motor protein in which rare damaging mutations were previously described to cause MVID. This founder mutation was very rare in public databases and is likely specific to patients of Syrian ancestry. We present a detailed account of both patients’ clinical histories to fully characterize the effect of this variant and expand the genotype–phenotype databases for MVID patients from the Middle East. INTRODUCTION Microvillus inclusion disease (MVID; MIM # 251850), also known as congenital microvillus atrophy, was first described by Davidson et al. (1978). It is a rare autosomal recessive disease that presents with an intractable life-threatening watery diarrhea either within the first days of life (early-onset form) or at several months of life (late-onset form) (Ruemmele et al. 2006). The hallmarks of MVID are a lack of microvilli on the surface of villous enterocytes, the occurrence of microvillous inclusions, and the cytoplasmic accumulation of periodic acid-Schiff-positive vesicles (Davidson et al. 1978;Cutz et al. 1989;Ruemmele et al. 2010). Müller et al. (2008) showed that mutations in MYO5B (MIM # 606540), encoding the unconventional type Vb myosin motor protein, were associated with MVID in an extended Turkish kindred. Since then, more mutations were described in different populations (Dhekne et al. 2018). In this report, we describe a novel mutation in two unrelated Syrian patients with MVID.
Since the first day of life, the patient was started on breast milk but developed severe diarrhea leading to dehydration and acidosis. Changing the feeds to elemental formula (Neocene LCP, SHS) made no difference to her symptoms. The oral feeding was discontinued and she was started on total parenteral nutrition (TPN). Examinations of other organ involvement such as cardiovascular system, lungs, and kidney were unremarkable. Nonetheless, the patient had frequent hospital admissions because of dehydration and sepsis. During her disease, the patient also developed cholestasis with pruritus because of prolonged TPN use. Her serum bile acids and liver enzymes were high, and she was treated with ursodeoxycholic acid to improve cholestasis. Investigations for infection, cystic fibrosis, metabolic, and immunodeficiency disorders were all unremarkable. The patient was also seen and investigated extensively by the endocrinology team to rule out congenital adrenal hyperplasia (CAH) as a cause of diarrhea. Histological analysis of multiple gastrointestinal biopsies was obtained postendoscopy. In this patient, the histopathology of the duodenal mucosa showed subtotal and total villous atrophy with slightly elongated crypts. There were no granuloma, dysplasia, nor malignancies seen. A vacuolated apical cytoplasm was noted in hematoxylin and eosin (H&E) sections and highlighted by special stains (PAS) that showed cytoplasmic staining of the luminal aspect of enterocytes. Immunohistochemical staining for polyclonal anti-CEA, CD10 stains illustrated microvillus abnormalities including patchy normal band-like brush border with the internalization of brush border in the cytoplasm. The overall morphological and immunohistochemical appearances were broadly compatible with the clinical impression of MVID. Microscopic examination of fragments of the large bowel mucosa and the stomach revealed no significant histological abnormality. 
Whole-exome sequence (WES) identified a novel mutation in the MYO5B gene. Her clinical course and outcome are summarized in Table 1. Nine months later, the mother became pregnant. Amniocentesis at 12 wk for genetic testing for the MYO5B mutation was positive; the parents chose to abort the baby. Six months later, the mother became pregnant again. Amniocentesis done at 13-wk gestation was negative, and 6 mo later the mother gave birth to a healthy baby. Patient 2 (MVID-2) A Syrian boy was born at 34-wk gestation to consanguineous parents originally from the western part of Syria. This was their first baby and there was no history of abortion. Investigations for infection, cystic fibrosis, metabolic, and immunodeficiency disorders were all unremarkable. At the age of 6 mo, the patient developed acute disseminated encephalomyelitis (ADEM) — an immune-mediated inflammatory demyelinating condition that predominantly affects the white matter of the brain and spinal cord. Convulsive seizures occur around the onset of ADEM in as many as 35% of cases as well as long tract signs such as clonus and increased muscle stretch reflexes that can occur in as many as 85% of cases. ADEM was only present in MVID-2. Patient 2 also developed cholestasis with pruritus, in which liver enzyme levels and serum bile acid levels were elevated most likely because of the prolonged TPN use. This was also present in Patient 1. Investigation of sections from the small intestine showed loss of villous architecture with mild crypt hyperplasia. There also appeared to be "internalization" with a patchy distribution of the brush border evident in PAS stain as well as on CD10 (immunohistochemical stains done with appropriate controls). Both the villous atrophy and "internalization of brush border" were suggestive of MVID. Investigation with WES revealed the same mutation as in Patient 1. Trials of even small amounts of different formulae caused significant diarrhea and electrolyte disturbances.
His clinical course is summarized in Table 1. At 23 mo of age, the child died. DISCUSSION Geographically, there is a relatively high prevalence of MVID in the Mediterranean region — almost 50% of all the patients with reported geographical information (Halac et al. 2011;van der Velde et al. 2013). There have been no reported MVID cases from Syria, and the two patients reported in this report are the first MVID cases of Syrian ancestry. Although both families were not closely related and hailed from two different areas in Syria, they both carried the same MYO5B novel mutation Glu1589 * . Because this variant is globally very rare, its presence in two families from the same country suggests that this is a geographically specific founder mutation. As expected from the predicted loss-of-function effect on the protein, the clinical presentation showed severe disease in both patients who could hardly tolerate oral feeding and were completely dependent on TPN. Both patients had developmental delays, consistent with previously reported MVID cases (Phillips and Schmitz 1992;Halac et al. 2011). Patient 2 also developed intractable myoclonic seizures that needed three anti-epileptic medications to control; this complication was never reported in previous MVID cases. Patient 1 developed pruritus and cholestasis consistent with progressive familial intrahepatic cholestasis-like illness, which has been previously described in MVID (Girard et al. 2014). It is unclear to what extent these extra-intestinal phenotypes may be iatrogenic or linked to the MYO5B mutations, and patient-specific disease models are eagerly awaited (van der Velde et al. 2013). In conclusion, this is the first report of MVID in Syrian patients. The rarity of this mutation in public databases yet being homozygous in two unrelated Syrian patients suggests this is a Syrian founder mutation.
The utility of identifying causative mutations within a family allows for reliable genetic counseling, prenatal screening, and embryo selection by preimplantation genetic testing. METHODS WES of proband and parents was done using Agilent's SureSelect v5 platform and sequenced on Illumina's HiSeq 2000. Reads were mapped to the human reference genome GRCh37/hg19 using BWA 0.5.9 and variants called using GATK Best Practices (as described in Fakhro et al. 2019). Each exome was verified to have a minimum average depth of 50× (Tables 2 and 3). Causative variants were identified by searching for high-quality (quality score >100), rare (minor allele frequency <0.1%), recessive mutations with a predicted pathogenic impact (loss-of-function mutation, and/or GERP score > 2, PolyPhen score > 0.9, CADD score > 20). Candidate variants surviving this filtration were manually evaluated for genes with reasonable implications in disease by the literature and database searches. Genetic Testing The exomes of all the available family members were sequenced to a minimum depth of 54× (mean depth 73.5×) and variant calls according to best practices. Within the framework of reported consanguinity, variants were filtered based on being recessive (homozygous in the index and heterozygous in the parents), rare, and predicted damaging by a range of evolutionary scores. Both sets of patients were found to have the nonsense mutation p.Glu1589 * in the MYO5B gene ( Figs. 1 and 2). This protein-truncating mutation has a reported global allele frequency of <0.001% and is predicted to cause loss of gene function. ADDITIONAL INFORMATION Data Deposition and Access The informed consent signed by the parents protected the privacy of study participants, and such data cannot be made publicly available. The novel variant has been deposited into ClinVar (https://www.ncbi.nlm.nih.gov/clinvar/) under accession number VCV000620180. 
Ethics Statement This study was performed with Institutional Review Board (IRB) approval for Human Subjects Research at Hamad Medical Corporation and Weill Cornell Medicine-Qatar. Research participants were recruited using IRB-approved protocols and informed consents. Acknowledgments This work was supported by the Qatar Foundation National Priorities Research Program (NPRP11S-0110-180250).
Enhancement of in vitro growth and resistance to gray mould of Vitis vinifera co-cultured with plant growth-promoting rhizobacteria. The potential of a plant growth-promoting rhizobacterium, Pseudomonas sp. (strain PsJN), to stimulate the growth and enhancement of the resistance of grapevine (Vitis vinifera L.) transplants to gray mould caused by Botrytis cinerea has been investigated. In vitro inoculation of grapevine plantlets induced a significant plant growth promotion which made them more hardy and vigorous when compared to non-inoculated plantlets. This ability increased upon transplanting. When grown together with B. cinerea, the causal agent of gray mould, significant differences of aggressiveness were observed between the inoculated and non-inoculated plants. The presence of bacteria was accompanied by an induction of plant resistance to the pathogen. The beneficial effect from this plant-microbe association is being postulated.
pylons = raw_input() pylon_array = map(int, raw_input().split()) # cur = -pylon_array[0] print max(pylon_array) # for i in range(0,len(pylon_array)-1): # cur = cur + pylon_array[i] - pylon_array[i+1] # print cur # print -cur
// SPDX-License-Identifier: GPL-2.0+ /* * Take drivers/gpio/gpio-74x164.c as reference. * * 74Hx164 - Generic serial-in/parallel-out 8-bits shift register GPIO driver * * Copyright (C) 2016 Peng Fan <[email protected]> * */ #include <common.h> #include <errno.h> #include <dm.h> #include <fdtdec.h> #include <malloc.h> #include <asm/global_data.h> #include <asm/gpio.h> #include <asm/io.h> #include <dm/device_compat.h> #include <dt-bindings/gpio/gpio.h> #include <spi.h> DECLARE_GLOBAL_DATA_PTR; /* * struct gen_74x164_chip - Data for 74Hx164 * * @oe: OE pin * @nregs: number of registers * @buffer: buffer for chained chips */ #define GEN_74X164_NUMBER_GPIOS 8 struct gen_74x164_priv { struct gpio_desc oe; u32 nregs; /* * Since the nregs are chained, every byte sent will make * the previous byte shift to the next register in the * chain. Thus, the first byte sent will end up in the last * register at the end of the transfer. So, to have a logical * numbering, store the bytes in reverse order. */ u8 *buffer; }; static int gen_74x164_write_conf(struct udevice *dev) { struct gen_74x164_priv *priv = dev_get_priv(dev); int ret; ret = dm_spi_claim_bus(dev); if (ret) return ret; ret = dm_spi_xfer(dev, priv->nregs * 8, priv->buffer, NULL, SPI_XFER_BEGIN | SPI_XFER_END); dm_spi_release_bus(dev); return ret; } static int gen_74x164_get_value(struct udevice *dev, unsigned offset) { struct gen_74x164_priv *priv = dev_get_priv(dev); uint bank = priv->nregs - 1 - offset / 8; uint pin = offset % 8; return (priv->buffer[bank] >> pin) & 0x1; } static int gen_74x164_set_value(struct udevice *dev, unsigned offset, int value) { struct gen_74x164_priv *priv = dev_get_priv(dev); uint bank = priv->nregs - 1 - offset / 8; uint pin = offset % 8; int ret; if (value) priv->buffer[bank] |= 1 << pin; else priv->buffer[bank] &= ~(1 << pin); ret = gen_74x164_write_conf(dev); if (ret) return ret; return 0; } static int gen_74x164_direction_input(struct udevice *dev, unsigned offset) { return -ENOSYS; } 
/*
 * Configure a pin as output with the given initial value. Since every pin
 * of the shift register is always an output, this just sets the value.
 */
static int gen_74x164_direction_output(struct udevice *dev, unsigned offset,
				       int value)
{
	return gen_74x164_set_value(dev, offset, value);
}

/* All pins of the shift register are outputs; report that unconditionally. */
static int gen_74x164_get_function(struct udevice *dev, unsigned offset)
{
	return GPIOF_OUTPUT;
}

/*
 * Translate a two-cell device-tree GPIO specifier into a gpio_desc:
 * cell 0 is the pin offset, cell 1 carries the GPIO_ACTIVE_LOW flag.
 */
static int gen_74x164_xlate(struct udevice *dev, struct gpio_desc *desc,
			    struct ofnode_phandle_args *args)
{
	desc->offset = args->args[0];
	desc->flags = args->args[1] & GPIO_ACTIVE_LOW ? GPIOD_ACTIVE_LOW : 0;

	return 0;
}

static const struct dm_gpio_ops gen_74x164_ops = {
	.direction_input	= gen_74x164_direction_input,
	.direction_output	= gen_74x164_direction_output,
	.get_value		= gen_74x164_get_value,
	.set_value		= gen_74x164_set_value,
	.get_function		= gen_74x164_get_function,
	.xlate			= gen_74x164_xlate,
};

/*
 * Probe: read the device-tree configuration, allocate the shadow buffer,
 * request the optional OE (output-enable) GPIO, and push the initial
 * register contents out to the chip.
 */
static int gen_74x164_probe(struct udevice *dev)
{
	struct gen_74x164_priv *priv = dev_get_priv(dev);
	struct gpio_dev_priv *uc_priv = dev_get_uclass_priv(dev);
	char *str, name[32];
	int ret;
	const void *fdt = gd->fdt_blob;
	int node = dev_of_offset(dev);

	/* Bank name is the device name plus a trailing underscore. */
	snprintf(name, sizeof(name), "%s_", dev->name);
	str = strdup(name);
	if (!str)
		return -ENOMEM;

	/*
	 * See Linux kernel:
	 * Documentation/devicetree/bindings/gpio/gpio-74x164.txt
	 */
	priv->nregs = fdtdec_get_int(fdt, node, "registers-number", 1);
	priv->buffer = calloc(priv->nregs, sizeof(u8));
	if (!priv->buffer) {
		ret = -ENOMEM;
		goto free_str;
	}

	/* Optional initial contents for each register in the chain. */
	ret = fdtdec_get_byte_array(fdt, node, "registers-default",
				    priv->buffer, priv->nregs);
	if (ret)
		dev_dbg(dev, "No registers-default property\n");

	/* The OE pin is optional; failure to claim it is not fatal. */
	ret = gpio_request_by_name(dev, "oe-gpios", 0, &priv->oe,
				   GPIOD_IS_OUT | GPIOD_IS_OUT_ACTIVE);
	if (ret) {
		dev_dbg(dev, "No oe-pins property\n");
	}

	uc_priv->bank_name = str;
	uc_priv->gpio_count = priv->nregs * 8;

	/* Push the initial buffer contents out to the shift registers. */
	ret = gen_74x164_write_conf(dev);
	if (ret)
		goto free_buf;

	dev_dbg(dev, "%s is ready\n", dev->name);

	return 0;

free_buf:
	free(priv->buffer);
free_str:
	free(str);
	return ret;
}

static const struct udevice_id gen_74x164_ids[] = {
	{ .compatible = "fairchild,74hc595" },
	{ }
};

U_BOOT_DRIVER(74x164) = {
	.name		= "74x164",
	.id		= UCLASS_GPIO,
	.ops		= &gen_74x164_ops,
	.probe		= gen_74x164_probe,
	.priv_auto	= sizeof(struct gen_74x164_priv),
	.of_match	= gen_74x164_ids,
};
// region Inner Class private class TypeMatcher implements Matcher { // region Properties private final Class<?> value; // endregion // region Constructors public TypeMatcher(Class<?> value) { this.value = value; } // endregion @Override public boolean match(Object value) { return this.value.equals(value); } }
// EnsureSuffix guarantees that the given string ends with the specified suffix.
// If s already ends with suffix it is returned unmodified; otherwise the
// suffix is appended and the resulting string returned.
func EnsureSuffix(s string, suffix string) string {
	if !strings.HasSuffix(s, suffix) {
		s += suffix
	}
	return s
}
/**
 * We should handle the case when the snapshot tag was lost and is now lower
 * than the saved one.
 *
 * @throws Exception if failed.
 */
@Test
public void testThatWeDontFailIfSnapshotTagWasLost() throws Exception {
    ByteBuffer buf = createBuffer();

    // First index page (FLAG_IDX) of partition 0; must be a valid, non-negative id.
    long basePageId = PageIdUtils.pageId(0, PageIdAllocator.FLAG_IDX, 1);

    assert basePageId >= 0;

    PageIO.setPageId(GridUnsafe.bufferAddress(buf), basePageId);

    int oldTag = 10;

    // Mark one page with the current tag...
    io.markChanged(buf, basePageId + 1, oldTag, oldTag - 1, PAGE_SIZE);

    // ...then mark pages with an OLDER tag, simulating a lost snapshot tag.
    for (int i = 1; i < 100; i++)
        io.markChanged(buf, basePageId + i, oldTag - 1, oldTag - 2, PAGE_SIZE);

    // The backwards tag move must flag the tracking page as corrupted.
    assertTrue(io.isCorrupted(buf));

    // While corrupted, every wasChanged() query must throw.
    for (int i = 1; i < 100; i++) {
        try {
            long id = basePageId + i;

            io.wasChanged(buf, id, oldTag - 1, oldTag - 2, PAGE_SIZE);

            fail();
        }
        catch (TrackingPageIsCorruptedException ignore) {
            // Expected: queries are rejected until the corrupt flag is reset.
        }
    }

    // markChanged() with the restored tag still works while the flag is set.
    for (int i = 1; i < 100; i++) {
        long id = basePageId + i + 1000;

        io.markChanged(buf, id, oldTag, oldTag - 2, PAGE_SIZE);
    }

    io.resetCorruptFlag(buf);

    assertFalse(io.isCorrupted(buf));

    // Pages marked after the corruption are reported as changed...
    for (int i = 1; i < 100; i++) {
        long id = basePageId + i + 1000;

        assertTrue(io.wasChanged(buf, id, oldTag, oldTag - 1, PAGE_SIZE));
    }

    // ...while the pages marked with the stale tag are not.
    for (int i = 1; i < 100; i++) {
        long id = basePageId + i;

        assertFalse(io.wasChanged(buf, id, oldTag, oldTag - 1, PAGE_SIZE));
    }
}
package media

// Base is the common error envelope returned by the WeChat API.
type Base struct {
	ErrCode int    `json:"errcode"`
	ErrMsg  string `json:"errmsg"`
}

// Result is the response for media upload / message-send style calls.
type Result struct {
	Base
	//upload article(news)
	Url string `json:"url"`

	Type string `json:"type"` //values: image, voice, video, thumb

	MediaID   string `json:"media_id"`
	CreatedAt int64  `json:"created_at"`

	MsgID     int    `json:"msg_id"`
	MsgStatus string `json:"msg_status"`
	MsgDataID int    `json:"msg_data_id"`

	SpeedLevel int `json:"speed"`
	RealSpeed  int `json:"realspeed"`
}

// Response is the response for fetching a media item; which fields are
// populated depends on the material type.
type Response struct {
	Base
	//temporary video result
	VideoUrl string `json:"video_url"`

	//permanent video result
	Title       string `json:"title"`
	Description string `json:"description"`
	DownUrl     string `json:"down_url"`

	ContentType string

	//News type of material
	News []ArticleItem `json:"news_item"`

	//other types of material including image,voice,thumb
	Filename string `json:"-"`
	Data     []byte `json:"-"`
}

// VideoDescription is the metadata sent alongside a video upload.
type VideoDescription struct {
	Title string `json:"title"`
	Intro string `json:"introduction"`
}

// ArticleItem describes one article inside a news (图文) message.
type ArticleItem struct {
	Title        string `json:"title"`
	ThumbMediaID string `json:"thumb_media_id"` // media ID of the article's cover image (must be a permanent media ID)
	ShowCoverPic int    `json:"show_cover_pic"` // whether to show the cover picture: 0 = hide, 1 = show
	Author       string `json:"author"`
	Digest       string `json:"digest"`  // summary; only single-article news has one, multi-article news leaves it empty. If omitted, defaults to the first 64 characters of the body.
	Content      string `json:"content"` // article body; HTML supported, must be < 20000 characters and < 1M. JS is stripped; image URLs must come from the "upload article image" API — external image URLs are filtered out.
	URL          string `json:"url"`                // URL of the article page
	SourceURL    string `json:"content_source_url"` // original content URL, opened via "Read the original article"
	//fields for uploading article
	Comment    int `json:"need_open_comment"`     // uint32: whether comments are enabled, 0 = disabled, 1 = enabled
	OnlyForFan int `json:"only_fans_can_comment"` // uint32: whether only followers may comment, 0 = everyone, 1 = followers only
}

// ArticleWrapper wraps the article list for an upload request.
type ArticleWrapper struct {
	Articles []ArticleItem `json:"articles"`
}

// ArticleUpdateWrapper is the payload for updating one article of a
// permanent news material, addressed by media ID and index.
type ArticleUpdateWrapper struct {
	MediaID string      `json:"media_id"`
	Index   int         `json:"index"`
	Article ArticleItem `json:"articles"`
}

// MaterialCounter holds the per-type totals of permanent materials.
type MaterialCounter struct {
	Voice   int `json:"voice_count"`
	Video   int `json:"video_count"`
	Image   int `json:"image_count"`
	Article int `json:"news_count"`
}

// MaterialList is a paged listing of permanent materials.
type MaterialList struct {
	Base
	TotalCount int `json:"total_count"`
	ItemCount  int `json:"item_count"`
	Items      []struct {
		MediaID    string `json:"media_id"`
		UpdateTime int    `json:"update_time"`
		//Article
		Content struct {
			Articles []ArticleItem `json:"news_item"`
		} `json:"content"`
		//Image, Voice, Video
		Name string `json:"name"`
		Url  string `json:"url"`
	} `json:"item"`
}
<gh_stars>1-10 package test.org.springdoc.api.app30; import com.querydsl.core.types.Predicate; import org.springframework.data.querydsl.binding.QuerydslPredicate; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RestController; @RestController public class HelloController { @GetMapping("/") public User testQueryDslAndSpringDoc(@QuerydslPredicate(root = User.class, bindings = UserPredicate.class) Predicate predicate) { return null; } }
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-present Datadog, Inc.

package metrics

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"expvar"
	"fmt"

	"github.com/gogo/protobuf/proto"
	jsoniter "github.com/json-iterator/go"

	agentpayload "github.com/DataDog/agent-payload/gogen"
	"github.com/DataDog/datadog-agent/pkg/serializer/marshaler"
	"github.com/DataDog/datadog-agent/pkg/telemetry"
	"github.com/DataDog/datadog-agent/pkg/util"
	utiljson "github.com/DataDog/datadog-agent/pkg/util/json"
)

// EventPriority represents the priority of an event
type EventPriority string

// Enumeration of the existing event priorities, and their values
const (
	EventPriorityNormal EventPriority = "normal"
	EventPriorityLow    EventPriority = "low"

	// JSON field names used by the Agent 5 intake payload.
	apiKeyJSONField           = "apiKey"
	eventsJSONField           = "events"
	internalHostnameJSONField = "internalHostname"

	outOfRangeMsg = "out of range"
)

var (
	// Counters for the expvar/telemetry instrumentation of payload splitting.
	eventExpvar = expvar.NewMap("Event")
	tlmEvent    = telemetry.NewCounter("metrics", "event_split", []string{"action"}, "Events action split")
)

// GetEventPriorityFromString returns the EventPriority from its string representation
func GetEventPriorityFromString(val string) (EventPriority, error) {
	switch val {
	case string(EventPriorityNormal):
		return EventPriorityNormal, nil
	case string(EventPriorityLow):
		return EventPriorityLow, nil
	default:
		return "", fmt.Errorf("Invalid event priority: '%s'", val)
	}
}

// EventAlertType represents the alert type of an event
type EventAlertType string

// Enumeration of the existing event alert types, and their values
const (
	EventAlertTypeError   EventAlertType = "error"
	EventAlertTypeWarning EventAlertType = "warning"
	EventAlertTypeInfo    EventAlertType = "info"
	EventAlertTypeSuccess EventAlertType = "success"
)

// GetAlertTypeFromString returns the EventAlertType from its string representation.
// On an unknown value it falls back to "info" while still returning an error.
func GetAlertTypeFromString(val string) (EventAlertType, error) {
	switch val {
	case string(EventAlertTypeError):
		return EventAlertTypeError, nil
	case string(EventAlertTypeWarning):
		return EventAlertTypeWarning, nil
	case string(EventAlertTypeInfo):
		return EventAlertTypeInfo, nil
	case string(EventAlertTypeSuccess):
		return EventAlertTypeSuccess, nil
	default:
		return EventAlertTypeInfo, fmt.Errorf("Invalid alert type: '%s'", val)
	}
}

// Event holds an event (w/ serialization to DD agent 5 intake format)
type Event struct {
	Title          string         `json:"msg_title"`
	Text           string         `json:"msg_text"`
	Ts             int64          `json:"timestamp"`
	Priority       EventPriority  `json:"priority,omitempty"`
	Host           string         `json:"host"`
	Tags           []string       `json:"tags,omitempty"`
	AlertType      EventAlertType `json:"alert_type,omitempty"`
	AggregationKey string         `json:"aggregation_key,omitempty"`
	SourceTypeName string         `json:"source_type_name,omitempty"`
	EventType      string         `json:"event_type,omitempty"`
	OriginID       string         `json:"-"`
	K8sOriginID    string         `json:"-"`
	Cardinality    string         `json:"-"`
}

// Return a JSON string or "" in case of error during the Marshaling
func (e *Event) String() string {
	s, err := json.Marshal(e)
	if err != nil {
		return ""
	}
	return string(s)
}

// Events represents a list of events ready to be serialize
type Events []*Event

// Marshal serialize events using agent-payload definition
func (events Events) Marshal() ([]byte, error) {
	payload := &agentpayload.EventsPayload{
		Events:   []*agentpayload.EventsPayload_Event{},
		Metadata: &agentpayload.CommonMetadata{},
	}

	for _, e := range events {
		payload.Events = append(payload.Events,
			&agentpayload.EventsPayload_Event{
				Title:          e.Title,
				Text:           e.Text,
				Ts:             e.Ts,
				Priority:       string(e.Priority),
				Host:           e.Host,
				Tags:           e.Tags,
				AlertType:      string(e.AlertType),
				AggregationKey: e.AggregationKey,
				SourceTypeName: e.SourceTypeName,
			})
	}

	return proto.Marshal(payload)
}

// getEventsBySourceType groups the events by their SourceTypeName, using
// "api" as the bucket for events with an empty source type.
func (events Events) getEventsBySourceType() map[string][]*Event {
	eventsBySourceType := make(map[string][]*Event)
	for _, e := range events {
		sourceTypeName := e.SourceTypeName
		if sourceTypeName == "" {
			sourceTypeName = "api"
		}
		eventsBySourceType[sourceTypeName] = append(eventsBySourceType[sourceTypeName], e)
	}
	return eventsBySourceType
}

// MarshalJSON serializes events to JSON so it can be sent to the Agent 5 intake
// (we don't use the v1 event endpoint because it only supports 1 event per payload)
//FIXME(olivier): to be removed when v2 endpoints are available
func (events Events) MarshalJSON() ([]byte, error) {
	// Regroup events by their source type name
	eventsBySourceType := events.getEventsBySourceType()
	hostname, _ := util.GetHostname(context.TODO())
	// Build intake payload containing events and serialize
	data := map[string]interface{}{
		apiKeyJSONField:           "", // legacy field, it isn't actually used by the backend
		eventsJSONField:           eventsBySourceType,
		internalHostnameJSONField: hostname,
	}
	reqBody := &bytes.Buffer{}
	err := json.NewEncoder(reqBody).Encode(data)
	return reqBody.Bytes(), err
}

// SplitPayload breaks the payload into times number of pieces
func (events Events) SplitPayload(times int) ([]marshaler.AbstractMarshaler, error) {
	eventExpvar.Add("TimesSplit", 1)
	tlmEvent.Inc("times_split")
	// An individual event cannot be split,
	// we can only split up the events

	// only split as much as possible
	if len(events) < times {
		eventExpvar.Add("EventsShorter", 1)
		tlmEvent.Inc("shorter")
		times = len(events)
	}

	splitPayloads := make([]marshaler.AbstractMarshaler, times)

	batchSize := len(events) / times
	n := 0
	for i := 0; i < times; i++ {
		var end int

		// the batchSize won't be perfect, in most cases there will be more or less in the last one than the others
		if i < times-1 {
			end = n + batchSize
		} else {
			// Last slice absorbs the remainder of the integer division.
			end = len(events)
		}

		newEvents := events[n:end]
		splitPayloads[i] = newEvents
		n += batchSize
	}
	return splitPayloads, nil
}

// MarshalSplitCompress not implemented
func (events Events) MarshalSplitCompress(bufferContext *marshaler.BufferContext) ([]*[]byte, error) {
	return nil, fmt.Errorf("Events MarshalSplitCompress is not implemented")
}

// Implements StreamJSONMarshaler.
// Each item in StreamJSONMarshaler is composed of all events for a specific source type name.
type eventsSourceType struct {
	sourceType string
	events     []*Event
}

type eventsBySourceTypeMarshaler struct {
	Events             // Required to avoid implementing Marshaler methods
	eventsBySourceType []eventsSourceType
}

func (*eventsBySourceTypeMarshaler) WriteHeader(stream *jsoniter.Stream) error {
	writeEventsHeader(stream)
	return stream.Flush()
}

// writeEventsHeader opens the intake JSON object: the (legacy, empty) apiKey
// field followed by the opening of the "events" object.
func writeEventsHeader(stream *jsoniter.Stream) {
	stream.WriteObjectStart()
	stream.WriteObjectField(apiKeyJSONField)
	stream.WriteString("")
	stream.WriteMore()
	stream.WriteObjectField(eventsJSONField)
	stream.WriteObjectStart()
}

func (*eventsBySourceTypeMarshaler) WriteFooter(stream *jsoniter.Stream) error {
	return writeEventsFooter(stream)
}

// writeEventsFooter closes the "events" object and appends the
// internalHostname field before closing the payload.
func writeEventsFooter(stream *jsoniter.Stream) error {
	stream.WriteObjectEnd()
	stream.WriteMore()
	hostname, _ := util.GetHostname(context.TODO())
	stream.WriteObjectField(internalHostnameJSONField)
	stream.WriteString(hostname)
	stream.WriteObjectEnd()
	return stream.Flush()
}

// WriteItem writes one source type as an array field holding all its events.
func (e *eventsBySourceTypeMarshaler) WriteItem(stream *jsoniter.Stream, i int) error {
	if i < 0 || i > len(e.eventsBySourceType)-1 {
		return errors.New(outOfRangeMsg)
	}
	writer := utiljson.NewRawObjectWriter(stream)
	eventSourceType := e.eventsBySourceType[i]
	if err := writer.StartArrayField(eventSourceType.sourceType); err != nil {
		return err
	}
	for _, v := range eventSourceType.events {
		if err := writeEvent(v, writer); err != nil {
			return err
		}
	}
	return writer.FinishArrayField()
}

func (e *eventsBySourceTypeMarshaler) Len() int { return len(e.eventsBySourceType) }

func (e *eventsBySourceTypeMarshaler) DescribeItem(i int) string {
	if i < 0 || i > len(e.eventsBySourceType)-1 {
		return outOfRangeMsg
	}
	return fmt.Sprintf("Source type: %s, events count: %d", e.eventsBySourceType[i].sourceType, len(e.eventsBySourceType[i].events))
}

// writeEvent streams a single event as one JSON object in the Agent 5 intake
// format; empty optional fields are omitted.
func writeEvent(event *Event, writer *utiljson.RawObjectWriter) error {
	if err := writer.StartObject(); err != nil {
		return err
	}
	writer.AddStringField("msg_title", event.Title, utiljson.AllowEmpty)
	writer.AddStringField("msg_text", event.Text, utiljson.AllowEmpty)
	writer.AddInt64Field("timestamp", event.Ts)
	writer.AddStringField("priority", string(event.Priority), utiljson.OmitEmpty)
	writer.AddStringField("host", event.Host, utiljson.AllowEmpty)
	if len(event.Tags) != 0 {
		if err := writer.StartArrayField("tags"); err != nil {
			return err
		}
		for _, tag := range event.Tags {
			writer.AddStringValue(tag)
		}
		if err := writer.FinishArrayField(); err != nil {
			return err
		}
	}
	writer.AddStringField("alert_type", string(event.AlertType), utiljson.OmitEmpty)
	writer.AddStringField("aggregation_key", event.AggregationKey, utiljson.OmitEmpty)
	writer.AddStringField("source_type_name", event.SourceTypeName, utiljson.OmitEmpty)
	writer.AddStringField("event_type", event.EventType, utiljson.OmitEmpty)
	if err := writer.FinishObject(); err != nil {
		return err
	}
	return writer.Flush()
}

// CreateSingleMarshaler creates marshaler.StreamJSONMarshaler where each item
// is composed of all events for a specific source type name.
func (events Events) CreateSingleMarshaler() marshaler.StreamJSONMarshaler {
	eventsBySourceType := events.getEventsBySourceType()
	var values []eventsSourceType
	for sourceType, events := range eventsBySourceType {
		values = append(values, eventsSourceType{sourceType, events})
	}
	return &eventsBySourceTypeMarshaler{events, values}
}

// Implements a *collection* of StreamJSONMarshaler.
// Each collection is composed of all events for a specific source type name.
// Items returned by CreateMarshalerBySourceType can be too big. In this case,
// we use a collection of StreamJSONMarshaler each by source type.
// eventsMarshaler streams the events of a single source type: the header
// opens an array field named after the source type and each item is one event.
type eventsMarshaler struct {
	sourceTypeName string
	Events
}

func (e *eventsMarshaler) WriteHeader(stream *jsoniter.Stream) error {
	writeEventsHeader(stream)
	stream.WriteObjectField(e.sourceTypeName)
	stream.WriteArrayStart()
	return stream.Flush()
}

func (e *eventsMarshaler) WriteFooter(stream *jsoniter.Stream) error {
	stream.WriteArrayEnd()
	return writeEventsFooter(stream)
}

// WriteItem writes a single event as one JSON object.
func (e *eventsMarshaler) WriteItem(stream *jsoniter.Stream, i int) error {
	if i < 0 || i > len(e.Events)-1 {
		return errors.New(outOfRangeMsg)
	}
	event := e.Events[i]
	writer := utiljson.NewRawObjectWriter(stream)
	if err := writeEvent(event, writer); err != nil {
		return err
	}
	return writer.Flush()
}

func (e *eventsMarshaler) Len() int { return len(e.Events) }

func (e *eventsMarshaler) DescribeItem(i int) string {
	if i < 0 || i > len(e.Events)-1 {
		return outOfRangeMsg
	}
	event := e.Events[i]
	return fmt.Sprintf("Title: %s, Text: %s, Source Type: %s", event.Title, event.Text, event.SourceTypeName)
}

// CreateMarshalersBySourceType creates a collection of marshaler.StreamJSONMarshaler.
// Each StreamJSONMarshaler is composed of all events for a specific source type name.
func (events Events) CreateMarshalersBySourceType() []marshaler.StreamJSONMarshaler {
	e := events.getEventsBySourceType()
	var values []marshaler.StreamJSONMarshaler
	for k, v := range e {
		values = append(values, &eventsMarshaler{k, v})
	}
	// Make sure we return at least one marshaler to have non-empty JSON.
	if len(values) == 0 {
		values = append(values, &eventsBySourceTypeMarshaler{events, nil})
	}
	return values
}
def emane_link(
    self, session_id: int, nem_one: int, nem_two: int, linked: bool
) -> core_pb2.EmaneLinkResponse:
    """Issue an EmaneLink request for a pair of NEMs in a session.

    :param session_id: id of the session containing the NEMs
    :param nem_one: id of the first NEM
    :param nem_two: id of the second NEM
    :param linked: desired link state flag (presumably True links the NEMs,
        False unlinks them — confirm against server semantics)
    :return: response returned by the EmaneLink gRPC call
    """
    return self.stub.EmaneLink(
        core_pb2.EmaneLinkRequest(
            session_id=session_id,
            nem_one=nem_one,
            nem_two=nem_two,
            linked=linked,
        )
    )
/****************************************************************************
  Set the user password (SamOEM version - gets plaintext).

  Parses the RPC parameter block, validates the descriptor strings, then
  hands the 516-byte encrypted password block plus the 16-byte hash that
  follows it to pass_oem_change(). *rparam is pre-set to NERR_badpass and
  only overwritten with NERR_Success when the change succeeds.
****************************************************************************/

static BOOL api_SamOEMChangePassword(connection_struct *conn,uint16 vuid, char *param,char *data,
				int mdrcnt,int mprcnt,
				char **rdata,char **rparam,
				int *rdata_len,int *rparam_len)
{
  fstring user;
  char *p = param + 2;

  /* Response carries a single 16-bit status word; default to failure. */
  *rparam_len = 2;
  *rparam = REALLOC(*rparam,*rparam_len);

  *rdata_len = 0;

  SSVAL(*rparam,0,NERR_badpass);

  /* "zsT" is the expected parameter descriptor string for this call. */
  if(!strequal(param + 2, "zsT")) {
    DEBUG(0,("api_SamOEMChangePassword: Invalid parameter string %s\n", param + 2));
    return False;
  }
  p = skip_string(p, 1);

  /* "B516B16" describes the data layout: a 516-byte encrypted password
   * buffer followed by a 16-byte hash block. */
  if(!strequal(p, "B516B16")) {
    DEBUG(0,("api_SamOEMChangePassword: Invalid data parameter string %s\n", p));
    return False;
  }
  p = skip_string(p,1);

  /* The user name follows the descriptor strings. */
  fstrcpy(user,p);
  p = skip_string(p,1);

  DEBUG(3,("api_SamOEMChangePassword: Change password for <%s>\n",user));

  /* Map the name through username aliasing and prime the passwd cache;
   * return values are deliberately ignored (best effort). */
  (void)map_username(user);
  (void)Get_Pwnam( user, True);

  /* data[0..515] is the encrypted new password, data[516..531] the hash. */
  if (pass_oem_change(user, (uchar*) data, (uchar *)&data[516], NULL, NULL)) {
    SSVAL(*rparam,0,NERR_Success);
  }

  /* Always True: the request was handled; the status word carries the result. */
  return(True);
}
Sydney and Melbourne will each house 7 million people by 2060; Australia might house 42 million. By then 58 per cent of the population will be in their 50s or older, and importantly 51 per cent of the voters will be that old. Australians aged 65 and over will account for one quarter of the population. One in six Australians will be aged 75 or more. Ageing population: It has been predicted that by 2060 Australians aged over 65 will account for one quarter of the population. Credit: Greg Newington Even the very old will become more numerous. A Productivity Commission research paper – An Ageing Australia: Preparing for the Future – to be released on Friday says at present Australia houses one person aged 100 or more for every 100 babies in their first year of life. By 2060 there will be 25 centenarians for every 100 babies. The commission comes up with the projections by reworking what it believes are "seriously misleading" Bureau of Statistics projections. Whereas the bureau thinks a girl born today can expect to live to 84, the commission thinks she will live to 94 years.
// Subscribe subscribes a consumer, creating it if not present from a template
// configuration modified by opts. The stream should already exist. See
// nats.Subscribe for the underlying subscription semantics.
//
// TODO: I don't really think this kind of thing is a good idea, but it's
// awfully verbose without it so I suspect we will need to cater for this
func Subscribe(stream string, consumer string, cb func(*nats.Msg), template server.ConsumerConfig, opts ...ConsumerOption) (*nats.Subscription, error) {
	c, err := LoadOrNewConsumerFromDefault(stream, consumer, template, opts...)
	if err != nil {
		return nil, err
	}

	return c.Subscribe(cb)
}
<filename>typings/src/Tests/YA.Subject.doct.d.ts import { ClassDoct, MemberDoct } from '../YA.doct'; export declare class SubjectTest { constructor(doc: ClassDoct); $subscribe(mdoc: MemberDoct): void; $notify(mdoc: MemberDoct): void; $unsubscribe(mdoc: MemberDoct): void; }
import { Body, Controller, Post } from '@nestjs/common';
import { StudentService } from './student.service';
import { AddStudentDto } from './dto/addStudent.dto';
import { StudentEntity } from './entities/student.entity';

/**
 * REST controller exposing student endpoints under the `/student` route.
 */
@Controller('student')
export class StudentController {
    constructor(private readonly studentService: StudentService) {}

    /**
     * POST /student — persists a new student and returns the created entity.
     *
     * @param addStudentDto validated request body describing the student
     * @returns the stored student entity
     */
    @Post()
    async addStudent(@Body() addStudentDto: AddStudentDto): Promise<StudentEntity> {
        return await this.studentService.addStudent(addStudentDto);
    }
}
/**
 * Controller for all trusted certificates views.
 *
 * <p>Thin wrapper around {@code PKIManagement}: every method delegates and
 * converts failures into localized {@link ControllerException}s.
 *
 * @author P.Piernik
 *
 */
@Component
public class CertificatesController {
	// Backend used for all certificate persistence operations.
	private PKIManagement pkiMan;
	// Source of localized error messages.
	private MessageSource msg;

	CertificatesController(PKIManagement pkiMan, MessageSource msg) {
		this.pkiMan = pkiMan;
		this.msg = msg;
	}

	/**
	 * @return all persisted trusted certificates
	 * @throws ControllerException with a localized message on any backend failure
	 */
	List<NamedCertificate> getCertificates() throws ControllerException {
		try {
			return pkiMan.getPersistedCertificates();
		} catch (Exception e) {
			throw new ControllerException(msg.getMessage("CertificatesController.getAllError"), e);
		}
	}

	/**
	 * @param name name of the certificate to fetch
	 * @return the certificate registered under the given name
	 * @throws ControllerException with a localized message on any backend failure
	 */
	NamedCertificate getCertificate(String name) throws ControllerException {
		try {
			return pkiMan.getCertificate(name);
		} catch (Exception e) {
			throw new ControllerException(msg.getMessage("CertificatesController.getError", name), e);
		}
	}

	/**
	 * Persists a new trusted certificate.
	 *
	 * @throws ControllerException with a localized message on any backend failure
	 */
	void addCertificate(NamedCertificate certificate) throws ControllerException {
		try {
			pkiMan.addPersistedCertificate(certificate);
		} catch (Exception e) {
			throw new ControllerException(
					msg.getMessage("CertificatesController.addError", certificate.name), e);
		}
	}

	/**
	 * Updates an existing certificate, matched by its name.
	 *
	 * @throws ControllerException with a localized message on any backend failure
	 */
	void updateCertificate(NamedCertificate certificate) throws ControllerException {
		try {
			pkiMan.updateCertificate(certificate);
		} catch (Exception e) {
			throw new ControllerException(
					msg.getMessage("CertificatesController.updateError", certificate.name), e);
		}
	}

	/**
	 * Removes the certificate registered under the given certificate's name.
	 *
	 * @throws ControllerException with a localized message on any backend failure
	 */
	void removeCertificate(NamedCertificate certificate) throws ControllerException {
		try {
			pkiMan.removeCertificate(certificate.name);
		} catch (Exception e) {
			throw new ControllerException(
					msg.getMessage("CertificatesController.removeError", certificate.name), e);
		}
	}
}
def symbols(self) -> MultisetOfStr:
    """Gather every symbol of this object into a fresh multiset.

    Delegates the actual collection to ``collect_symbols`` and returns the
    accumulator it filled.
    """
    collected = Multiset()
    self.collect_symbols(collected)
    return collected
<filename>lib/db/db.go package db import ( "bytes" "encoding/binary" "encoding/gob" "encoding/hex" "fmt" "github.com/pvdrz/domain/lib/doc" bolt "go.etcd.io/bbolt" ) type DB struct { inner *bolt.DB } func nextID(bucket *bolt.Bucket) (doc.DocID, error) { var id doc.DocID index, err := bucket.NextSequence() if err != nil { return id, err } binary.BigEndian.PutUint64(id[:], index) return id, nil } func OpenDB(path string) (DB, error) { var db DB inner, err := bolt.Open(path, 0600, nil) if err != nil { return db, err } db.inner = inner err = db.inner.Update(func(tx *bolt.Tx) error { _, err := tx.CreateBucketIfNotExists([]byte("documents")) if err != nil { return err } _, err = tx.CreateBucketIfNotExists([]byte("hashes")) return err }) return db, err } func (db *DB) Insert(document *doc.Doc) (doc.DocID, error) { var id doc.DocID err := db.inner.Update(func(tx *bolt.Tx) error { hash := document.Hash[:] hashes := tx.Bucket([]byte("hashes")) if hashes.Get(hash) != nil { sHash := hex.EncodeToString(hash) return fmt.Errorf("the document with title \"%s\" cannot be inserted because the hash \"%s\" is already in the database", document.Title, sHash) } documents := tx.Bucket([]byte("documents")) newID, err := nextID(documents) if err != nil { return err } id = newID err = hashes.Put(hash, id[:]) if err != nil { return err } var buf bytes.Buffer enc := gob.NewEncoder(&buf) err = enc.Encode(document) if err != nil { return err } return documents.Put(id[:], buf.Bytes()) }) return id, err } func (db *DB) Get(id doc.DocID) (doc.Doc, error) { var document doc.Doc err := db.inner.View(func(tx *bolt.Tx) error { documents := tx.Bucket([]byte("documents")) bytesDoc := documents.Get(id[:]) buf := bytes.NewBuffer(bytesDoc) dec := gob.NewDecoder(buf) return dec.Decode(&document) }) return document, err } func (db *DB) Delete(id doc.DocID) error { return db.inner.Update(func(tx *bolt.Tx) error { documents := tx.Bucket([]byte("documents")) return documents.Delete(id[:]) }) } func (db *DB) 
ForEach(f func(doc.DocID, doc.Doc) error) error { return db.inner.View(func(tx *bolt.Tx) error { documents := tx.Bucket([]byte("documents")) return documents.ForEach(func(bytesID []byte, bytesDoc []byte) error { var id doc.DocID copy(id[:], bytesID) var document doc.Doc buf := bytes.NewBuffer(bytesDoc) dec := gob.NewDecoder(buf) err := dec.Decode(&document) if err != nil { return err } return f(id, document) }) }) }
// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/ctype.h>
#include <asm/stacktrace.h>
#include <asm/boot_data.h>
#include <asm/lowcore.h>
#include <asm/setup.h>
#include <asm/sclp.h>
#include <asm/uv.h>
#include <stdarg.h>
#include "boot.h"

const char hex_asc[] = "0123456789abcdef";

/*
 * Render val as lowercase hex into dst, right-aligned in a field of
 * max(pad, number-of-digits) characters. Returns a pointer to the
 * terminating NUL that is written after the last digit.
 */
static char *as_hex(char *dst, unsigned long val, int pad)
{
	char *p, *end = p = dst + max(pad, (int)__fls(val | 1) / 4 + 1);

	for (*p-- = 0; p >= dst; val >>= 4)
		*p-- = hex_asc[val & 0x0f];
	return end;
}

/* Walk backwards to the start of the current NUL-terminated entry. */
static char *symstart(char *p)
{
	while (*p)
		p--;
	return p + 1;
}

extern char _decompressor_syms_start[], _decompressor_syms_end[];

/*
 * Binary-search the embedded decompressor symbol table for the entry
 * covering ip. On a hit, *off/*len receive the offset into and size of
 * the symbol and the symbol name is returned; otherwise NULL.
 */
static noinline char *findsym(unsigned long ip, unsigned short *off, unsigned short *len)
{
	/* symbol entries are in a form "10000 c4 startup\0" */
	char *a = _decompressor_syms_start;
	char *b = _decompressor_syms_end;
	unsigned long start;
	unsigned long size;
	char *pivot;
	char *endp;

	while (a < b) {
		pivot = symstart(a + (b - a) / 2);
		start = simple_strtoull(pivot, &endp, 16);
		size = simple_strtoull(endp + 1, &endp, 16);
		if (ip < start) {
			b = pivot;
			continue;
		}
		if (ip > start + size) {
			a = pivot + strlen(pivot) + 1;
			continue;
		}
		*off = ip - start;
		*len = size;
		return endp + 1;
	}
	return NULL;
}

/*
 * Format ip as "symbol+0xoff/0xlen" into a static buffer, or as a plain
 * 16-digit hex address when no symbol matches. Not reentrant (static buf).
 */
static noinline char *strsym(void *ip)
{
	static char buf[64];
	unsigned short off;
	unsigned short len;
	char *p;

	p = findsym((unsigned long)ip, &off, &len);
	if (p) {
		strncpy(buf, p, sizeof(buf));
		/* reserve 15 bytes for offset/len in symbol+0x1234/0x1234 */
		p = buf + strnlen(buf, sizeof(buf) - 15);
		strcpy(p, "+0x");
		p = as_hex(p + 3, off, 0);
		strcpy(p, "/0x");
		as_hex(p + 3, len, 0);
	} else {
		as_hex(buf, (unsigned long)ip, 16);
	}
	return buf;
}

/*
 * Minimal printf for the decompressor, emitting through sclp_early_printk.
 * Supports only %s, %pS, %lx and %x, each with an optional decimal field
 * width; any other conversion aborts formatting and prints what was built
 * so far.
 */
void decompressor_printk(const char *fmt, ...)
{
	char buf[1024] = { 0 };
	char *end = buf + sizeof(buf) - 1; /* make sure buf is 0 terminated */
	unsigned long pad;
	char *p = buf;
	va_list args;

	va_start(args, fmt);
	for (; p < end && *fmt; fmt++) {
		if (*fmt != '%') {
			*p++ = *fmt;
			continue;
		}
		pad = isdigit(*++fmt) ? simple_strtol(fmt, (char **)&fmt, 10) : 0;
		switch (*fmt) {
		case 's':
			p = buf + strlcat(buf, va_arg(args, char *), sizeof(buf));
			break;
		case 'p':
			if (*++fmt != 'S')
				goto out;
			p = buf + strlcat(buf, strsym(va_arg(args, void *)), sizeof(buf));
			break;
		case 'l':
			if (*++fmt != 'x' || end - p <= max(sizeof(long) * 2, pad))
				goto out;
			p = as_hex(p, va_arg(args, unsigned long), pad);
			break;
		case 'x':
			if (end - p <= max(sizeof(int) * 2, pad))
				goto out;
			p = as_hex(p, va_arg(args, unsigned int), pad);
			break;
		default:
			goto out;
		}
	}
out:
	va_end(args);
	sclp_early_printk(buf);
}

/*
 * Walk the saved boot stack via the back chain, printing one frame per
 * line; stops on a misaligned sp, on leaving the boot stack, or when the
 * back chain does not advance.
 */
static noinline void print_stacktrace(void)
{
	struct stack_info boot_stack = { STACK_TYPE_TASK, BOOT_STACK_OFFSET,
					 BOOT_STACK_OFFSET + BOOT_STACK_SIZE };
	unsigned long sp = S390_lowcore.gpregs_save_area[15];
	bool first = true;

	decompressor_printk("Call Trace:\n");
	while (!(sp & 0x7) && on_stack(&boot_stack, sp, sizeof(struct stack_frame))) {
		struct stack_frame *sf = (struct stack_frame *)sp;

		decompressor_printk(first ? "(sp:%016lx [<%016lx>] %pS)\n" :
					    " sp:%016lx [<%016lx>] %pS\n",
				    sp, sf->gprs[8], (void *)sf->gprs[8]);
		if (sf->back_chain <= sp)
			break;
		sp = sf->back_chain;
		first = false;
	}
}

/*
 * Dump the state saved in the lowcore after a program check during early
 * boot: kernel version, command line, interruption code, decoded PSW,
 * general purpose registers, stack trace and last breaking-event address.
 */
void print_pgm_check_info(void)
{
	unsigned long *gpregs = (unsigned long *)S390_lowcore.gpregs_save_area;
	struct psw_bits *psw = &psw_bits(S390_lowcore.psw_save_area);

	decompressor_printk("Linux version %s\n", kernel_version);
	if (!is_prot_virt_guest() && early_command_line[0])
		decompressor_printk("Kernel command line: %s\n", early_command_line);
	decompressor_printk("Kernel fault: interruption code %04x ilc:%x\n",
			    S390_lowcore.pgm_code, S390_lowcore.pgm_ilc >> 1);
	if (kaslr_enabled)
		decompressor_printk("Kernel random base: %lx\n", __kaslr_offset);
	decompressor_printk("PSW : %016lx %016lx (%pS)\n",
			    S390_lowcore.psw_save_area.mask,
			    S390_lowcore.psw_save_area.addr,
			    (void *)S390_lowcore.psw_save_area.addr);
	decompressor_printk(
		"      R:%x T:%x IO:%x EX:%x Key:%x M:%x W:%x P:%x AS:%x CC:%x PM:%x RI:%x EA:%x\n",
		psw->per, psw->dat, psw->io, psw->ext, psw->key, psw->mcheck,
		psw->wait, psw->pstate, psw->as, psw->cc, psw->pm, psw->ri,
		psw->eaba);
	decompressor_printk("GPRS: %016lx %016lx %016lx %016lx\n",
			    gpregs[0], gpregs[1], gpregs[2], gpregs[3]);
	decompressor_printk("      %016lx %016lx %016lx %016lx\n",
			    gpregs[4], gpregs[5], gpregs[6], gpregs[7]);
	decompressor_printk("      %016lx %016lx %016lx %016lx\n",
			    gpregs[8], gpregs[9], gpregs[10], gpregs[11]);
	decompressor_printk("      %016lx %016lx %016lx %016lx\n",
			    gpregs[12], gpregs[13], gpregs[14], gpregs[15]);
	print_stacktrace();
	decompressor_printk("Last Breaking-Event-Address:\n");
	decompressor_printk(" [<%016lx>] %pS\n", (unsigned long)S390_lowcore.breaking_event_addr,
			    (void *)S390_lowcore.breaking_event_addr);
}
Singular Integrals Along N Directions in R^2 We prove optimal bounds in L^2(R^2) for the maximal operator obtained by taking a singular integral along N arbitrary directions in the plane. We also give a new proof for the optimal L^2 bound for the single scale Kakeya maximal function. Introduction Our main result is the following: log N. In Section 3 we also prove that the bound log N is optimal. It is well known (see for example Proposition 2, page 245 in ) that the distribution m coincides with a function K outside the origin, which means that if f is compactly supported and (x, y) is outside the support of f , then we also have the following kernel representation Thus, Theorem 1.1 is the singular integral analogue of the following bound for the Kakeya maximal function due to Nets Katz: In section 4 we give a new proof for the following fact proved in Theorem 1.3. Let Σ_N be a set of N unit vectors in R^2. Define the single scale Kakeya operator The proofs in both and rely on stopping time arguments similar to the John–Nirenberg inequality in the context of product BMO. Perhaps surprisingly, our arguments in this paper avoid product theory, and are essentially L^2 based. The proof of Theorem 1.1 is very similar to the proof of Theorem 1.1 in , in particular it relies on the deep inequality of Chang, Wilson and Wolff. The second major ingredient we use is a result of Lacey and Li for single annulus operators, which is essentially equivalent to Carleson's theorem . In the case for the operator immediately follows from the Rademacher-Menshov theorem. We do not see a way to extend this type of approach to the general case. We would like to thank Zubin Gautam, Nets Katz and Francesco Di Plinio for stimulating discussions on the subject. Discrete versus continuous square function To simplify notation, we will often replace (x, y) with x.
Recall the conditional expectation with respect to the σ-algebra consisting of dyadic squares of side length 2 −j , and let ∆ j = E j+1 − E j be the martingale difference. Denote by the discrete square function. We will need the following fundamental inequality. For each linear bounded operator T : We need the following variant of Lemma 3.1 from . Lemma 2.2. Let T be a linear bounded multiplier operator T : . Then there exists c 3 > 0 independent of N, T and x such that for almost every x ∈ R 2 and each f ∈ L 2 (R 2 ) Sublemma 4.2 from implies that (uniformly) for each x Since also |B k+n L k+n g(x)| Mg(x), uniformly over k, n, the result follows from the triangle inequality. Lemma 2.3. Let T be a linear bounded multiplier operator T : L 2 (R 2 ) → L 2 (R 2 ) associated with multiplier m. Assume in addition that m is zero on the ball with radius 2 2N . Then there exists c 4 > 0 independent of N, T and x such that for almost every x ∈ R 2 and each f ∈ L 2 (R 2 ) Proof The argument follows as in the previous lemma, by using the fact that (part of Sublemma 4.2 in ) for each n ≥ 0. Proof of the Theorem 1.1 We have proved in the previous section that The following Lemma is a variant of similar lemmas from and . Lemma 3.1. Let T be a sublinear operator on some measure space (X, m). Assume Proof It suffices to prove that for each k It was proved in that A k maps L 2 to L 2,∞ and L p to itself for 2 < p < ∞, with implicit bounds independent of N. That is a deep result, essentially equivalent with Carleson's theorem . Since each T v maps L 1 to L 1,∞ , The result now follows from Lemma 3.1 above. Proof By using a limiting argument, we can assume that m is supported in a finite union of dyadic annuli 2 j ≤ |ξ| < 2 j+1 . By rescaling, we can assume m is actually supported away from the ball of radius 2 2N (for example). Let ǫ N = √ c 1 log N . For each λ > 0, By Lemma 2.1, Using this, Lemma 2.3 and Proposition 3.2, we get that Theorem 1.1 now follows. 
The following result shows that the bound in Theorem 1.1 is optimal. Proof This is a standard example, we only briefly sketch the details. Define f (x, y) = 1 (x,y) 1 1≤ (x,y) ≤N/100 . Let 100 ≤ (x, y) ≤ N/100. Let w ∈ Σ N be such that v − w ≤ 10 N . It easily follows that simply from the fact that tv − tw ≤ 1/5 if |t| < N/50 and the fact that and hence T * N f 2 (log N) 3/2 . Since f 2 (log N) 1/2 , the Proposition follows. Proof of Theorem 1.3 First, let us remark that there is a proof along the lines of the argument for Theorem 1.1. We however choose to give a different, self contained argument, one that in particular does not appeal to the Chang-Wilson-Wolff inequality. It suffices to prove the bound for where ψ is a positive function such thatψ is supported in . Here F 0 is the Fourier restriction to the unit ball B (that is, F 0 =F 1 B ), while F n the Fourier restriction to the annulus 2 n−1 ≤ |(ξ, η)| ≤ 2 n . Note that where M is the Hardy-Littlewood maximal function, and is smooth. This is since K v (x, y) := (φ(ξ, η)ψ(v 1 ξ +v 2 η))(x, y) easily satisfies with bound independent of v, and M 0 F 0 (x, y) = sup v∈Σ N |F 0 * K v (x, y)|. Next, we analyze the case n ≥ 1. Definition 4.1 (Grids). A collection of intervals on the unit circle S 1 is called a grid if for any two intervals from the collection that intersect, one of them should contain the other one. The standard dyadic grid G 0 is the collection dyadic intervals on S 1 , obtained by identifying S 1 with
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_SIGNIN_SIGNIN_GLOBAL_ERROR_H_
#define CHROME_BROWSER_SIGNIN_SIGNIN_GLOBAL_ERROR_H_

#include <set>

#include "base/compiler_specific.h"
#include "base/gtest_prod_util.h"
#include "base/macros.h"
#include "chrome/browser/ui/global_error/global_error.h"
#include "components/keyed_service/core/keyed_service.h"
#include "components/signin/core/browser/signin_error_controller.h"

class Profile;

// Shows auth errors on the wrench menu using a bubble view and a menu item.
// Observes SigninErrorController for auth-status changes and surfaces them
// through the GlobalError UI machinery.
class SigninGlobalError : public GlobalErrorWithStandardBubble,
                          public SigninErrorController::Observer,
                          public KeyedService {
 public:
  // NOTE(review): both pointers are stored raw below, so |error_controller|
  // and |profile| are presumably owned elsewhere and must outlive this
  // object -- confirm against the factory that creates this service.
  SigninGlobalError(SigninErrorController* error_controller,
                    Profile* profile);
  ~SigninGlobalError() override;

  // Returns true if there is an authentication error.
  bool HasError();

  // Shows re-authentication UI to the user in an attempt to fix the error.
  // The re-authentication UI will be shown in |browser|.
  void AttemptToFixError(Browser* browser);

 private:
  FRIEND_TEST_ALL_PREFIXES(SigninGlobalErrorTest, Basic);
  FRIEND_TEST_ALL_PREFIXES(SigninGlobalErrorTest, AuthStatusEnumerateAllErrors);

  // KeyedService:
  void Shutdown() override;

  // GlobalErrorWithStandardBubble:
  bool HasMenuItem() override;
  int MenuItemCommandID() override;
  base::string16 MenuItemLabel() override;
  void ExecuteMenuItem(Browser* browser) override;
  bool HasBubbleView() override;
  base::string16 GetBubbleViewTitle() override;
  std::vector<base::string16> GetBubbleViewMessages() override;
  base::string16 GetBubbleViewAcceptButtonLabel() override;
  base::string16 GetBubbleViewCancelButtonLabel() override;
  void OnBubbleViewDidClose(Browser* browser) override;
  void BubbleViewAcceptButtonPressed(Browser* browser) override;
  void BubbleViewCancelButtonPressed(Browser* browser) override;

  // SigninErrorController::Observer:
  void OnErrorChanged() override;

  // The Profile this service belongs to.
  Profile* profile_;

  // The SigninErrorController that provides auth status.
  SigninErrorController* error_controller_;

  DISALLOW_COPY_AND_ASSIGN(SigninGlobalError);
};

#endif  // CHROME_BROWSER_SIGNIN_SIGNIN_GLOBAL_ERROR_H_
// 1. shift a KonValue // 2. mark this value to black // 3. add all children to this queue void KN_Mark(KonState* kstate, KxList* taskQueue, char color) { while (KxList_Length(kstate->markTaskQueue) > 0) { KonBase* konPtr = KxList_Shift(taskQueue); KN_MarkNode(konPtr, taskQueue, color); } }
/**
 * Sleeps for the given number of milliseconds + nanoseconds. This exists
 * purely as a seam so that tests can mock out real sleeping.
 *
 * @param millis whole milliseconds to sleep
 * @param nanos additional nanoseconds (0-999999) to sleep
 */
default void sleep(long millis, int nanos) {
  try {
    Thread.sleep(millis, nanos);
  } catch (InterruptedException e) {
    // Restore the interrupt status instead of swallowing it, so that
    // cooperative cancellation still works further up the call stack.
    Thread.currentThread().interrupt();
  }
}
/**
 * Determines whether an encounter should be telemedicine, with different chances before and
 * after the start of the COVID-19 pandemic.
 * @param person source of randomness
 * @param time current time in the simulation
 * @return true if the encounter should be virtual
 */
public static boolean shouldEncounterBeVirtual(Person person, long time) {
  // Telemedicine does not exist in the simulation before the start date.
  if (time < TELEMEDICINE_START_DATE) {
    return false;
  }
  // Between start and full uptake, use the pre-pandemic adoption chance;
  // afterwards, the current (higher-uptake) chance applies.
  return time < TELEMEDICINE_UPTAKE_DATE
      ? person.rand() <= PREPANDEMIC_CHANCE
      : person.rand() <= CURRENT_CHANCE;
}
package eu.unicore.security;

import java.io.Serializable;

/**
 * Holds authentication data (a username/password pair) in simple form.
 * <p>
 * The password is {@code transient}, so it is deliberately dropped when an
 * instance is Java-serialized; after deserialization {@link #getPasswd()}
 * returns {@code null}.
 * @author <NAME>
 */
public class HTTPAuthNTokens implements Serializable
{
	private static final long serialVersionUID = 3425680289291775268L;

	// Both fields are immutable after construction (no setters exist).
	private final String userName;
	private final transient String passwd;

	public HTTPAuthNTokens(String userName, String passwd)
	{
		this.userName = userName;
		this.passwd = passwd;
	}

	public String getPasswd()
	{
		return passwd;
	}

	public String getUserName()
	{
		return userName;
	}
}
def main(input_file):
    """Print each line of ``input_file`` lower-cased, with punctuation and
    digits replaced by spaces and runs of whitespace collapsed to single
    spaces.

    Params:
        input_file (str): Path of the text file to process.
    """
    # Build the translation table with str.maketrans instead of a manual
    # ord() dict: same mapping (punctuation/digit -> space), clearer intent.
    bad_chars = string.punctuation + string.digits
    table = str.maketrans(bad_chars, " " * len(bad_chars))

    with open(input_file, "r") as infile:
        for line in infile:
            # split() collapses the substituted spaces along with any other
            # whitespace, so cleaned words come out single-space separated.
            line_clean = line.translate(table).lower().split()
            print(*line_clean, sep=" ")
""" Contains functions for assessing length of daily summaries for predicting timelines. The functions take as input a groundtruth object (representing reference timelines) and output a number. """ import math def constant_length_2(groundtruth): """ Always returns 2. Params: groundtruth (Groundtruth): Reference timelines. Returns: The number 2. """ return 2 def average_length_in_sentences(groundtruth): """ Returns the average length of all daily summaries (in sentences). The average ist first computed over all summaries in each timeline in `groundtruth`. Then the average over all averages obtained in this way is computed. Params: groundtruth (Groundtruth): Reference timelines. Returns: Average daily sumamry length. """ all_avgs = [] for tl in groundtruth.timelines: all_avgs.append(sum(len(x) for x in tl.dates_to_summaries.values()) / len(tl.get_dates())) return math.floor(sum(all_avgs) / len(all_avgs)) def max_length_in_sentences(groundtruth): """ Returns maximum length over all daily summaries (in sentences). Params: groundtruth (Groundtruth): Reference timelines. Returns: Maximum daily sumamry length. """ all_maxs = [] for tl in groundtruth.timelines: all_maxs.append(max(len(x) for x in tl.dates_to_summaries.values())) return max(all_maxs)
Pilot Study of an Individualised Early Postpartum Intervention to Increase Physical Activity in Women with Previous Gestational Diabetes Optimal strategies to prevent progression towards overt diabetes in women with recent gestational diabetes remain ill defined. We report a pilot study of a convenient, home based exercise program with telephone support, suited to the early post-partum period. Twenty eight women with recent gestational diabetes were enrolled at six weeks post-partum into a 12 week randomised controlled trial of Usual Care (n = 13) versus Supported Care (individualised exercise program with regular telephone support; n = 15). Baseline characteristics (Mean ± SD) were: Age  33 ± 4  years; Weight 80 ± 20 kg and Body Mass Index (BMI) 30.0 ± 9.7 kg/m2. The primary outcome, planned physical activity {Median (Range)}, increased by 60 (0–540) mins/week in the SC group versus 0 (0–580) mins/week in the UC group (P = 0.234). Walking was the predominant physical activity. Body weight, BMI, waist circumference, % body fat, fasting glucose and insulin did not change significantly over time in either group. This intervention designed to increase physical activity in post-partum women with previous gestational diabetes proved feasible. However, no measurable improvement in metabolic or biometric parameters was observed over a three month period. Background Strategies to prevent the progression from impaired glucose tolerance to overt (principally type 2) diabetes in middleaged and older adults have been developed by a number of groups worldwide, drawing on the results of major randomised controlled trials . Women with previous gestational diabetes (GDM) are known to be at high risk of progression to type 2 diabetes . However, strategies for diabetes prevention for women with previous GDM in the period immediately following pregnancy are less well defined. 
The TRIPOD and PIPOD studies demonstrated that thiazolidinedione (TZD) therapy could delay progression to diabetes in a high risk group of women. Some benefits have also been suggested for metformin by Ratner and colleagues in women with previous GDM (mean age at study entry 43 years) who participated in the Diabetes Prevention Program (DPP). In women with previous GDM, metformin led to a 50% reduction of the risk of progression from impaired glucose tolerance to overt diabetes, whereas lifestyle intervention was associated with a 53% risk reduction. However, medication-based strategies may not be appropriate for women of child-bearing age and are unlikely to be feasible or desirable on a broader scale. Anecdotally, the pressures of caring for a new baby tend to dominate the early postpartum period, with Australian women potentially experiencing difficulty focusing on their own long-term health, and specifically their exercise, in this context. This belief is supported by a recent qualitative study conducted in the USA that found that having young children/child was a major barrier to an active lifestyle in the first 12 months postpartum . Our recent work has 2 International Journal of Endocrinology demonstrated that women with previous GDM frequently have ongoing deficits in health promoting physical activity. By contrast, the recent findings of Retnakaran et al. were more positive, suggesting some improvement in physical activity following a GDM pregnancy. Changes in lifestyle patterns at this time might potentially prove to be valuable in preventing longer term progression towards diabetes, as well as influencing the woman's entire family towards adopting health promoting behaviours. However, Cheung et al. have reported little success with a group intervention that used patient-centred counselling or more recently with a pedometer-linked programme . 
In contrast, several intervention studies based on the Social Cognitive Theory have demonstrated success in increasing and even maintaining physical activity among individuals with type 2 diabetes . This pilot study sought to evaluate the feasibility and efficacy of an individualised programme, based on the social cognitive theory, to assist women to be more physically active in the early post natal period. Research Design and Methods The protocol was approved by Hospital and University Human Research Ethics Committees. Participants consented in writing after appropriate verbal and written explanation of the study. The study was registered with the Australian and New Zealand Clinical Trials Registry: ACTRN 12608000280303. Seventy-two women were approached to join the trial prior to six weeks postpartum. Forty-three women refused participation and one was excluded due to detection of overt diabetes on the entry oral glucose tolerance test (OGTT), leaving 28 randomised participants. At six weeks after delivery of the index pregnancy complicated by GDM, participants underwent baseline assessment. Parameters assessed included a 75 g OGTT, fasting insulin, body weight and height using standardised instruments, and body composition using bioimpedance methodology. Insulin resistance was estimated using the HOMA-IR equation (HOMA-IR = Fasting insulin (μU/mL) × Fasting glucose (mmol/L)/22.5). Physical Activity was assessed using the validated Australian Women's Activity Survey . Women were then randomly assigned to one of two groups. The Usual Care group ("UC", n = 13) received brief printed materials outlining the importance of diet and exercise for the prevention of future diabetes. The Supported Care ("SC", n = 15) group underwent an initial face-toface consultation with an exercise physiologist where specific, individualised goals for initiating and maintaining regular health-enhancing physical activity were developed. 
Consistent with current physical activity guidelines a physical activity target of 150 mins/wk was set, to be achieved gradually over the 12 weeks intervention through activities acceptable to the individual. The exercise physiologist contacted each woman in the SC group weekly by telephone for the next four weeks and then every 2 weeks thereafter to assess progress, promote accountability, and to provide tailored expert support for recognising and overcoming experienced constraints to physical activity behaviour change. Twelve weeks following baseline assessment (total 18 weeks postpartum), both groups underwent repeat examinations as noted above, except that samples for fasting glucose and insulin alone were taken without a repeat OGTT. The primary outcome measure was change in self-reported physical activity. Secondary outcomes were change in insulin resistance (HOMA-IR), change in weight, and changes in body composition. Statistical analyses were performed using data from those women who completed both assessments n = 11 "UC" and n = 14 "SC" women. All comparisons between the UC and SC groups consider differences between these groups in the change or "Delta" (Delta = Value 18 weeks post partum − Value 6 weeks post partum ) in each variable between six and 18 weeks postpartum. Statistical comparisons have been performed using unpaired t-tests for normally distributed variables and Mann Whitney U tests for nonnormally distributed variables. Categorical variables were analysed using Fisher's exact test due to small cell sizes. Significance was accepted at the 5% level for two-tailed analysis for all variables. Results Typical of an Australian GDM cohort, the women were generally in their early thirties and their mean body mass index (BMI) at six weeks postpartum was in the obese range. Importantly there were no significant differences between the study groups demographic, physical activity or insulin resistance at baseline (see Table 1). 
Two UC and one SC participant dropped out of the study prior to the follow-up assessment. Consistent with previous studies, the physical activity data were non-normally distributed (see Table 2). Median (range) planned physical activity increased by 60 (0-540) mins/wk in the SC group versus 0 (0-580) mins/wk in the UC group, but this change was not statistically significant (P = 0.234, Mann-Whitney U test). The change observed in the SC group's physical activity comprised mostly increased planned walking. A predefined categorical analysis examined differences between SC and UC groups in the proportion of women increasing planned physical activity by >60 mins/wk; 67% of women who received SC achieved this criterion compared to 31% of women who received UC. Despite this, most women regardless of group allocation failed to reach the recommended physical activity level of 150 mins/wk (see Table 2). Metabolic assessments revealed no changes in weight or insulin resistance in either group (see Table 2). Body composition (% lean mass, % fat mass) was also unchanged. Breast feeding status (full/partial/nil) was also noted at six and 18 weeks postpartum. Weight loss and other metabolic parameters did not differ between breastfeeding groups. Open-ended feedback regarding the intervention programme was obtained from the SC group. Whilst most women responded positively to the programme, many commented that the starting point of six weeks postpartum was too early [sentence truncated in source; "too early" inferred from the discussion of timing below].

Discussion

This pilot study was designed to evaluate and refine a potential early postpartum intervention designed to increase physical activity in women with previous gestational diabetes for future dissemination and evaluation. Our findings suggest that a postpartum programme designed to encourage and assist women with prior GDM to be more physically active is feasible. Specific strengths of our study included the randomized design and good overall retention of participants.
Weaknesses included relatively poor recruitment rates, anecdotally contributed to by the predominance of "baby-related" concerns in early postpartum period, short duration of followup, and small total study cohort. As noted in Tables 1 and 2, there was great variability in physical activity both at baseline and at followup, with many women reporting essentially zero planned physical activity. The variance in all biophysical study measures was large, in particular for HOMA-IR where the standard deviations approached or exceeded the mean values. In designing future studies, it may be worthwhile to stratify women according to BMI at entry, as this is likely to be a major factor influencing the degree of insulin resistance. Notwithstanding the timing of commencement, the intervention was well received. Anecdotally, women were happy that their potential health problems were being addressed in an organised programme. Although changes in physical activity between groups did not reach statistical significance, the proportion of women increasing their physical activity by >60 mins/wk in the SC group was twice that of women in the UC group. If confirmed in a larger study sample and maintained over a longer period of time, this would provide significant health benefits . Commencement of programmes designed to increase physical activity in the early postpartum period has some potential advantages in terms of capitalizing on the increased motivation often seen in pregnancy. However, the focus of attention frequently shifts to the baby at this stage, making alterations in ingrained maternal behaviours potentially difficult to achieve. The emotional stress of adapting to a new baby and the fear of receiving a diagnosis of diabetes are key barriers to follow-up care for GDM . As noted previously, other studies of interventions in the postpartum period have met with limited and variable success and the optimal timing and content of postpartum programmes remains undefined. 
Group-based programmes may help increase motivation but achieving "buy in" and maintaining participation appears challenging. Despite the theoretical advantage of commencing diabetes prevention at an earlier stage of pathophysiology, practical barriers may make women more resistant to change at this stage of the life cycle. Despite some evidence of increased physical activity, measures of glucose metabolism were not altered by this intervention over a three-month period. This was not unexpected given the small sample size and short duration of the study, but we noted absolutely no trends in favour of metabolic improvement. Weight and body composition were also unchanged. Although early postpartum breast feeding status did not appear to influence our findings, the potential importance of breast feeding in longer term diabetes prevention has also been noted in a recent review . Alternatively, one could argue in favour of pharmacologic prevention of progression towards diabetes following GDM, citing the results of the TRIPOD , PIPOD , and DPP studies. However, thiazolidinediones are rapidly disappearing from the pharmacotherapy of type 2 diabetes due to an unfavourable risk/benefit profile and their potential use in diabetes prevention appears severely limited. Metformin was reported to be equally efficacious as an intensive diet/lifestyle programme in women with previous GDM who participated in the DPP , but this finding relates to much older women (mean age 46 years at study entry), rather than those in the early postpartum period. For large scale intervention, lifestyle measures appear intrinsically more attractive, though metformin may still deserve consideration in those struggling to make effective lifestyle changes. Further research is warranted to improve the physical activity levels and general health of women with previous GDM. 
We suggest that studies combining physical activity and dietary interventions may potentially offer greater benefits and we are currently planning such studies, using the pilot data reported in this paper. We hope that our findings will also assist other researchers in determining in the design and conduct of more definitive studies, in particular by allowing pre hoc power calculations to be performed in a more robust fashion.
<filename>lumino/Graphics/include/LuminoGraphics/Rendering/FeatureRenderer/PrimitiveMeshRenderer.hpp #pragma once #include "../Common.hpp" namespace ln { class PrimitiveMeshRenderer : public Object { public: static PrimitiveMeshRenderer* get(); void begin(RenderingContext* renderingContext, Material* material); void begin(CommandList* commandList, Material* material); void end(); void drawPlane(float width, float depth, const Color& color = Color::White); void drawPlane(float width, float depth, const Vector2& uv1, const Vector2& uv2, const Color& color = Color::White); void drawSphere(float radius, int slices, int stacks, const Color& color, const Matrix& localTransform = Matrix()); void drawBox(const Box& box, const Color& color = Color::White, const Matrix& localTransform = Matrix()); static void drawPlane(RenderingContext* context, Material* material, float width, float depth, const Color& color = Color::White) { auto* r = get(); r->begin(context, material); r->drawPlane(width, depth, color); r->end(); } static void drawPlane(RenderingContext* context, Material* material, float width, float depth, const Vector2& uv1, const Vector2& uv2, const Color& color = Color::White) { auto* r = get(); r->begin(context, material); r->drawPlane(width, depth, uv1, uv2, color); r->end(); } static void drawSphere(RenderingContext* context, Material* material, float radius, int slices, int stacks, const Color& color, const Matrix& localTransform = Matrix()) { auto* r = get(); r->begin(context, material); r->drawSphere(radius, slices, stacks, color, localTransform); r->end(); } static void drawBox(RenderingContext* context, Material* material, const Box& box, const Color& color = Color::White, const Matrix& localTransform = Matrix()) { auto* r = get(); r->begin(context, material); r->drawBox(box, color, localTransform); r->end(); } private: PrimitiveMeshRenderer(); Result init(); CommandList* m_commandList; Material* m_material; Array<detail::MeshGenerater*> m_generators; friend 
class detail::RenderingManager; }; } // namespace ln
<filename>checksubjectschema_test.go package schemaregistry_test import ( "encoding/json" "net/http" "net/http/httptest" "testing" "github.com/larixsource/go-schema-registry" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) const testSchema = `{ "type": "record", "name": "Frame", "fields": [ { "name": "data", "type": "bytes" } ] }` func TestRegistry_CheckSubjectSchemaOK(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/subjects/frames-value", r.URL.String()) assert.Equal(t, "application/vnd.schemaregistry.v1+json", r.Header.Get("Content-Type")) var msg map[string]string err := json.NewDecoder(r.Body).Decode(&msg) require.Nil(t, err) assert.Equal(t, testSchema, msg["schema"]) json.NewEncoder(w).Encode(schemaregistry.SubjectSchema{ Subject: "frames-value", ID: 1, Version: 2, Schema: testSchema, }) })) defer ts.Close() registry, err := schemaregistry.New(ts.URL) require.Nil(t, err) ss, err := registry.CheckSubjectSchema("frames-value", testSchema) require.Nil(t, err) assert.Equal(t, "frames-value", ss.Subject) assert.Equal(t, 1, ss.ID) assert.Equal(t, 2, ss.Version) assert.Equal(t, testSchema, ss.Schema) } func TestRegistry_CheckSubjectSchemaErrSubjectNotFound(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/subjects/frames-value", r.URL.String()) assert.Equal(t, "application/vnd.schemaregistry.v1+json", r.Header.Get("Content-Type")) var msg map[string]string err := json.NewDecoder(r.Body).Decode(&msg) require.Nil(t, err) assert.Equal(t, testSchema, msg["schema"]) w.WriteHeader(http.StatusNotFound) json.NewEncoder(w).Encode(&schemaregistry.APIError{ Code: schemaregistry.SubjectNotFound, Message: "Subject not found", }) })) defer ts.Close() registry, err := schemaregistry.New(ts.URL) require.Nil(t, err) _, err = registry.CheckSubjectSchema("frames-value", testSchema) 
apiErr, ok := err.(*schemaregistry.APIError) require.True(t, ok) assert.Equal(t, schemaregistry.SubjectNotFound, apiErr.Code) assert.Equal(t, "Subject not found", apiErr.Message) } func TestRegistry_CheckSubjectSchemaErrSchemaNotFound(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/subjects/frames-value", r.URL.String()) assert.Equal(t, "application/vnd.schemaregistry.v1+json", r.Header.Get("Content-Type")) var msg map[string]string err := json.NewDecoder(r.Body).Decode(&msg) require.Nil(t, err) assert.Equal(t, testSchema, msg["schema"]) w.WriteHeader(http.StatusNotFound) json.NewEncoder(w).Encode(&schemaregistry.APIError{ Code: schemaregistry.SchemaNotFound, Message: "Schema not found", }) })) defer ts.Close() registry, err := schemaregistry.New(ts.URL) require.Nil(t, err) _, err = registry.CheckSubjectSchema("frames-value", testSchema) apiErr, ok := err.(*schemaregistry.APIError) require.True(t, ok) assert.Equal(t, schemaregistry.SchemaNotFound, apiErr.Code) assert.Equal(t, "Schema not found", apiErr.Message) } func TestRegistry_CheckSubjectSchemaErrInternalServer(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/subjects/frames-value", r.URL.String()) assert.Equal(t, "application/vnd.schemaregistry.v1+json", r.Header.Get("Content-Type")) var msg map[string]string err := json.NewDecoder(r.Body).Decode(&msg) require.Nil(t, err) assert.Equal(t, testSchema, msg["schema"]) w.WriteHeader(http.StatusInternalServerError) json.NewEncoder(w).Encode(&schemaregistry.APIError{ Code: 500, Message: "Internal server error", }) })) defer ts.Close() registry, err := schemaregistry.New(ts.URL) require.Nil(t, err) _, err = registry.CheckSubjectSchema("frames-value", testSchema) apiErr, ok := err.(*schemaregistry.APIError) require.True(t, ok) assert.EqualValues(t, 500, apiErr.Code) assert.Equal(t, "Internal 
server error", apiErr.Message) }
Lesions of endodontic origin: An emerging risk factor for coronary heart diseases A high inflammatory state, such as atherosclerosis, is a major underlying cause of coronary heart diseases (CHDs). Inflammatory mediators are known to lead to endothelial dysfunction and play a key role in initiation, progression, and rupture of atherothrombotic plaque. Chronic inflammatory dental infections such as periodontitis and lesions of endodontic origin or chronic apical periodontitis (CAP) may provide an environment conducive for such events. Atherosclerosis has shown to share a common spectrum of inflammatory markers with apical periodontitis. The possible correlation between CHD and CAP is emerging at microbiological, clinical, inflammatory, and molecular levels. This less recognized fact should be discussed more among the dental and medical fraternity so that more awareness and positive approach toward oral health can be created among patients and health-care providers. Introduction Atherosclerosis, which is a condition characterized by a high inflammatory state, is a major underlying cause of coronary heart disease (CHD), as it may precipitate myocardial infarction, stroke, or peripheral vascular disease. The disease follows a somewhat silent clinical course, and the first clinical symptom often arises at a welladvanced stage. 1,2 Inflammatory mediators are known to lead to endothelial dysfunction and play a key role in initiation, progression, and rupture of atherothrombotic plaque. Long-standing chronic inflammatory state anywhere in the body is a known contributing factor for many degenerative diseases, and chronic inflammatory dental infections such as dental caries, periodontitis, and endodontic lesions may provide an environment conducive for such events. 
The microbe-induced inflammatory reaction of the oral connective tissue causes a symptomatic vasodilatation, resulting in an increased permeability of the endothelium that leads to the migration of leukocytes in the perivascular region and also the foray of bacteria. 3

1.1. Inflammatory conditions of gingiva and teeth

1.1.1. Periodontitis

Periodontitis is defined as "an inflammatory disease of supporting tissues of teeth, i.e., the gingiva and the alveolar bone, caused by specific microorganisms or groups of specific microorganisms, resulting in progressive destruction of the periodontal ligament and alveolar bone with periodontal pocket formation, gingival recession, or both." 4 This inflammatory disease of the supporting tissues of teeth has long been linked to systemic diseases with high inflammatory quotient such as type 2 diabetes mellitus and CHD. 5,6 Many case-controlled, cross-sectional, and cohort studies have reported an association between periodontitis and increased cardiovascular, cerebrovascular, and peripheral artery disease, as determined by clinical disease, angiography, ultrasonography, and reduced flow-mediated dilation. 7,8

1.1.2. Lesions of endodontic origin (chronic apical periodontitis)

Lesions of endodontic origin or apical periodontitis may be defined as "acute or chronic inflammatory lesion around the apex of a tooth caused by bacterial infection of the pulp canal system" 9 and usually presents in the presence or after restoration of deep caries lesions or fractured teeth. 10 Although the etiology for both conditions is different, this condition bears some similarities to chronic periodontal inflammatory disease, viz., similar pathogenic gram-negative microflora and a visible rise in systemic cytokine levels in both the clinical situations. 11,12 Thus, the systemic effects related to periodontitis may be applicable for lesions of endodontic origin too.
An overabundant production of localized or systemic inflammatory mediators is seen in response to gram-negative bacteria in certain individuals, which may in turn lead to vascular or cardiovascular damage. 13,14 Detection of Streptococcus mutans, a major bacterial etiopathogen for dental caries in atherogenic plaque is also suggestive of a possible proatherogenic potential of dental caries. 15 Moreover, a recent multicentric study has shown that the prevalence of teeth with radiographic evidence of apical periodontitis is as high as 65% in Indian urban population. Thus, it is all the more important to recognize and address the effects of endodontic lesions or apical periodontitis on the systemic health, especially relating to the cardiovascular diseases. 16 Although studies have shown that CHD and periodontitis are associated independently with the classical risk factors such as arterial hypertension, diabetes mellitus, smoking, and hypertriglyceridemia, only a few studies have explored the potential association between chronic apical periodontitis (CAP) and CHD 13,17 (See Table 1). Thus, the aim of the present study is to present an overview of work done pertaining to the association of CAP and its treatment with atherosclerosis and CHD. Search strategy Related literature was searched using the MEDLINE/PubMed database, online Cochrane library, and Google scholar, with an emphasis on peer-reviewed dental journals till June 2017. The databases were searched by using keywords "coronary heart diseases" AND "chronic apical periodontitis" AND "atherosclerosis." Only articles in English language were included, but no other restrictions were applied. In the search engine MEDLINE/PubMed, the MeSH term "coronary heart disease" revealed 295,084 results, and "atherosclerosis" revealed 12,689 results. On further filtering, "chronic apical periodontitis and coronary heart disease" revealed 12 items, and "chronic apical periodontitis and atherosclerosis" revealed 9 items. 
Literature review The present review attempts to analyze various published reviews, cross-sectional studies, and case-control studies relating to the association of CAP and CHD. The interrelationship of chronic infectious oral conditions of infectious diseases and systemic health is one of the much intriguing, yet less discussed, aspects of complete patient rehabilitation challenging the medical fraternity. 18,19 Several researchers have reported a positive correlation between chronic, inflammatory oral diseases such as chronic periodontitis and systemic conditions, viz., coronary artery disease (CAD), diabetes mellitus, stroke, pregnancy, and premature/low-birth-weight babies. The first Surgeon General's report on "Oral Health in America" published in 2000 recognized the importance of dental health in the overall general health and well-being of a patient, and oral diseases have been addressed as an "overlooked epidemic". 20 Recognition and establishment of endodontic infection and consequent CAP as a potential risk factor or marker for CHD is still to be fully investigated, though researches have indicated a weak, but positive, association between the two conditions. 21e24 Barring a few case-controlled clinical trials, most of the studies obtained by the electronic search were retrospective cross-sectional studies, reviews, or cross-sectional surveys. Studies on the microbial association of atherosclerosis and dental caries/endodontic lesions Streptococcus mutans, the major caries-causing bacteria, is also a known pathogen for infective endocarditis and has been isolated in atherogenic plaques and extirpated heart valves, thus leading to the speculation of the role of oral streptococci in the development of cardiovascular disease. A molecular analysis of oral biofilm in atherosclerotic plaques and evaluation of decayed, missing, and filled teeth (DMFT) scores by Fernandes et al. 
revealed that Streptococcus mutans were found at a high frequency (100% specimens) in oral and vascular samples, even in edentulous patients, and its presence in atherosclerotic plaques suggests the possible role of this bacterium in the disease progression. 25 In a microbiological study using polymerase chain reaction (PCR) analysis, Nakano et al. (2006) studied specimens of atheromas, extirpated heart valves, and dental plaque from same subjects. 26 They found that although the composition of dental plaque was different from that of valvular plaque, a significant amount of Streptococcus mutans was present in both atheromas and valves. Their results suggested that Streptococcus mutans may be a causative agent for cardiovascular disease. Transient bacteremia after dental procedures such as scaling, extractions, root canal treatment, and even after chewing is a known phenomenon and may be the reason for the presence of oral streptococci in the cardiovascular region, but further studies are required to ascertain whether these strains were cariogenic or not. 27 It has been established that in less than a minute after an oral intervention, oral microorganisms can reach the heart, lungs, as well as the peripheral capillary system. Besides periodontal disease, chronic apical inflammations of endodontic origin could be considered as a possible source of bacteremia. Studies on association of CAD and DMFT index/endodontic lesions A possible clinical association between peripheral artery disease, periodontal disease, and dental caries was studied by Soto-Barreras et al., and it was seen that patients with attachment loss !4 mm had a six-fold increased risk of having peripheral artery disease and also a significantly high DMFT index and C-reactive protein (CRP) levels compared with the control group. 28 In a first of its kind cross-sectional retrospective study of computed tomography (CT) data of patients, Glodny et al. 
correlated patient's decayed, missing, and filled teeth or surface scores with the aortic calcification and concluded that patients with at least one tooth with caries or CAP had a higher atherosclerotic burden, whereas the number of restorations varied inversely with the atherosclerotic burden. They emphasized on the fact that initial caries, caries with or without pulpal involvement, and CAP are different stages of the same inflammatory condition, so caries without CAP may also be considered as an independent risk factor for atherosclerosis. 29 In another cross-sectional retrospective study of the whole-body CT scans of the patients, Peterson et al. (2014) 30 for the first time quantified aortic atherosclerotic burden by calcification scoring method and found that it related positively with some teeth with CAP but without any endodontic treatment. Conversely, endodontically treated teeth even with CAP did not contribute to the atherosclerotic burden. In this study, the factor CAP without endodontic treatment was more significant than gender, marginal periodontitis, and caries and about one-fourth as significant as age. 30 Caplan et al. (2009) evaluated the relationship between a self-reported history of endodontic treatment and prevalent CHD in Atherosclerosis Risk in Communities study. Final multivariable regression models indicated that among participants with 25 or more teeth, those reporting having had endodontic treatment two or more times had 1.62 times the odds of prevalent CHD compared with those reporting never having had endodontic treatment. 31 In a case-controlled clinical trial, Pasqualini et al. (2012) compared middle-aged adults with acute myocardial infarction (AMI) or unstable angina within 12 months, with healthy controls. Indicators of oral disease and compliance were evaluated. CD14 polymorphisms were analyzed by restriction fragment length polymorphismePCR and CHD. 
No statistically significant association emerged between the CD14C(−260)T and the CD14C(−159)T polymorphisms, endodontic or periodontal disease, and CHD
(2012) 18 To investigate the prevalence of apical periodontitis and endodontic treatment in hypertensive patients and control subjects without hypertension. In a cross-sectional study, records of 40 hypertensive patients and 51 control subjects were examined. Periapical status of all teeth was assessed by using the periapical index score. 33 To study whether the association between dental chronic inflammatory diseases and the occurrence of acute myocardial infarction (AMI) could be established to study possible risk factors for CHD. 125 patients with AMI aged between 50 and 82 years; the control patients were a group of matched subjects (gender, age, ethnicity, and smoking habits) in good health. Patients with AMI exhibited a significantly higher number of missing teeth (p ¼ .001), less teeth with root canal fillings (p ¼ .0015), a higher number of radiologic apical lesions (p ¼ .001), and a higher PSI value (p ¼ .001) than individuals without myocardial infarction. The medical data showed nonsignificant correlation between C-reactive protein (CRP) and the number of radiologic apical lesions. Caplan et al. (2009) 31 To evaluate the relationship between selfreported history of endodontic treatment (ET) and prevalent CHD among dentate participants with the risk of atherosclerosis 15,792 patients visited hospital between (1987)(1988)(1989) Among participants with 25 or more teeth, those reporting having had ET two or more times had 1.62 (95% CI, 1.04e2.53) times the odds of prevalent CHD compared with those reporting never having had ET. Frisk and Hakeberg (2005) 22 Endodontic status in Swedish populations and possible association between apical periodontitis (AP) and CHD 3499 women participants and random samples of dentate individuals (n ¼ 2066) aged 20e70 years No significant association between AP and CHD and socioeconomic risk factors and AP Joshipura et al. 
(2006) 23 Possible association between pulpal inflammation (endodontic treatment) and incidence of CHD 34,683 participants Strong association between a positive selfreported history of endodontic treatment and incidence of CHD Arroll et al. (2010) 34 To explore the relationship between CRP as a marker of inflammation and presence and number of root canal treatments in primary care patients. 38 To assess the association between apical periodontitis and cardiovascular disease Systematic review; 13 of the 19 included studies found a significant positive association between apical periodontitis and cardiovascular disease, although in two of them, the significance was present only in univariate analysis. Five studies failed to reveal positive significance, and one study reported a negative association. Although most of the published studies found a positive association between apical periodontitis and cardiovascular disease, the quality of the existing evidence is moderate to low, and a causal relationship cannot be established. increased number of periapical radiolucencies, had lesser root canalefilled teeth, and a higher number of missing teeth. Also, a nonsignificant increase in CRP levels could be seen in patients with a larger number of periapical radiolucencies. 33 Conclusion/future directions Atherosclerosis shares a common spectrum of inflammatory markers with apical periodontitis. Various immune inflammatory mediators against bacteria from an infected root canal via phagocytosis and activation of humoral and cellular responses, viz., inflammatory cytokines (interleukins 1b), tumor necrosis factor-a, reactive oxygen species, and matrix metalloproteinases, play a crucial role in the development of apical periodontitis and are also potentially responsible for endothelial dysfunction and atherosclerosis. 
28,32,34 CRP levels are linked with different aspects of the cardiovascular risk spectrum and have been widely acknowledged as an important risk indicator for CHD and can forewarn future cardiovascular events. It has been seen that low-grade inflammation associated with a chronic infection, such as CAP, causes elevated levels of CRP. Thus, treatment and resolution of these conditions may be helpful in reduction inflammation and subsequently CRP levels. 33,34 The possible correlation between CHD and CAP is emerging clearly at microbiological, clinical, inflammatory, as well as molecular levels. 35e37 More recently, Berlin-Broner Y et al. in a systematic review studied the association between apical periodontitis and cardiovascular disease and concluded that though there is positive association between apical periodontitis and cardiovascular disease, the quality of the existing evidence is moderate to low, and a causal relationship cannot be established. 38 Though at present literature shows a weak, yet positive, association between CAP and CHD, let us not wait in inaction for the evidence to prove it. The oral health-care providers must expand their role and should emphasize on the fact that oral health can contribute to better overall health outcomes. Similarly, the medical health-care providers should encourage their patients to maintain oral health and should refer them to the oral health-care provider for regular checkups. Finally, the dental and medical fraternity should have an integrative approach toward the diagnosis and management of chronic diseases having a shared underlying etiopathogenesis and having an effect on the treatment outcomes. Conflicts of interest All authors have none to declare.
package com.github.paolorotolo.gitty_reporter; import android.animation.Animator; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.graphics.Typeface; import android.net.Uri; import android.os.Build; import android.support.annotation.Nullable; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.TextInputLayout; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.AppCompatButton; import android.support.v7.widget.AppCompatCheckBox; import android.text.TextUtils; import android.text.method.PasswordTransformationMethod; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewAnimationUtils; import android.view.animation.AccelerateInterpolator; import android.view.animation.AlphaAnimation; import android.view.animation.Animation; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import org.eclipse.egit.github.core.Issue; import org.eclipse.egit.github.core.Repository; import org.eclipse.egit.github.core.client.GitHubClient; import org.eclipse.egit.github.core.service.IssueService; import org.eclipse.egit.github.core.service.RepositoryService; import java.io.IOException; public abstract class GittyReporter extends AppCompatActivity { private EditText bugTitleEditText; private EditText bugDescriptionEditText; private EditText deviceInfoEditText; private String deviceInfo; private String targetUser; private String targetRepository; private String gitUser; private String gitPassword; private String extraInfo; private String gitToken; private Boolean enableGitHubLogin = true; private Boolean enableGuestGitHubLogin 
= true; @Override final protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.gitty_reporter_layout); // Get Device info and print them in EditText deviceInfoEditText = (EditText) findViewById(R.id.gittyreporter_device_info); getDeviceInfo(); deviceInfoEditText.setText(deviceInfo); init(savedInstanceState); final View nextFab = findViewById(R.id.gittyreporter_fab_next); final View sendFab = findViewById(R.id.gittyreporter_fab_send); if (!enableGitHubLogin){ nextFab.setVisibility(View.INVISIBLE); sendFab.setVisibility(View.VISIBLE); } AppCompatCheckBox githubCheckbox = (AppCompatCheckBox) findViewById(R.id.gittyreporter_github_checkbox); AppCompatButton registerButton = (AppCompatButton) findViewById(R.id.gittyreporter_github_register); final EditText userName = (EditText) findViewById(R.id.gittyreporter_login_username); final EditText userPassword = (EditText) findViewById(R.id.gittyreporter_login_password); userPassword.setTypeface(Typeface.DEFAULT); userPassword.setTransformationMethod(new PasswordTransformationMethod()); if (!enableGuestGitHubLogin){ githubCheckbox.setChecked(false); githubCheckbox.setVisibility(View.GONE); registerButton.setVisibility(View.VISIBLE); } githubCheckbox.setOnCheckedChangeListener( new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked){ userName.setEnabled(false); userName.setText(""); userPassword.setEnabled(false); userPassword.setText(""); } else { userName.setEnabled(true); userPassword.setEnabled(true); } } } ); } public void reportIssue (View v) { if (enableGitHubLogin) { final AppCompatCheckBox githubCheckbox = (AppCompatCheckBox) findViewById(R.id.gittyreporter_github_checkbox); EditText userName = (EditText) findViewById(R.id.gittyreporter_login_username); EditText userPassword = (EditText) findViewById(R.id.gittyreporter_login_password); if (!githubCheckbox.isChecked()){ 
if (validateGitHubLogin()){ this.gitUser = userName.getText().toString(); this.gitPassword = <PASSWORD>Password.getText().toString(); sendBugReport(); } } else { this.gitUser = ""; this.gitPassword = ""; sendBugReport(); } } else { if (validateBugReport()) { this.gitUser = ""; this.gitPassword = ""; sendBugReport(); } } } private boolean validateGitHubLogin(){ EditText userName = (EditText) findViewById(R.id.gittyreporter_login_username); EditText userPassword = (EditText) findViewById(R.id.gittyreporter_login_password); boolean hasErrors = false; if (TextUtils.isEmpty(userName.getText())){ setError(userName, "Please enter a vaild username"); hasErrors = true; } else { removeError(userName); } if (TextUtils.isEmpty(userPassword.getText())) { setError(userPassword, "Please enter a vaild password"); hasErrors = true; } else { removeError(userPassword); } return !hasErrors; } private boolean validateBugReport(){ bugTitleEditText = (EditText) findViewById(R.id.gittyreporter_bug_title); bugDescriptionEditText = (EditText) findViewById(R.id.gittyreporter_bug_description); boolean hasErrors = false; if (TextUtils.isEmpty(bugTitleEditText.getText())) { setError(bugTitleEditText, "Please enter a valid title"); hasErrors = true; } else { removeError(bugTitleEditText); } if (TextUtils.isEmpty(bugDescriptionEditText.getText())) { setError(bugDescriptionEditText, "Please describe your issue"); hasErrors = true; } else { removeError(bugDescriptionEditText); } return !hasErrors; } private void setError(TextView view, String text) { TextInputLayout parent = (TextInputLayout) view.getParent(); // there is a small flashing when the error is set again // the only way to fix that is to track if the error is // currently shown, because for some reason TextInputLayout // doesn't provide any getError methods. 
parent.setError(text); } private void removeError(TextView view) { TextInputLayout parent = (TextInputLayout) view.getParent(); parent.setError(null); } private void sendBugReport(){ bugTitleEditText = (EditText) findViewById(R.id.gittyreporter_bug_title); bugDescriptionEditText = (EditText) findViewById(R.id.gittyreporter_bug_description); final String bugTitle = bugTitleEditText.getText().toString(); final String bugDescription = bugDescriptionEditText.getText().toString(); if (extraInfo == null) { this.extraInfo = "Nothing to show."; } else if (!enableGitHubLogin){ this.gitUser = ""; this.gitPassword = ""; } new reportIssue(GittyReporter.this, this).execute(gitUser, gitPassword, bugTitle, bugDescription, deviceInfo, targetUser, targetRepository, extraInfo, gitToken, enableGitHubLogin.toString()); } public void showLoginPage (View v) { if (validateBugReport()) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { animateLoginPage(); } else { View loginView = findViewById(R.id.gittyreporter_loginFrame); View nextFab = findViewById(R.id.gittyreporter_fab_next); View sendFab = findViewById(R.id.gittyreporter_fab_send); loginView.setVisibility(View.VISIBLE); nextFab.setVisibility(View.INVISIBLE); sendFab.setVisibility(View.VISIBLE); } } } private void animateLoginPage(){ final View colorView = findViewById(R.id.gittyreporter_material_ripple); final View loginView = findViewById(R.id.gittyreporter_loginFrame); final View nextFab = findViewById(R.id.gittyreporter_fab_next); final View sendFab = findViewById(R.id.gittyreporter_fab_send); final AlphaAnimation fadeOutColorAnim = new AlphaAnimation(1.0f, 0.0f); fadeOutColorAnim.setDuration(400); fadeOutColorAnim.setInterpolator(new AccelerateInterpolator()); final AlphaAnimation fadeOutFabAnim = new AlphaAnimation(1.0f, 0.0f); fadeOutFabAnim.setDuration(400); fadeOutFabAnim.setInterpolator(new AccelerateInterpolator()); final AlphaAnimation fadeInAnim = new AlphaAnimation(0.0f, 1.0f); fadeInAnim.setDuration(400); 
fadeInAnim.setInterpolator(new AccelerateInterpolator()); fadeOutColorAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { loginView.setVisibility(View.VISIBLE); } @Override public void onAnimationEnd(Animation animation) { colorView.setVisibility(View.GONE); } @Override public void onAnimationRepeat(Animation animation) { } }); fadeOutFabAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { sendFab.setVisibility(View.VISIBLE); sendFab.startAnimation(fadeInAnim); } @Override public void onAnimationRepeat(Animation animation) { } }); int cx = (colorView.getRight()); int cy = (colorView.getBottom()); int finalRadius = Math.max(colorView.getWidth(), colorView.getHeight()); Animator rippleAnim = ViewAnimationUtils.createCircularReveal(colorView, cx, cy, 0, finalRadius); rippleAnim.setInterpolator(new AccelerateInterpolator()); rippleAnim.addListener(new android.animation.Animator.AnimatorListener() { @Override public void onAnimationStart(android.animation.Animator animation) { } @Override public void onAnimationRepeat(android.animation.Animator animation) { } @Override public void onAnimationEnd(android.animation.Animator animation) { colorView.startAnimation(fadeOutColorAnim); nextFab.startAnimation(fadeOutFabAnim); nextFab.setVisibility(View.INVISIBLE); } @Override public void onAnimationCancel(android.animation.Animator animation) { } }); colorView.setVisibility(View.VISIBLE); rippleAnim.start(); } public void showDoneAnimation(){ final View doneView = findViewById(R.id.gittyreporter_doneFrame); final View doneImage = findViewById(R.id.gittyreporter_done_image); final View sendFab = findViewById(R.id.gittyreporter_fab_send); final AlphaAnimation fadeOutColorAnim = new AlphaAnimation(1.0f, 0.0f); fadeOutColorAnim.setDuration(1000); 
fadeOutColorAnim.setInterpolator(new AccelerateInterpolator()); final AlphaAnimation fadeOutFabAnim = new AlphaAnimation(1.0f, 0.0f); fadeOutFabAnim.setDuration(400); fadeOutFabAnim.setInterpolator(new AccelerateInterpolator()); fadeOutColorAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { doneImage.setVisibility(View.INVISIBLE); finish(); } @Override public void onAnimationRepeat(Animation animation) { } }); fadeOutFabAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { sendFab.setVisibility(View.INVISIBLE); } @Override public void onAnimationRepeat(Animation animation) { } }); int cx = (doneView.getRight()); int cy = (doneView.getBottom()); int finalRadius = Math.max(doneView.getWidth(), doneView.getHeight()); Animator rippleAnim = ViewAnimationUtils.createCircularReveal(doneView, cx, cy, 0, finalRadius); rippleAnim.setInterpolator(new AccelerateInterpolator()); rippleAnim.addListener(new android.animation.Animator.AnimatorListener() { @Override public void onAnimationStart(android.animation.Animator animation) { sendFab.startAnimation(fadeOutFabAnim); } @Override public void onAnimationRepeat(android.animation.Animator animation) { } @Override public void onAnimationEnd(android.animation.Animator animation) { doneImage.startAnimation(fadeOutColorAnim); } @Override public void onAnimationCancel(android.animation.Animator animation) { } }); doneView.setVisibility(View.VISIBLE); rippleAnim.start(); } public void setTargetRepository(String user, String repository){ this.targetUser = user; this.targetRepository = repository; } public void setGuestOAuth2Token(String token){ this.gitToken = token; } public void setExtraInfo(String info){ this.extraInfo = info; } public void enableUserGitHubLogin(boolean 
enableLogin){ this.enableGitHubLogin = enableLogin; } public void enableGuestGitHubLogin(boolean enableGuest){ this.enableGuestGitHubLogin = enableGuest; } public void canEditDebugInfo(boolean canEdit){ deviceInfoEditText.setEnabled(canEdit); } public void setFabColor1(int colorNormal, int colorPressed, int colorRipple){ final com.melnykov.fab.FloatingActionButton nextFab = (com.melnykov.fab.FloatingActionButton) findViewById(R.id.gittyreporter_fab_next); nextFab.setColorNormal(colorNormal); nextFab.setColorPressed(colorPressed); nextFab.setColorRipple(colorRipple); } public void setFabColor2(int colorNormal, int colorPressed, int colorRipple){ final com.melnykov.fab.FloatingActionButton sendFab = (com.melnykov.fab.FloatingActionButton) findViewById(R.id.gittyreporter_fab_send); sendFab.setColorNormal(colorNormal); sendFab.setColorPressed(colorPressed); sendFab.setColorRipple(colorRipple); } public void setBackgroundColor1(int color){ FrameLayout view = (FrameLayout) findViewById(R.id.gittyreporter_reportFrame); view.setBackgroundColor(color); } public void setBackgroundColor2(int color){ FrameLayout view = (FrameLayout) findViewById(R.id.gittyreporter_loginFrame); view.setBackgroundColor(color); } public void setRippleColor(int color){ FrameLayout ripple = (FrameLayout) findViewById(R.id.gittyreporter_material_ripple); ripple.setBackgroundColor(color); } public void setTitleColor1(int color){ TextView view = (TextView) findViewById(R.id.gittyreporter_title_1); view.setTextColor(color); } public void setTitleColor2(int color){ TextView view = (TextView) findViewById(R.id.gittyreporter_title_2); view.setTextColor(color); } @Override public void onBackPressed() { View loginView = findViewById(R.id.gittyreporter_loginFrame); if (loginView.getVisibility() == View.VISIBLE){ View nextFab = findViewById(R.id.gittyreporter_fab_next); View sendFab = findViewById(R.id.gittyreporter_fab_send); loginView.setVisibility(View.INVISIBLE); nextFab.setVisibility(View.VISIBLE); 
sendFab.setVisibility(View.INVISIBLE); } else { finish(); } } public void openGitHubRegisterPage(View v){ Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/join")); startActivity(browserIntent); } private void getDeviceInfo() { try { String s = "Debug info:"; s += "\n OS Version: " + System.getProperty("os.version") + "(" + android.os.Build.VERSION.INCREMENTAL + ")"; s += "\n OS API Level: " + android.os.Build.VERSION.SDK_INT; s += "\n Device: " + android.os.Build.DEVICE; s += "\n Model (and Product): " + android.os.Build.MODEL + " ("+ android.os.Build.PRODUCT + ")"; s += "\n RELEASE: " + android.os.Build.VERSION.RELEASE; s += "\n BRAND: " + android.os.Build.BRAND; s += "\n DISPLAY: " + android.os.Build.DISPLAY; s += "\n CPU_ABI: " + android.os.Build.CPU_ABI; s += "\n CPU_ABI2: " + android.os.Build.CPU_ABI2; s += "\n HARDWARE: " + android.os.Build.HARDWARE; s += "\n MANUFACTURER: " + android.os.Build.MANUFACTURER; deviceInfo = s; } catch (Exception e) { Log.e("gitty-reporter", "Error getting Device INFO"); } } public abstract void init(@Nullable Bundle savedInstanceState); }
/**
 * Unit tests for {@code Configuration}: host-list parsing (CSV and brace-range
 * syntax) and construction from environment variables, both with and without
 * explicit overrides.
 *
 * @author Justin Guerra
 * @since 4/26/17
 */
public class ConfigurationTest {

    @Test
    public void csvHostList() {
        // A plain comma-separated list parses verbatim, in order.
        List<String> expected = Arrays.asList("jenkins-ut1", "jenkins-ut2", "jenkins-ut3", "jenkins-ut4");
        assertEquals(expected, Configuration.parseHostList("jenkins-ut1,jenkins-ut2,jenkins-ut3,jenkins-ut4"));
    }

    @Test
    public void rangeHostList() {
        // The {lo-hi} suffix expands into one host per index in the range.
        List<String> expected = Arrays.asList("jenkins-ut1", "jenkins-ut2", "jenkins-ut3", "jenkins-ut4");
        assertEquals(expected, Configuration.parseHostList("jenkins-ut{1-4}"));
    }

    @Test
    public void rangeHostListDoubleDigit() {
        // Range expansion must handle multi-digit bounds, not just 0-9.
        List<String> expected = Arrays.asList("jenkins-ut10", "jenkins-ut11", "jenkins-ut12", "jenkins-ut13");
        assertEquals(expected, Configuration.parseHostList("jenkins-ut{10-13}"));
    }

    @Test
    public void configurationFromEnvironment() {
        // With no overrides, every value comes straight from the environment map.
        List<String> hosts = Arrays.asList("host1", "host2", "host3");
        Configuration config = Configuration.newConfigurationFromEnv(newEnvironment(hosts), Collections.emptyMap());

        assertEquals("whatever", config.getSlaveName());
        assertEquals("hydra.com", config.getRemoteHost());
        assertEquals("jobName", config.getJobName());
        assertEquals("buildTag", config.getBuildTag());
        assertEquals(500, config.getClientTimeout());
        assertEquals(hosts, config.getHostList());
        assertTrue(config.isHttps());
        // An address without ":port" leaves the remote port unset.
        assertNull(config.getRemotePort());
    }

    @Test
    public void configurationFromEnvironmentWithOverrides() {
        // Every key present in the override map must win over the base environment.
        Map<String, String> baseEnv = newEnvironment(Arrays.asList("host1", "host2", "host3"));

        List<String> overriddenHosts = Arrays.asList("host2", "host3", "host4");
        Map<String, String> overrides = new HashMap<>();
        overrides.put(Configuration.ENV_HOST_NAME, "otherHostName");
        overrides.put(Configuration.ENV_HYDRA_ADDRESS, "hydra:12345");
        overrides.put(Configuration.ENV_HYDRA_HOSTS, String.join(",", overriddenHosts));
        overrides.put(Configuration.ENV_JOB_NAME, "coolJob");
        overrides.put(Configuration.ENV_BUILD_TAG, "otherTag");
        overrides.put(Configuration.ENV_HYDRA_CLIENT_TIMEOUT, "1000");
        overrides.put(Configuration.ENV_HYDRA_HTTPS, "false");

        Configuration config = Configuration.newConfigurationFromEnv(baseEnv, overrides);

        assertEquals("otherHostName", config.getSlaveName());
        // "host:port" addresses are split into separate host and port values.
        assertEquals("hydra", config.getRemoteHost());
        assertEquals(12345, config.getRemotePort().intValue());
        assertEquals("coolJob", config.getJobName());
        assertEquals("otherTag", config.getBuildTag());
        assertEquals(1000, config.getClientTimeout());
        assertEquals(overriddenHosts, config.getHostList());
        assertFalse(config.isHttps());
    }

    /** Builds a complete base environment map using the given host list. */
    private Map<String, String> newEnvironment(List<String> hosts) {
        Map<String, String> env = new HashMap<>();
        env.put(Configuration.ENV_HOST_NAME, "whatever");
        env.put(Configuration.ENV_HYDRA_ADDRESS, "hydra.com");
        env.put(Configuration.ENV_HYDRA_HOSTS, String.join(",", hosts));
        env.put(Configuration.ENV_JOB_NAME, "jobName");
        env.put(Configuration.ENV_BUILD_TAG, "buildTag");
        env.put(Configuration.ENV_HYDRA_CLIENT_TIMEOUT, "500");
        env.put(Configuration.ENV_HYDRA_HTTPS, "true");
        return env;
    }
}
<gh_stars>0 package cmd import ( "context" "fmt" "github.com/romana/rlog" "github.com/up9inc/mizu/cli/kubernetes" "github.com/up9inc/mizu/cli/mizu" "github.com/up9inc/mizu/shared" "github.com/up9inc/mizu/shared/debounce" core "k8s.io/api/core/v1" "log" "net/http" "net/url" "os" "os/signal" "regexp" "syscall" "time" ) var mizuServiceAccountExists bool var aggregatorService *core.Service const ( updateTappersDelay = 5 * time.Second ) var currentlyTappedPods []core.Pod func RunMizuTap(podRegexQuery *regexp.Regexp, tappingOptions *MizuTapOptions) { mizuApiFilteringOptions, err := getMizuApiFilteringOptions(tappingOptions) if err != nil { return } kubernetesProvider := kubernetes.NewProvider(tappingOptions.KubeConfigPath) defer cleanUpMizuResources(kubernetesProvider) ctx, cancel := context.WithCancel(context.Background()) defer cancel() // cancel will be called when this function exits targetNamespace := getNamespace(tappingOptions, kubernetesProvider) if matchingPods, err := kubernetesProvider.GetAllPodsMatchingRegex(ctx, podRegexQuery, targetNamespace); err != nil { return } else { currentlyTappedPods = matchingPods } var namespacesStr string if targetNamespace != mizu.K8sAllNamespaces { namespacesStr = fmt.Sprintf("namespace \"%s\"", targetNamespace) } else { namespacesStr = "all namespaces" } fmt.Printf("Tapping pods in %s\n", namespacesStr) if len(currentlyTappedPods) == 0 { var suggestionStr string if targetNamespace != mizu.K8sAllNamespaces { suggestionStr = "\nSelect a different namespace with -n or tap all namespaces with -A" } fmt.Printf("Did not find any pods matching the regex argument%s\n", suggestionStr) } nodeToTappedPodIPMap, err := getNodeHostToTappedPodIpsMap(currentlyTappedPods) if err != nil { return } if err := createMizuResources(ctx, kubernetesProvider, nodeToTappedPodIPMap, tappingOptions, mizuApiFilteringOptions); err != nil { return } go portForwardApiPod(ctx, kubernetesProvider, cancel, tappingOptions) // TODO convert this to job for built in 
pod ttl or have the running app handle this go watchPodsForTapping(ctx, kubernetesProvider, cancel, podRegexQuery, tappingOptions) go syncApiStatus(ctx, cancel, tappingOptions) //block until exit signal or error waitForFinish(ctx, cancel) } func createMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, nodeToTappedPodIPMap map[string][]string, tappingOptions *MizuTapOptions, mizuApiFilteringOptions *shared.TrafficFilteringOptions) error { if err := createMizuAggregator(ctx, kubernetesProvider, tappingOptions, mizuApiFilteringOptions); err != nil { return err } if err := updateMizuTappers(ctx, kubernetesProvider, nodeToTappedPodIPMap, tappingOptions); err != nil { return err } return nil } func createMizuAggregator(ctx context.Context, kubernetesProvider *kubernetes.Provider, tappingOptions *MizuTapOptions, mizuApiFilteringOptions *shared.TrafficFilteringOptions) error { var err error mizuServiceAccountExists = createRBACIfNecessary(ctx, kubernetesProvider) _, err = kubernetesProvider.CreateMizuAggregatorPod(ctx, mizu.ResourcesNamespace, mizu.AggregatorPodName, tappingOptions.MizuImage, mizuServiceAccountExists, mizuApiFilteringOptions, tappingOptions.MaxEntriesDBSizeBytes) if err != nil { fmt.Printf("Error creating mizu collector pod: %v\n", err) return err } aggregatorService, err = kubernetesProvider.CreateService(ctx, mizu.ResourcesNamespace, mizu.AggregatorPodName, mizu.AggregatorPodName) if err != nil { fmt.Printf("Error creating mizu collector service: %v\n", err) return err } return nil } func getMizuApiFilteringOptions(tappingOptions *MizuTapOptions) (*shared.TrafficFilteringOptions, error) { var compiledRegexSlice []*shared.SerializableRegexp if tappingOptions.PlainTextFilterRegexes != nil && len(tappingOptions.PlainTextFilterRegexes) > 0 { compiledRegexSlice = make([]*shared.SerializableRegexp, 0) for _, regexStr := range tappingOptions.PlainTextFilterRegexes { compiledRegex, err := shared.CompileRegexToSerializableRegexp(regexStr) 
if err != nil { fmt.Printf("Regex %s is invalid: %v", regexStr, err) return nil, err } compiledRegexSlice = append(compiledRegexSlice, compiledRegex) } } return &shared.TrafficFilteringOptions{PlainTextMaskingRegexes: compiledRegexSlice, HideHealthChecks: tappingOptions.HideHealthChecks}, nil } func updateMizuTappers(ctx context.Context, kubernetesProvider *kubernetes.Provider, nodeToTappedPodIPMap map[string][]string, tappingOptions *MizuTapOptions) error { if len(nodeToTappedPodIPMap) > 0 { if err := kubernetesProvider.ApplyMizuTapperDaemonSet( ctx, mizu.ResourcesNamespace, mizu.TapperDaemonSetName, tappingOptions.MizuImage, mizu.TapperPodName, fmt.Sprintf("%s.%s.svc.cluster.local", aggregatorService.Name, aggregatorService.Namespace), nodeToTappedPodIPMap, mizuServiceAccountExists, tappingOptions.TapOutgoing, ); err != nil { fmt.Printf("Error creating mizu tapper daemonset: %v\n", err) return err } } else { if err := kubernetesProvider.RemoveDaemonSet(ctx, mizu.ResourcesNamespace, mizu.TapperDaemonSetName); err != nil { fmt.Printf("Error deleting mizu tapper daemonset: %v\n", err) return err } } return nil } func cleanUpMizuResources(kubernetesProvider *kubernetes.Provider) { fmt.Printf("\nRemoving mizu resources\n") removalCtx, _ := context.WithTimeout(context.Background(), 5*time.Second) if err := kubernetesProvider.RemovePod(removalCtx, mizu.ResourcesNamespace, mizu.AggregatorPodName); err != nil { fmt.Printf("Error removing Pod %s in namespace %s: %s (%v,%+v)\n", mizu.AggregatorPodName, mizu.ResourcesNamespace, err, err, err) } if err := kubernetesProvider.RemoveService(removalCtx, mizu.ResourcesNamespace, mizu.AggregatorPodName); err != nil { fmt.Printf("Error removing Service %s in namespace %s: %s (%v,%+v)\n", mizu.AggregatorPodName, mizu.ResourcesNamespace, err, err, err) } if err := kubernetesProvider.RemoveDaemonSet(removalCtx, mizu.ResourcesNamespace, mizu.TapperDaemonSetName); err != nil { fmt.Printf("Error removing DaemonSet %s in namespace %s: %s 
(%v,%+v)\n", mizu.TapperDaemonSetName, mizu.ResourcesNamespace, err, err, err) } } func watchPodsForTapping(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc, podRegex *regexp.Regexp, tappingOptions *MizuTapOptions) { targetNamespace := getNamespace(tappingOptions, kubernetesProvider) added, modified, removed, errorChan := kubernetes.FilteredWatch(ctx, kubernetesProvider.GetPodWatcher(ctx, targetNamespace), podRegex) restartTappers := func() { if matchingPods, err := kubernetesProvider.GetAllPodsMatchingRegex(ctx, podRegex, targetNamespace); err != nil { fmt.Printf("Error getting pods by regex: %s (%v,%+v)\n", err, err, err) cancel() } else { currentlyTappedPods = matchingPods } nodeToTappedPodIPMap, err := getNodeHostToTappedPodIpsMap(currentlyTappedPods) if err != nil { fmt.Printf("Error building node to ips map: %s (%v,%+v)\n", err, err, err) cancel() } if err := updateMizuTappers(ctx, kubernetesProvider, nodeToTappedPodIPMap, tappingOptions); err != nil { fmt.Printf("Error updating daemonset: %s (%v,%+v)\n", err, err, err) cancel() } } restartTappersDebouncer := debounce.NewDebouncer(updateTappersDelay, restartTappers) for { select { case newTarget := <-added: fmt.Printf(mizu.Green, fmt.Sprintf("+%s\n", newTarget.Name)) case removedTarget := <-removed: fmt.Printf(mizu.Red, fmt.Sprintf("-%s\n", removedTarget.Name)) restartTappersDebouncer.SetOn() case modifiedTarget := <-modified: // Act only if the modified pod has already obtained an IP address. // After filtering for IPs, on a normal pod restart this includes the following events: // - Pod deletion // - Pod reaches start state // - Pod reaches ready state // Ready/unready transitions might also trigger this event. if modifiedTarget.Status.PodIP != "" { restartTappersDebouncer.SetOn() } case <-errorChan: // TODO: Does this also perform cleanup? 
cancel() case <-ctx.Done(): return } } } func portForwardApiPod(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc, tappingOptions *MizuTapOptions) { podExactRegex := regexp.MustCompile(fmt.Sprintf("^%s$", mizu.AggregatorPodName)) added, modified, removed, errorChan := kubernetes.FilteredWatch(ctx, kubernetesProvider.GetPodWatcher(ctx, mizu.ResourcesNamespace), podExactRegex) isPodReady := false for { select { case <-added: continue case <-removed: fmt.Printf("%s removed\n", mizu.AggregatorPodName) cancel() return case modifiedPod := <-modified: if modifiedPod.Status.Phase == "Running" && !isPodReady { isPodReady = true go func() { err := kubernetes.StartProxy(kubernetesProvider, tappingOptions.GuiPort, mizu.ResourcesNamespace, mizu.AggregatorPodName) if err != nil { fmt.Printf("Error occured while running k8s proxy %v\n", err) cancel() } }() mizuProxiedUrl := kubernetes.GetMizuCollectorProxiedHostAndPath(tappingOptions.GuiPort) fmt.Printf("Mizu is available at http://%s\n", mizuProxiedUrl) time.Sleep(time.Second * 5) // Waiting to be sure the proxy is ready if tappingOptions.Analysis { urlPath := fmt.Sprintf("http://%s/api/uploadEntries?dest=%s&interval=%v", mizuProxiedUrl, url.QueryEscape(tappingOptions.AnalysisDestination), tappingOptions.SleepIntervalSec) u, err := url.ParseRequestURI(urlPath) if err != nil { log.Fatal(fmt.Sprintf("Failed parsing the URL %v\n", err)) } rlog.Debugf("Sending get request to %v\n", u.String()) if response, err := http.Get(u.String()); err != nil || response.StatusCode != 200 { fmt.Printf("error sending upload entries req, status code: %v, err: %v\n", response.StatusCode, err) } else { fmt.Printf(mizu.Purple, "Traffic is uploading to UP9 for further analsys") fmt.Println() } } } case <-time.After(25 * time.Second): if !isPodReady { fmt.Printf("error: %s pod was not ready in time", mizu.AggregatorPodName) cancel() } case <-errorChan: cancel() case <-ctx.Done(): return } } } func 
createRBACIfNecessary(ctx context.Context, kubernetesProvider *kubernetes.Provider) bool { mizuRBACExists, err := kubernetesProvider.DoesMizuRBACExist(ctx, mizu.ResourcesNamespace) if err != nil { fmt.Printf("warning: could not ensure mizu rbac resources exist %v\n", err) return false } if !mizuRBACExists { err := kubernetesProvider.CreateMizuRBAC(ctx, mizu.ResourcesNamespace, mizu.RBACVersion) if err != nil { fmt.Printf("warning: could not create mizu rbac resources %v\n", err) return false } } return true } func getNodeHostToTappedPodIpsMap(tappedPods []core.Pod) (map[string][]string, error) { nodeToTappedPodIPMap := make(map[string][]string, 0) for _, pod := range tappedPods { existingList := nodeToTappedPodIPMap[pod.Spec.NodeName] if existingList == nil { nodeToTappedPodIPMap[pod.Spec.NodeName] = []string{pod.Status.PodIP} } else { nodeToTappedPodIPMap[pod.Spec.NodeName] = append(nodeToTappedPodIPMap[pod.Spec.NodeName], pod.Status.PodIP) } } return nodeToTappedPodIPMap, nil } func waitForFinish(ctx context.Context, cancel context.CancelFunc) { sigChan := make(chan os.Signal, 1) signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT) // block until ctx cancel is called or termination signal is received select { case <-ctx.Done(): break case <-sigChan: cancel() } } func syncApiStatus(ctx context.Context, cancel context.CancelFunc, tappingOptions *MizuTapOptions) { controlSocketStr := fmt.Sprintf("ws://%s/ws", kubernetes.GetMizuCollectorProxiedHostAndPath(tappingOptions.GuiPort)) controlSocket, err := mizu.CreateControlSocket(controlSocketStr) if err != nil { fmt.Printf("error establishing control socket connection %s\n", err) cancel() } for { select { case <-ctx.Done(): return default: err = controlSocket.SendNewTappedPodsListMessage(currentlyTappedPods) if err != nil { rlog.Debugf("error Sending message via control socket %v, error: %s\n", controlSocketStr, err) } time.Sleep(10 * time.Second) } } } func getNamespace(tappingOptions 
*MizuTapOptions, kubernetesProvider *kubernetes.Provider) string { if tappingOptions.AllNamespaces { return mizu.K8sAllNamespaces } else if len(tappingOptions.Namespace) > 0 { return tappingOptions.Namespace } else { return kubernetesProvider.CurrentNamespace() } }
Frailty and malnutrition among Egyptian elderly: prevalence and risk factors in nursing home and community dwelling elderly Abstract Background: Frailty and malnutrition are common problems among elders. Studying these two concepts is very important, as both of them are amenable to preventive interventions and are reversible. Aim: To measure the prevalence of frailty and malnutrition among two groups of Egyptian elderly — nursing home residents and community dwellers — and to identify risk factors for these two problems. Methods: The study included 350 elderly males and females aged 60 and over: 175 participants from nursing homes and 175 community dwellers. An interview questionnaire collected data about: sociodemographic data, health status, functionality of family (family APGAR score), screening for depression (5-item Geriatric Depression Scale) and cognitive status (Mini-Mental State). Frailty was measured using the SHARE frailty index, whereas nutritional status was measured using the Mini Nutritional Assessment questionnaire (MNA). Results: The prevalence of frailty was 77.1% among elderly in nursing homes compared to 66.3% among community dwellers, with no significant difference between them. As regards nutritional status, nursing homes had a significantly higher percentage of malnourished participants compared to community-dwelling participants (43.4% vs. 30.9%). Using the 5-item GDS, 57.7% of elderly were at risk of depression, while 42% had impaired cognitive function using the MMSE. Frailty and malnutrition were highly prevalent in older age, in females, in widowed elderly, in those living alone, in participants with a dysfunctional family, with comorbidities, with more than three comorbidities, with ischemic heart disease, receiving more than 3 drugs, with depressive symptoms, and in those with lower cognitive performances. Conclusions: Frailty and malnutrition are highly prevalent problems among elderly in both nursing homes and
//
//  Target_Detail.h
//  ModulizedDemoDetail
//
//  Created by TobyoTenma on 08/01/2017.
//  Copyright © 2017 TobyoTenma. All rights reserved.
//

#import <UIKit/UIKit.h>

// Mediator-style "Target" for the Detail module. NOTE(review): the
// Target_/Action_ naming suggests this class is located by name and its
// Action_ method invoked via a selector at runtime (CTMediator pattern), so
// renaming would break dispatch — confirm against the mediator caller.
@interface Target_Detail : NSObject

// Returns the Detail module's view controller. `content` presumably carries
// the parameters forwarded by the mediator caller — TODO confirm the expected
// dictionary keys; the implementation is not visible here.
- (UIViewController *)Action_viewController:(NSDictionary *)content;

@end
def query_USPTO(searchName, searchTerms, df):
    """Query USPTO patents via pypatent for each search term and append results.

    For each term in ``searchTerms`` the matching patents are fetched, tagged
    with the term that found them, and accumulated into ``df``. Duplicates (by
    title) are dropped, results are sorted newest-first by ``patent_date``,
    and the combined frame is written to
    ``searchResults/patents/patents_<searchName>.csv``.

    Args:
        searchName: label used for the output CSV file name.
        searchTerms: iterable of search strings passed to pypatent.Search.
        df: existing DataFrame to append results to (may be empty).

    Returns:
        The combined, de-duplicated, sorted DataFrame.
    """
    # Local import: DataFrame.append was removed in pandas 2.0, so we need
    # pd.concat; module-level imports are outside this chunk.
    import pandas as pd

    for term in searchTerms:
        df1 = pypatent.Search(term).as_dataframe()
        # Tag every row with the term that retrieved it.
        df1['searchTerm'] = [term] * len(list(df1['title']))
        df = pd.concat([df, df1], ignore_index=True)

    df = df.drop_duplicates(subset="title")
    df = df.sort_values(by=['patent_date'], ascending=False)
    # drop=True replaces the original reset_index() + del df['index'] pair.
    df = df.reset_index(drop=True)

    # makedirs creates both directory levels and is a no-op if they exist,
    # replacing the two isdir/mkdir checks of the original.
    patent_path = os.path.join('searchResults', 'patents')
    os.makedirs(patent_path, exist_ok=True)

    # BUG FIX: the original built the file name from the loop variable `term`
    # (i.e. the *last* search term) while the `searchName` parameter went
    # unused; the file contains results for all terms, so name it after the
    # search as a whole.
    patent_file = os.path.join(patent_path, 'patents_' + searchName + '.csv')
    df.to_csv(patent_file)
    print('patentsRetrieved saved: ' + patent_file)
    return df
// // Note this should only be called on one rank // // Note also that this (along with Write_) may become a separate class for // different file types (e.g. text vs netcdf) // void UnstructuredObservations::InitFile_() { if (boost::filesystem::portable_file_name(filename_)) { fid_ = std::make_unique<std::ofstream>(filename_.c_str()); *fid_ << "# Observation File: " << filename_ << " column names:" << std::endl << "# -----------------------------------------------------------------------------" << std::endl << "# Observation Name: time [" << time_unit_ << "]" << std::endl; for (const auto& obs : observables_) { *fid_ << "# -----------------------------------------------------------------------------" << std::endl << "# Observation Name: " << obs->get_name() << std::endl << "# Region: " << obs->get_region() << std::endl << "# Functional: " << obs->get_functional() << std::endl << "# Variable: " << obs->get_variable() << std::endl << "# Number of Vectors: " << obs->get_num_vectors() << std::endl; if (obs->get_degree_of_freedom() >= 0) *fid_ << "# DoF: " << obs->get_degree_of_freedom() << std::endl; } *fid_ << "# =============================================================================" << std::endl; *fid_ << "\"time [" << time_unit_ << "]\""; for (const auto& obs : observables_) { if (obs->get_num_vectors() > 1) { for (int i=0; i!=obs->get_num_vectors(); ++i) *fid_ << delimiter_ << "\"" << obs->get_name() << " dof " << i << "\""; } else { *fid_ << delimiter_ << "\"" << obs->get_name() << "\""; } } *fid_ << std::endl << std::scientific; fid_->precision(12); } else { Errors::Message msg; msg << "Invalid filename for observation: \"" << filename_ << "\""; Exceptions::amanzi_throw(msg); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.orc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Unit tests for {@link TypeDescription}: string/JSON rendering, the type
 * string parser (including quoted field names and error diagnostics), subtype
 * lookup by name, attributes, and encryption/mask annotations.
 */
public class TestTypeDescription {

  /** Round-trips simple and nested schemas through toString()/toJson(). */
  @Test
  public void testJson() {
    TypeDescription bin = TypeDescription.createBinary();
    assertEquals("{\"category\": \"binary\", \"id\": 0, \"max\": 0}",
        bin.toJson());
    assertEquals("binary", bin.toString());
    TypeDescription struct = TypeDescription.createStruct()
        .addField("f1", TypeDescription.createInt())
        .addField("f2", TypeDescription.createString())
        .addField("f3", TypeDescription.createDecimal());
    assertEquals("struct<f1:int,f2:string,f3:decimal(38,10)>",
        struct.toString());
    assertEquals("{"
            + "\"category\": \"struct\", "
            + "\"id\": 0, "
            + "\"max\": 3, "
            + "\"fields\": [\n"
            + "{ \"f1\": {\"category\": \"int\", \"id\": 1, \"max\": 1}},\n"
            + "{ \"f2\": {\"category\": \"string\", \"id\": 2, \"max\": 2}},\n"
            + "{ \"f3\": {\"category\": \"decimal\", \"id\": 3, \"max\": 3, \"precision\": 38, \"scale\": 10}}"
            + "]"
            + "}",
        struct.toJson());
    struct = TypeDescription.createStruct()
        .addField("f1", TypeDescription.createUnion()
            .addUnionChild(TypeDescription.createByte())
            .addUnionChild(TypeDescription.createDecimal()
                .withPrecision(20).withScale(10)))
        .addField("f2", TypeDescription.createStruct()
            .addField("f3", TypeDescription.createDate())
            .addField("f4", TypeDescription.createDouble())
            .addField("f5", TypeDescription.createBoolean()))
        .addField("f6", TypeDescription.createChar().withMaxLength(100));
    assertEquals("struct<f1:uniontype<tinyint,decimal(20,10)>,f2:struct<f3:date,f4:double,f5:boolean>,f6:char(100)>",
        struct.toString());
    assertEquals(
        "{\"category\": \"struct\", "
            + "\"id\": 0, "
            + "\"max\": 8, "
            + "\"fields\": [\n"
            + "{ \"f1\": {\"category\": \"uniontype\", \"id\": 1, \"max\": 3, \"children\": [\n"
            + " {\"category\": \"tinyint\", \"id\": 2, \"max\": 2},\n"
            + " {\"category\": \"decimal\", \"id\": 3, \"max\": 3, \"precision\": 20, \"scale\": 10}]}},\n"
            + "{ \"f2\": {\"category\": \"struct\", \"id\": 4, \"max\": 7, \"fields\": [\n"
            + "{ \"f3\": {\"category\": \"date\", \"id\": 5, \"max\": 5}},\n"
            + "{ \"f4\": {\"category\": \"double\", \"id\": 6, \"max\": 6}},\n"
            + "{ \"f5\": {\"category\": \"boolean\", \"id\": 7, \"max\": 7}}]}},\n"
            + "{ \"f6\": {\"category\": \"char\", \"id\": 8, \"max\": 8, \"length\": 100}}]}",
        struct.toJson());
  }

  /** Field names needing backtick quoting are escaped in toString(). */
  @Test
  public void testSpecialFieldNames() {
    TypeDescription type = TypeDescription.createStruct()
        .addField("foo bar", TypeDescription.createInt())
        .addField("`some`thing`", TypeDescription.createInt())
        .addField("èœ", TypeDescription.createInt())
        .addField("1234567890_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ",
            TypeDescription.createInt())
        .addField("'!@#$%^&*()-=_+", TypeDescription.createInt());
    assertEquals("struct<`foo bar`:int,```some``thing```:int,`èœ`:int,"
        + "1234567890_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ:int,"
        + "`'!@#$%^&*()-=_+`:int>",
        type.toString());
  }

  /** Parses a schema covering every category and round-trips it. */
  @Test
  public void testParserSimple() {
    TypeDescription expected = TypeDescription.createStruct()
        .addField("b1", TypeDescription.createBinary())
        .addField("b2", TypeDescription.createBoolean())
        .addField("b3", TypeDescription.createByte())
        .addField("c", TypeDescription.createChar().withMaxLength(10))
        .addField("d1", TypeDescription.createDate())
        .addField("d2", TypeDescription.createDecimal().withScale(5).withPrecision(20))
        .addField("d3", TypeDescription.createDouble())
        .addField("fff", TypeDescription.createFloat())
        .addField("int", TypeDescription.createInt())
        .addField("l", TypeDescription.createList(TypeDescription.createLong()))
        .addField("map", TypeDescription.createMap(TypeDescription.createShort(),
            TypeDescription.createString()))
        .addField("str", TypeDescription.createStruct()
            .addField("u", TypeDescription.createUnion()
                .addUnionChild(TypeDescription.createTimestamp())
                .addUnionChild(TypeDescription.createVarchar().withMaxLength(100))))
        .addField("tz", TypeDescription.createTimestampInstant())
        .addField("ts", TypeDescription.createTimestamp());
    String expectedStr =
        "struct<b1:binary,b2:boolean,b3:tinyint,c:char(10),d1:date,"
            + "d2:decimal(20,5),d3:double,fff:float,int:int,l:array<bigint>,"
            + "map:map<smallint,string>,str:struct<u:uniontype<timestamp,"
            + "varchar(100)>>,tz:timestamp with local time zone,ts:timestamp>";
    assertEquals(expectedStr, expected.toString());
    TypeDescription actual = TypeDescription.fromString(expectedStr);
    assertEquals(expected, actual);
    assertEquals(expectedStr, actual.toString());
  }

  /** The parser accepts upper-case keywords and preserves field-name case. */
  @Test
  public void testParserUpper() {
    TypeDescription type = TypeDescription.fromString("BIGINT");
    assertEquals(TypeDescription.Category.LONG, type.getCategory());
    type = TypeDescription.fromString("STRUCT<MY_FIELD:INT>");
    assertEquals(TypeDescription.Category.STRUCT, type.getCategory());
    assertEquals("MY_FIELD", type.getFieldNames().get(0));
    assertEquals(TypeDescription.Category.INT,
        type.getChildren().get(0).getCategory());
    type = TypeDescription.fromString("UNIONTYPE< TIMESTAMP WITH LOCAL TIME ZONE >");
    assertEquals(TypeDescription.Category.UNION, type.getCategory());
    assertEquals(TypeDescription.Category.TIMESTAMP_INSTANT,
        type.getChildren().get(0).getCategory());
  }

  /** Backtick-quoted field names are unescaped when parsing. */
  @Test
  public void testSpecialFieldNameParser() {
    TypeDescription type = TypeDescription.fromString("struct<`foo bar`:int,"
        + "```quotes```:double,`abc``def````ghi`:float>");
    assertEquals(TypeDescription.Category.STRUCT, type.getCategory());
    List<String> fields = type.getFieldNames();
    assertEquals(3, fields.size());
    assertEquals("foo bar", fields.get(0));
    assertEquals("`quotes`", fields.get(1));
    assertEquals("abc`def``ghi", fields.get(2));
  }

  // The next group of tests checks the parser's error diagnostics; the '^'
  // marker in each message points at the failure position.

  @Test
  public void testMissingField() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("struct<");
    });
    assertTrue(e.getMessage().contains("Missing name at 'struct<^'"));
  }

  @Test
  public void testQuotedField1() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("struct<`abc");
    });
    assertTrue(e.getMessage().contains("Unmatched quote at 'struct<^`abc'"));
  }

  @Test
  public void testQuotedField2() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("struct<``:int>");
    });
    assertTrue(e.getMessage().contains("Empty quoted field name at 'struct<``^:int>'"));
  }

  @Test
  public void testParserUnknownCategory() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("FOOBAR");
    });
    assertTrue(e.getMessage().contains("Can't parse category at 'FOOBAR^'"));
  }

  @Test
  public void testParserEmptyCategory() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("<int>");
    });
    assertTrue(e.getMessage().contains("Can't parse category at '^<int>'"));
  }

  @Test
  public void testParserMissingInt() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("char()");
    });
    assertTrue(e.getMessage().contains("Missing integer at 'char(^)'"));
  }

  @Test
  public void testParserMissingSize() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("struct<c:char>");
    });
    assertTrue(e.getMessage().contains("Missing required char '(' at 'struct<c:char^>'"));
  }

  @Test
  public void testParserExtraStuff() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
      TypeDescription.fromString("struct<i:int>,");
    });
    assertTrue(e.getMessage().contains("Extra characters at 'struct<i:int>^,'"));
  }

  /** Ids stay consistent after navigating into a parsed list subtree. */
  @Test
  public void testConnectedListSubtrees() {
    TypeDescription type =
        TypeDescription.fromString("struct<field1:array<struct<field2:int>>>");
    TypeDescription leaf = type.getChildren().get(0)
        .getChildren().get(0)
        .getChildren().get(0);
    assertEquals(3, leaf.getId());
    assertEquals(0, type.getId());
    assertEquals(3, leaf.getId());
  }

  /** Ids stay consistent after navigating into a parsed map subtree. */
  @Test
  public void testConnectedMapSubtrees() {
    TypeDescription type =
        TypeDescription.fromString("struct<field1:map<string,int>>");
    TypeDescription leaf = type.getChildren().get(0).getChildren().get(0);
    assertEquals(2, leaf.getId());
    assertEquals(0, type.getId());
    assertEquals(2, leaf.getId());
  }

  /** findSubtype resolves ids, dotted names, and pseudo-fields (_elem etc.). */
  @Test
  public void testFindSubtype() {
    TypeDescription type = TypeDescription.fromString(
        "struct<a:int,"
            + "b:struct<c:array<int>,d:map<string,struct<e:string>>>,"
            + "f:string,"
            + "g:uniontype<string,int>>");
    assertEquals(0, type.findSubtype("0").getId());
    assertEquals(1, type.findSubtype("a").getId());
    assertEquals(2, type.findSubtype("b").getId());
    assertEquals(3, type.findSubtype("b.c").getId());
    assertEquals(4, type.findSubtype("b.c._elem").getId());
    assertEquals(5, type.findSubtype("b.d").getId());
    assertEquals(6, type.findSubtype("b.d._key").getId());
    assertEquals(7, type.findSubtype("b.d._value").getId());
    assertEquals(8, type.findSubtype("b.d._value.e").getId());
    assertEquals(9, type.findSubtype("f").getId());
    assertEquals(10, type.findSubtype("g").getId());
    assertEquals(11, type.findSubtype("g.0").getId());
    assertEquals(12, type.findSubtype("g.1").getId());
  }

  /** Unresolvable names throw IllegalArgumentException. */
  @Test
  public void testBadFindSubtype() {
    TypeDescription type = TypeDescription.fromString(
        "struct<a:int,"
            + "b:struct<c:array<int>,d:map<string,struct<e:string>>>,"
            + "f:string,"
            + "g:uniontype<string,int>>");
    // Modernized from the try { ... fail(); } catch pattern to assertThrows,
    // consistent with the other negative tests in this class.
    assertThrows(IllegalArgumentException.class, () -> type.findSubtype("13"));
    assertThrows(IllegalArgumentException.class, () -> type.findSubtype("aa"));
    assertThrows(IllegalArgumentException.class, () -> type.findSubtype("b.a"));
    assertThrows(IllegalArgumentException.class, () -> type.findSubtype("g.2"));
    assertThrows(IllegalArgumentException.class, () -> type.findSubtype("b.c.d"));
  }

  /** findSubtypes resolves a comma-separated list, preserving order. */
  @Test
  public void testFindSubtypes() {
    TypeDescription type = TypeDescription.fromString(
        "struct<a:int,"
            + "b:struct<c:array<int>,d:map<string,struct<e:string>>>,"
            + "f:string,"
            + "g:uniontype<string,int>>");
    List<TypeDescription> results = type.findSubtypes("a");
    assertEquals(1, results.size());
    assertEquals(1, results.get(0).getId());
    results = type.findSubtypes("b.d._value.e,3,g.0");
    assertEquals(3, results.size());
    assertEquals(8, results.get(0).getId());
    assertEquals(3, results.get(1).getId());
    assertEquals(11, results.get(2).getId());
    results = type.findSubtypes("");
    assertEquals(0, results.size());
  }

  /** findSubtypes against an ACID-shaped schema (names live under "row"). */
  @Test
  public void testFindSubtypesAcid() {
    TypeDescription type = TypeDescription.fromString(
        "struct<operation:int,originalTransaction:bigint,bucket:int,"
            + "rowId:bigint,currentTransaction:bigint,"
            + "row:struct<col0:int,col1:struct<z:int,x:double,y:string>,"
            + "col2:double>>");
    List<TypeDescription> results = type.findSubtypes("col0");
    assertEquals(1, results.size());
    assertEquals(7, results.get(0).getId());
    results = type.findSubtypes("col1,col2,col1.x,col1.z");
    assertEquals(4, results.size());
    assertEquals(8, results.get(0).getId());
    assertEquals(12, results.get(1).getId());
    assertEquals(10, results.get(2).getId());
    assertEquals(9, results.get(3).getId());
    results = type.findSubtypes("");
    assertEquals(0, results.size());
  }

  /** Attributes survive clone() independently and round-trip through a file. */
  @Test
  public void testAttributes() throws IOException {
    TypeDescription schema = TypeDescription.fromString(
        "struct<"
            + "name:struct<first:string,last:string>,"
            + "address:struct<street:string,city:string,country:string,post_code:string>,"
            + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
    // set some attributes
    schema.findSubtype("name").setAttribute("iceberg.id", "12");
    schema.findSubtype("address.street").setAttribute("mask", "nullify")
        .setAttribute("context", "pii");
    TypeDescription clone = schema.clone();
    assertEquals("12", clone.findSubtype("name").getAttributeValue("iceberg.id"));
    clone.findSubtype("name").removeAttribute("iceberg.id");
    assertEquals(0, clone.findSubtype("name").getAttributeNames().size());
    assertEquals(1, schema.findSubtype("name").getAttributeNames().size());
    // write a file with those attributes
    Path path = new Path(System.getProperty("test.tmp.dir",
        "target" + File.separator + "test" + File.separator + "tmp"),
        "attribute.orc");
    Configuration conf = new Configuration();
    Writer writer = OrcFile.createWriter(path,
        OrcFile.writerOptions(conf).setSchema(schema).overwrite(true));
    writer.close();
    // read the file back again
    Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
    TypeDescription readerSchema = reader.getSchema();
    // make sure that the read types have the attributes
    TypeDescription nameCol = readerSchema.findSubtype("name");
    assertArrayEquals(new Object[]{"iceberg.id"},
        nameCol.getAttributeNames().toArray());
    assertEquals("12", nameCol.getAttributeValue("iceberg.id"));
    TypeDescription street = readerSchema.findSubtype("address.street");
    assertArrayEquals(new Object[]{"context", "mask"},
        street.getAttributeNames().toArray());
    assertEquals("pii", street.getAttributeValue("context"));
    assertEquals("nullify", street.getAttributeValue("mask"));
    assertNull(street.getAttributeValue("foobar"));
  }

  /** equals() is attribute-sensitive unless the checkAttributes flag is off. */
  @Test
  public void testAttributesEquality() {
    TypeDescription schema = TypeDescription.fromString(
        "struct<"
            + "name:struct<first:string,last:string>,"
            + "address:struct<street:string,city:string,country:string,post_code:string>,"
            + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
    // set some attributes
    schema.findSubtype("name").setAttribute("iceberg.id", "12");
    schema.findSubtype("address.street").setAttribute("mask", "nullify")
        .setAttribute("context", "pii");
    TypeDescription clone = schema.clone();
    assertEquals(3, clearAttributes(clone));
    assertNotEquals(clone, schema);
    assertTrue(clone.equals(schema, false));
  }

  /** Recursively removes all attributes in the tree; returns how many. */
  static int clearAttributes(TypeDescription schema) {
    int result = 0;
    for(String attribute: schema.getAttributeNames()) {
      schema.removeAttribute(attribute);
      result += 1;
    }
    List<TypeDescription> children = schema.getChildren();
    if (children != null) {
      for (TypeDescription child : children) {
        result += clearAttributes(child);
      }
    }
    return result;
  }

  /** annotateEncryption sets encrypt/mask attributes and ignores re-sets. */
  @Test
  public void testEncryption() {
    String schemaString = "struct<"
        + "name:struct<first:string,last:string>,"
        + "address:struct<street:string,city:string,country:string,post_code:string>,"
        + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>";
    TypeDescription schema = TypeDescription.fromString(schemaString);
    TypeDescription copy = TypeDescription.fromString(schemaString);
    assertEquals(copy, schema);
    // set some encryption
    schema.annotateEncryption("pii:name,address.street;credit:credit_cards", null);
    assertEquals("pii",
        schema.findSubtype("name").getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE));
    assertEquals("pii",
        schema.findSubtype("address.street").getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE));
    assertEquals("credit",
        schema.findSubtype("credit_cards").getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE));
    assertNotEquals(copy, schema);
    assertEquals(3, clearAttributes(schema));
    assertEquals(copy, schema);
    schema.annotateEncryption("pii:name.first", "redact,Yy:name.first");
    // check that we ignore if already set
    schema.annotateEncryption("pii:name.first", "redact,Yy:name.first,credit_cards");
    assertEquals("pii",
        schema.findSubtype("name.first").getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE));
    assertEquals("redact,Yy",
        schema.findSubtype("name.first").getAttributeValue(TypeDescription.MASK_ATTRIBUTE));
    assertEquals("redact,Yy",
        schema.findSubtype("credit_cards").getAttributeValue(TypeDescription.MASK_ATTRIBUTE));
    assertEquals(3, clearAttributes(schema));
    schema.annotateEncryption("pii:name", "redact:name.first;nullify:name.last");
    assertEquals("pii",
        schema.findSubtype("name").getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE));
    assertEquals("redact",
        schema.findSubtype("name.first").getAttributeValue(TypeDescription.MASK_ATTRIBUTE));
    assertEquals("nullify",
        schema.findSubtype("name.last").getAttributeValue(TypeDescription.MASK_ATTRIBUTE));
    assertEquals(3, clearAttributes(schema));
  }

  /** Conflicting encryption keys on the same column are rejected. */
  @Test
  public void testEncryptionConflict() {
    TypeDescription schema = TypeDescription.fromString(
        "struct<"
            + "name:struct<first:string,last:string>,"
            + "address:struct<street:string,city:string,country:string,post_code:string>,"
            + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
    // set some encryption
    assertThrows(IllegalArgumentException.class,
        () -> schema.annotateEncryption("pii:address,personal:address",null));
  }

  /** Conflicting masks on the same column are rejected. */
  @Test
  public void testMaskConflict() {
    TypeDescription schema = TypeDescription.fromString(
        "struct<"
            + "name:struct<first:string,last:string>,"
            + "address:struct<street:string,city:string,country:string,post_code:string>,"
            + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
    // set some encryption
    assertThrows(IllegalArgumentException.class,
        () -> schema.annotateEncryption(null,"nullify:name;sha256:name"));
  }

  /** getFullFieldName inverts findSubtype for every addressable node. */
  @Test
  public void testGetFullFieldName() {
    TypeDescription schema = TypeDescription.fromString(
        "struct<"
            + "name:struct<first:string,last:string>,"
            + "address:struct<street:string,city:string,country:string,post_code:string>,"
            + "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>,"
            + "properties:map<string,uniontype<int,string>>>");
    for (String column: new String[]{"0", "name", "name.first", "name.last",
        "address.street", "address.city", "credit_cards", "credit_cards._elem",
        "credit_cards._elem.card_number", "properties", "properties._key",
        "properties._value", "properties._value.0", "properties._value.1"}) {
      assertEquals(column, schema.findSubtype(column, true).getFullFieldName());
    }
  }

  /** A null attribute value is a no-op rather than an error. */
  @Test
  public void testSetAttribute() {
    TypeDescription type = TypeDescription.fromString("int");
    type.setAttribute("key1", null);
    assertEquals(0, type.getAttributeNames().size());
  }

  @Test
  public void testHashCode() {
    // Should not throw NPE
    TypeDescription.fromString("int").hashCode();
  }
}
def end_search(t0):
    """Report how long a search took, measured from start timestamp ``t0``.

    Prints the elapsed wall-clock time in seconds, then a separator line.
    Returns None.
    """
    elapsed = time.time() - t0
    print('Search time: %.2f sec' % elapsed)
    print('------ o -------')
/*
 * @test /nodynamiccopyright/
 * @bug 8062373
 * @summary Test that when inaccessible types constitute the inferred types of <> the compiler complains.
 * @compile/fail/ref=Neg19.out Neg19.java -XDrawDiagnostics
 */

// Negative regression test: the diamond-with-anonymous-class form infers a
// type the compiler must reject; the expected diagnostics live in Neg19.out.
// Do not "fix" the ERROR line -- the compilation failure is the test.
class Neg19 {
    public static void main(String[] args) {
        new Neg19_01<Neg19>().foo(new Neg19_01<>());    // OK.
        new Neg19_01<Neg19>().foo(new Neg19_01<>() {}); // ERROR.
    }
}
Authorities in San Marcos are searching for vandals who spray-painted a 300-year-old cypress tree on the San Marcos River. The tree, the oldest and largest cypress on the section of the San Marcos River around Thompson’s Island Park, was tagged with images of a flower, heart, and the words "peace," "love" and "your soul is gold." Authorities found the graffiti on July 1 in an area of the park that had been closed after flooding, and is expected to remain closed for the foreseeable future. PHOTOS: 300-year-old tree vandalized in popular San Marcos park The graffiti had been removed by Tuesday, but city officials have not been able to track down any suspects in the case. Though the park is closed, kayakers, canoers and tubers are still allowed to float through the area, but will not be allowed to stay on land any longer than necessary to get from one section of the river to another. Anyone found in the park can be cited or arrested for criminal trespass, city officials said. Anyone with information on the incident is asked to call Crime Stoppers at 1-800-324-8477 or the City Marshal’s Office at 512-393-8480. Crime Stoppers will pay up to $1,000 for information leading to the arrest of the person responsible.
def delete_created_documents(self, collection, creation_time):
    # Remove every document in `collection` whose creation-timestamp field
    # equals `creation_time` exactly (used to roll back a batch of inserts).
    col = self._get_collection(collection)
    # `@@col` is an AQL *collection* bind parameter (double @); the field name
    # is spliced from the _FLD_CREATED constant via the f-string, and the
    # value arrives through the `timestamp` bind variable.
    self._database.aql.execute(
        f"""
        FOR d IN @@col
            FILTER d.{_FLD_CREATED} == @timestamp
            REMOVE d IN @@col
        """,
        bind_vars={'timestamp': creation_time, '@col': col.name},
    )
// Weaker access - used for serialization, so it needs public.
//
// A string->string map with typed accessors.  Every value is stored in its
// string form; compound values (lists, refs, changelists, jobs) are flattened
// into multiple "<key>__suffix" entries.  Reads either return a caller-supplied
// default or throw UnmarshalException when a value is missing/unparseable.
@SuppressWarnings("WeakerAccess")
public final class PrimitiveMap {
    private static final Logger LOG = Logger.getInstance(PrimitiveMap.class);

    // Public so the serializer can work properly. Do not access directly.
    @SuppressWarnings("WeakerAccess")
    public Map<String, String> $proxy$ = new HashMap<>();

    // Raised when a stored value is missing or cannot be parsed as the
    // requested type.
    static class UnmarshalException extends Exception {
        UnmarshalException(String key, String expectedType, Object found) {
            super("Could not unmarshall [" + key + "]: expected " + expectedType + ", found " + found);
        }

        UnmarshalException(String key, String expectedType) {
            super("Could not unmarshall [" + key + "]: expected non-null " + expectedType);
        }
    }

    // --- Primitive accessors ---------------------------------------------

    int getIntNullable(@NotNull String key, int defaultValue) throws UnmarshalException {
        return getNotNull(key, defaultValue, "int", Integer::parseInt);
    }

    @NotNull
    PrimitiveMap putInt(@NotNull String key, int value) {
        $proxy$.put(key, Integer.toString(value));
        return this;
    }

    long getLongNullable(@NotNull String key, long defaultValue) throws UnmarshalException {
        return getNotNull(key, defaultValue, "long", Long::parseLong);
    }

    @NotNull
    PrimitiveMap putLong(@NotNull String key, long value) {
        $proxy$.put(key, Long.toString(value));
        return this;
    }

    boolean getBooleanNullable(@NotNull String key, boolean defaultValue) throws UnmarshalException {
        return getNotNull(key, defaultValue, "boolean", Boolean::parseBoolean);
    }

    @NotNull
    PrimitiveMap putBoolean(@NotNull String key, boolean value) {
        $proxy$.put(key, Boolean.toString(value));
        return this;
    }

    @Nullable
    String getStringNullable(String key, @Nullable String defaultValue) throws UnmarshalException {
        return getNullable(key, defaultValue, "string", (s) -> s);
    }

    @NotNull
    String getStringNotNull(String key) throws UnmarshalException {
        return getNotNull(key, "string", (s) -> s);
    }

    // Note: a null value is silently skipped, so "put null" leaves any
    // previously stored value in place.
    @NotNull
    PrimitiveMap putString(@NotNull String key, @Nullable String value) {
        if (value != null) {
            $proxy$.put(key, value);
        }
        return this;
    }

    // Lists are flattened as "<key>__len" plus one "<key>__<i>" per element.
    @NotNull
    List<String> getStringList(String key) throws UnmarshalException {
        int size = getIntNullable(key + "__len", -1);
        if (size <= 0) {
            return Collections.emptyList();
        }
        List<String> ret = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            ret.add(getStringNotNull(key + "__" + i));
        }
        return ret;
    }

    @NotNull
    PrimitiveMap putStringList(@NotNull String key, @Nullable List<String> values) {
        if (values != null) {
            putInt(key + "__len", values.size());
            for (int i = 0; i < values.size(); i++) {
                putString(key + "__" + i, values.get(i));
            }
        }
        return this;
    }

    @NotNull
    PrimitiveMap putStringList(@NotNull String key, @Nullable Stream<String> values) {
        if (values != null) {
            putStringList(key, values.collect(Collectors.toList()));
        }
        return this;
    }

    boolean containsKey(String key) {
        return $proxy$.containsKey(key);
    }

    // Higher level getters and setters. Still stores only the primitive data, but makes it easier to use.

    @Nullable
    FilePath getFilePathNullable(String key) throws UnmarshalException {
        String path = getStringNullable(key, null);
        if (path == null) {
            return null;
        }
        return VcsUtil.getFilePath(path);
    }

    @NotNull
    FilePath getFilePathNotNull(String key) throws UnmarshalException {
        String path = getStringNotNull(key);
        return VcsUtil.getFilePath(path);
    }

    @NotNull
    PrimitiveMap putFilePath(@NotNull String key, @Nullable FilePath path) {
        if (path != null) {
            return putString(key, path.getPath());
        }
        return this;
    }

    @NotNull
    List<FilePath> getFilePathList(@NotNull String key) throws UnmarshalException {
        return getStringList(key)
                .stream()
                .map(VcsUtil::getFilePath)
                .collect(Collectors.toList());
    }

    @NotNull
    PrimitiveMap putFilePathList(String key, @Nullable List<FilePath> paths) {
        if (paths != null) {
            return putStringList(key, paths.stream()
                    .map(FilePath::getPath));
        }
        return this;
    }

    // A ClientServerRef is stored as "<key>__serverPort" + "<key>__clientName".
    @Nullable
    ClientServerRef getClientServerRefNullable(@NotNull String key) throws UnmarshalException {
        String serverPort = getStringNullable(key + "__serverPort", null);
        String clientName = getStringNullable(key + "__clientName", null);
        if (serverPort == null) {
            return null;
        }
        P4ServerName server = P4ServerName.forPortNotNull(serverPort);
        return new ClientServerRef(server, clientName);
    }

    @NotNull
    ClientServerRef getClientServerRefNotNull(@NotNull String key) throws UnmarshalException {
        String serverPort = getStringNotNull(key + "__serverPort");
        String clientName = getStringNullable(key + "__clientName", null);
        P4ServerName server = P4ServerName.forPortNotNull(serverPort);
        return new ClientServerRef(server, clientName);
    }

    @NotNull
    PrimitiveMap putClientServerRef(@NotNull String key, @Nullable ClientServerRef ref) {
        if (ref == null) {
            return this;
        }
        return putString(key + "__serverPort", ref.getServerName().getFullPort())
                .putString(key + "__clientName", ref.getClientName());
    }

    @Nullable
    P4ChangelistId getChangelistIdNullable(@NotNull String key) throws UnmarshalException {
        ClientServerRef ref = getClientServerRefNullable(key + "__ref");
        if (ref == null) {
            return null;
        }
        int id = getIntNullable(key + "__id", -1);
        if (id == -1) {
            return null;
        }
        return new P4ChangelistIdImpl(id, ref);
    }

    @NotNull
    P4ChangelistId getChangelistIdNotNull(@NotNull String key) throws UnmarshalException {
        ClientServerRef ref = getClientServerRefNullable(key + "__ref");
        if (ref == null) {
            throw new UnmarshalException(key + "__ref__serverPort", "port");
        }
        int id = getIntNullable(key + "__id", -1);
        // Allow -1
        return new P4ChangelistIdImpl(id, ref);
    }

    @NotNull
    PrimitiveMap putChangelistId(@NotNull String key, @Nullable P4ChangelistId id) {
        if (id == null) {
            return this;
        }
        return putClientServerRef(key + "__ref", id.getClientServerRef())
                .putInt(key + "__id", id.getChangelistId());
    }

    // Job details are heterogeneous, so each detail name carries a companion
    // "<key>__detailstype__<name>" discriminator: "string" | "list" | "int" | "long".
    @NotNull
    P4Job getP4Job(@NotNull String key) throws UnmarshalException {
        String jobId = getStringNotNull(key + "__id");
        String description = getStringNotNull(key + "__desc");
        List<String> keys = getStringList(key + "__detailskeys");
        Map<String, Object> details = new HashMap<>();
        for (String descKey : keys) {
            String type = getStringNotNull(key + "__detailstype__" + descKey);
            if ("string".equals(type)) {
                details.put(descKey, getStringNotNull(key + "__details__" + descKey));
            } else if ("list".equals(type)) {
                details.put(descKey, getStringList(key + "__details__" + descKey));
            } else if ("int".equals(type)) {
                details.put(descKey, getIntNullable(key + "__details__" + descKey, -1));
            } else if ("long".equals(type)) {
                // covers date type
                details.put(descKey, getLongNullable(key + "__details__" + descKey, -1));
            }
        }
        return new P4JobImpl(jobId, description, details);
    }

    @NotNull
    PrimitiveMap putP4Job(@NotNull String key, @Nullable P4Job job) {
        if (job == null) {
            return this;
        }
        // Need to convert the details to a primitive map.
        putStringList(key + "__detailskeys", job.getRawDetails().keySet().stream());
        for (Map.Entry<String, Object> entry : job.getRawDetails().entrySet()) {
            Object value = entry.getValue();
            if (value instanceof String) {
                putString(key + "__detailstype__" + entry.getKey(), "string");
                putString(key + "__details__" + entry.getKey(), (String) value);
            } else if (value instanceof List) {
                putString(key + "__detailstype__" + entry.getKey(), "list");
                //noinspection unchecked
                putStringList(key + "__details__" + entry.getKey(), (List) value);
            } else if (value instanceof Integer) {
                putString(key + "__detailstype__" + entry.getKey(), "int");
                putInt(key + "__details__" + entry.getKey(), (Integer) value);
            } else if (value instanceof Number) {
                // Any other numeric type is widened and stored as a long.
                putString(key + "__detailstype__" + entry.getKey(), "long");
                putLong(key + "__details__" + entry.getKey(), ((Number) value).longValue());
            } else if (value instanceof Date) {
                // Dates are stored as epoch milliseconds under the "long" type.
                putString(key + "__detailstype__" + entry.getKey(), "long");
                putLong(key + "__details__" + entry.getKey(), ((Date) value).getTime());
            } else if (value != null) {
                LOG.warn("Unexpected value type in Perforce job details: " + value.getClass() +
                        " (value " + value + "; key " + entry.getKey() + ")");
            }
        }
        return putString(key + "__id", job.getJobId())
                .putString(key + "__desc", job.getDescription());
    }

    // --- Low-level typed reads -------------------------------------------

    // Missing key -> defaultValue; present-but-unparseable -> UnmarshalException.
    @Nullable
    private <T> T getNullable(@NotNull String key, @Nullable T defaultValue, @NotNull String type,
                              @NotNull Function<String, T> map) throws UnmarshalException {
        if (!containsKey(key)) {
            return defaultValue;
        }
        String value = $proxy$.get(key);
        try {
            return map.apply(value);
        } catch (ClassCastException | NumberFormatException e) {
            // No need to log the exception, because we're just at the same level as
            // the low-level cast error source.
            throw new UnmarshalException(key, type, value);
        }
    }

    // Missing or null key -> UnmarshalException; unparseable -> UnmarshalException.
    @NotNull
    private <T> T getNotNull(@NotNull String key, @NotNull String type,
                             @NotNull Function<String, T> map) throws UnmarshalException {
        if (!containsKey(key)) {
            throw new UnmarshalException(key, type);
        }
        String value = $proxy$.get(key);
        if (value == null) {
            throw new UnmarshalException(key, type);
        }
        try {
            return map.apply(value);
        } catch (ClassCastException | NumberFormatException e) {
            // No need to log the exception, because we're just at the same level as
            // the low-level cast error source.
            throw new UnmarshalException(key, type, value);
        }
    }

    // Missing key -> defaultValue; otherwise delegates to the strict variant.
    @NotNull
    private <T> T getNotNull(String key, @NotNull T defaultValue, @NotNull String type,
                             @NotNull Function<String, T> map) throws UnmarshalException {
        if (!containsKey(key)) {
            return defaultValue;
        }
        return getNotNull(key, type, map);
    }
}
import React from 'react';
import 'bootstrap-icons/font/bootstrap-icons.css';
import 'bootstrap/dist/css/bootstrap.min.css';
import 'bootstrap/dist/js/bootstrap.bundle.min';
import { BrowserRouter, Route, Switch } from 'react-router-dom';
import { AuthWrapper } from 'core-auth';
import AuthenticatedApp from './components/AuthenticatedApp';
import client from './app/client';

// Root component: reads OAuth/OIDC settings from build-time environment
// variables and mounts the authenticated application behind AuthWrapper.
const App: React.FC = () => {
  const scope = process?.env?.REACT_APP_OAUTH_SCOPE;
  const clientId = process?.env?.REACT_APP_OAUTH_CLIENT_ID;
  const issuer = process?.env?.REACT_APP_OIDC_ISSUER;
  const redirectUri = process?.env?.REACT_APP_OAUTH_REDIRECT_URI;

  // Render nothing unless all four OAuth settings are configured.
  if (!clientId || !issuer || !scope || !redirectUri) {
    return <></>;
  }

  // Scopes are supplied as a single space-separated string.
  const scopes = scope.split(' ');
  // The router basename is taken from the redirect URI's path component.
  const baseUrl = new URL(redirectUri);

  return (
    <BrowserRouter basename={`${baseUrl.pathname}`}>
      <Switch>
        <Route
          path=""
          render={() => (
            <AuthWrapper
              clientId={clientId}
              onTokenChange={client.setToken}
              scopes={scopes}
              issuer={issuer}
              redirectUri={redirectUri}
            >
              <AuthenticatedApp />
            </AuthWrapper>
          )}
        />
      </Switch>
    </BrowserRouter>
  );
};

export default App;
THEORETICAL FUNDAMENTALS OF STRATEGIC MANAGEMENT OF HUMAN CAPITAL OF THE ENTERPRISE The article is devoted to the study of theoretical approaches to the interpretation of the essence of the category of human capital and the definition of conceptual provisions of strategic management of human capital in enterprises in modern economic conditions. The article considers the main provisions of traditional, alternative, extended and other approaches to the interpretation of the term “human capital” by foreign scholars. Perspectives on the essence and content of human capital are presented by some Ukrainian scientists. The target orientation of the research of the substantive essence of the category “human capital” is determined, which is to assess the possibility of its development and the ability to manage it taking into account the external and internal environment of the enterprise. It is proved that human capital should be considered as a new effective resource, the use of which contributes to the socio-economic development of the enterprise and the state; investment in human capital provides a long-term socio-economic multiplier effect for all market participants. It is determined that strategic human capital management is aimed at ensuring the efficiency of economic activity and sustainable development of the enterprise through the effective formation, development and use of human capital. It is noted that strategic human capital management should be considered in the context of strategic management of intellectual capital of the enterprise. The place and role of strategic analysis and evaluation of human capital quality in strategic human capital management of the enterprise are outlined. The specific features of strategic human capital management of the enterprise are considered, in particular, such as time length, orientation on the principle of balance of human capital flows, and consideration of each employee of the enterprise as a strategic partner. It is concluded that strategic human capital management contributes to the added value of the enterprise, increases the level of corporate culture, and improves the image of the enterprise.
/**
 * A background task that simulates database failures. The database is assumed
 * to have crashed during the provided intervals. Applications shouldn't
 * contact the database while {@link #failed} is true. Precision is one second.
 *
 * @author haoyuh
 */
public class DBSimulator implements Callable<Void> {

    /** Shared flag observed by the application: true while the DB is "down". */
    private final AtomicBoolean failed;
    /** Crash windows as [start, end, start, end, ...], in seconds since creation. */
    private final long[] invtervals;
    /** Index of the current interval's start within {@link #invtervals}. */
    private int index;
    /** Creation timestamp in milliseconds; interval offsets are relative to this. */
    private final long startMillis;
    // volatile fix: shutdown() is called from a different thread than the one
    // running call(); without volatile the polling loop had no guarantee of
    // ever observing the write and could spin forever.
    private volatile boolean isRunning = true;

    /**
     * @param failed
     *            A flag indicating the status of database. True means database
     *            has crashed. False means database is alive.
     * @param invtervals
     *            [start, end, start, end...]. Database is considered as failed
     *            during the given interval. Start and end are relative time to
     *            when this class is created. Time unit is second.
     */
    public DBSimulator(AtomicBoolean failed, long[] invtervals) {
        super();
        this.failed = failed;
        this.invtervals = invtervals;
        this.startMillis = System.currentTimeMillis();
        System.out.println("created db state simulator");
    }

    /**
     * Polls once per second, raising {@link #failed} when the current interval
     * starts and clearing it when the interval ends. Returns after the last
     * interval completes or {@link #shutdown()} is called.
     */
    @Override
    public Void call() throws Exception {
        while (isRunning) {
            long start = invtervals[index];
            long end = invtervals[index + 1];
            // Seconds elapsed since construction. Renamed from `now`, which
            // shadowed the creation-time field of the same name.
            long elapsedSec = (System.currentTimeMillis() - startMillis) / 1000;
            if (elapsedSec >= start && !failed.get()) {
                System.out.println("Crash at " + System.nanoTime());
                failed.set(true);
            } else if (elapsedSec >= end) {
                System.out.println("Back at " + System.nanoTime());
                failed.set(false);
                System.out.println("#########dbfailed " + failed.get());
                index += 2;
                if (index >= invtervals.length) {
                    break;
                }
            }
            Thread.sleep(1000);
        }
        return null;
    }

    /** Asks the polling loop to exit; takes effect within one second. */
    public void shutdown() {
        isRunning = false;
    }

    /** Returns the interval array supplied at construction (not a copy). */
    public long[] getInvtervals() {
        return invtervals;
    }
}
// broadcastTicket will ensure that the local dcrd instance is aware of the
// provided ticket.
// Ticket hash, ticket hex, and parent hex are parsed from the request body and
// validated. They are broadcast to the network using SendRawTransaction if dcrd
// is not aware of them.
func broadcastTicket() gin.HandlerFunc {
	return func(c *gin.Context) {
		const funcName = "broadcastTicket"

		// Read the raw body, restoring it for any later readers.
		reqBytes, err := drainAndReplaceBody(c.Request)
		if err != nil {
			log.Warnf("%s: Error reading request (clientIP=%s): %v", funcName, c.ClientIP(), err)
			sendErrorWithMsg(err.Error(), errBadRequest, c)
			return
		}

		var request struct {
			TicketHex  string `json:"tickethex" binding:"required"`
			TicketHash string `json:"tickethash" binding:"required"`
			ParentHex  string `json:"parenthex" binding:"required"`
		}
		if err := binding.JSON.BindBody(reqBytes, &request); err != nil {
			log.Warnf("%s: Bad request (clientIP=%s): %v", funcName, c.ClientIP(), err)
			sendErrorWithMsg(err.Error(), errBadRequest, c)
			return
		}

		// Decode and sanity-check the ticket itself.
		msgTx, err := decodeTransaction(request.TicketHex)
		if err != nil {
			log.Errorf("%s: Failed to decode ticket hex (ticketHash=%s): %v", funcName, request.TicketHash, err)
			sendErrorWithMsg("cannot decode ticket hex", errBadRequest, c)
			return
		}
		err = isValidTicket(msgTx)
		if err != nil {
			log.Warnf("%s: Invalid ticket (clientIP=%s, ticketHash=%s): %v", funcName, c.ClientIP(), request.TicketHash, err)
			sendError(errInvalidTicket, c)
			return
		}
		// The client-supplied hash must match the hash of the supplied hex.
		if msgTx.TxHash().String() != request.TicketHash {
			log.Warnf("%s: Ticket hex/hash mismatch (clientIP=%s, ticketHash=%s)", funcName, c.ClientIP(), request.TicketHash)
			sendErrorWithMsg("ticket hex does not match hash", errBadRequest, c)
			return
		}

		parentTx, err := decodeTransaction(request.ParentHex)
		if err != nil {
			log.Errorf("%s: Failed to decode parent hex (ticketHash=%s): %v", funcName, request.TicketHash, err)
			sendErrorWithMsg("cannot decode parent hex", errBadRequest, c)
			return
		}
		parentHash := parentTx.TxHash()

		dcrdClient := c.MustGet(dcrdKey).(*rpc.DcrdRPC)
		dcrdErr := c.MustGet(dcrdErrorKey)
		if dcrdErr != nil {
			log.Errorf("%s: could not get dcrd client: %v", funcName, dcrdErr.(error))
			sendError(errInternalError, c)
			return
		}

		// Ensure dcrd knows the parent tx, broadcasting it if missing.
		_, err = dcrdClient.GetRawTransaction(parentHash.String())
		var e *wsrpc.Error
		if err == nil {
			// Parent is already known to dcrd; nothing to broadcast.
		} else if errors.As(err, &e) && e.Code == rpc.ErrNoTxInfo {
			// Only broadcast the parent if the ticket actually spends it.
			var found bool
			for _, txIn := range msgTx.TxIn {
				if !txIn.PreviousOutPoint.Hash.IsEqual(&parentHash) {
					continue
				}
				found = true
				break
			}
			if !found {
				log.Errorf("%s: Invalid ticket parent (ticketHash=%s)", funcName, request.TicketHash)
				sendErrorWithMsg("invalid ticket parent", errBadRequest, c)
				return
			}
			log.Debugf("%s: Broadcasting parent tx %s (ticketHash=%s)", funcName, parentHash, request.TicketHash)
			err = dcrdClient.SendRawTransaction(request.ParentHex)
			if err != nil {
				log.Errorf("%s: dcrd.SendRawTransaction for parent tx failed (ticketHash=%s): %v", funcName, request.TicketHash, err)
				sendError(errCannotBroadcastTicket, c)
				return
			}
		} else {
			log.Errorf("%s: dcrd.GetRawTransaction for ticket parent failed (ticketHash=%s): %v", funcName, request.TicketHash, err)
			sendError(errInternalError, c)
			return
		}

		// Same check-then-broadcast sequence for the ticket itself.
		_, err = dcrdClient.GetRawTransaction(request.TicketHash)
		if err == nil {
			return
		}
		if errors.As(err, &e) && e.Code == rpc.ErrNoTxInfo {
			log.Debugf("%s: Broadcasting ticket (ticketHash=%s)", funcName, request.TicketHash)
			err = dcrdClient.SendRawTransaction(request.TicketHex)
			if err != nil {
				log.Errorf("%s: dcrd.SendRawTransaction for ticket failed (ticketHash=%s): %v", funcName, request.TicketHash, err)
				sendError(errCannotBroadcastTicket, c)
				return
			}
		} else {
			log.Errorf("%s: dcrd.GetRawTransaction for ticket failed (ticketHash=%s): %v", funcName, request.TicketHash, err)
			sendError(errInternalError, c)
			return
		}
	}
}
""" Make a population using synthpops with school types and mixing types within schools defined. """ import sciris as sc import synthpops as sp import matplotlib.pyplot as plt import cmasher as cmr import cmocean pars = dict( n = 40e3, rand_seed = 123, location = 'Spokane_County', state_location = 'Washington', country_location = 'usa', smooth_ages = 1, household_method = 'fixed_ages', with_facilities = 1, with_non_teaching_staff = 1, # also include non teaching staff with_school_types = 1, school_mixing_type = {'pk': 'random', 'es': 'age_and_class_clustered', 'ms': 'age_and_class_clustered', 'hs': 'age_clustered', } ) pop = sp.Pop(**pars) kwargs = sc.dcp(pars) kwargs['cmap'] = cmr.get_sub_cmap('cmo.curl', 0.05, 1) # let's change the colormap used a little fig, ax = pop.plot_school_sizes(**kwargs) # plot school sizes by school type plt.show()
import pickle
import numpy as np
import tensorflow as tf


def unpickle(file):
    # Load one CIFAR-10 batch file (a pickled dict with byte-string keys).
    with open(file, 'rb') as fo:
        dict = pickle.load(fo, encoding='bytes')
    return dict


def load_data_single(file):
    # Return (images, labels) arrays for a single CIFAR-10 batch file.
    data = unpickle(file)
    result = []
    labels = data[b'labels']
    data = data[b'data']
    for index in range(len(data)):
        temp = data[index]
        # NOTE(review): CIFAR-10 rows are stored channel-first (3, 32, 32);
        # resizing in place to (32, 32, 3) reinterprets the bytes rather than
        # transposing channels -- confirm downstream consumers expect this.
        temp.resize(32, 32, 3)
        result.append(temp)
    return np.array(result), labels


TRAIN_DATA_PATHS = "./cifar_data/data_batch_%d"
TEST_DATA_PATH = "./cifar_data/test_batch"

# Disabled training-set pipeline, kept for reference:
# train_data = []
# train_labels = []
# for i in range(1, 6):
#     temp_data, temp_labels = load_data_single(TRAIN_DATA_PATHS % i)
#     train_labels.extend(temp_labels)
#     train_data.extend(temp_data)
# train_data = np.array(train_data)
# train_labels = np.array(train_labels)

# Convert only the test batch and save it as a compressed .npz archive.
data, labels = load_data_single(TEST_DATA_PATH)
print(len(labels))
np.savez("cifar-10-test-data.npz", data=data, labels=labels)
# np.savez("cifar-10-train-data.npz", data=train_data, labels=train_labels)

# Disabled TFRecord export, kept for reference:
# w = tf.io.TFRecordWriter("./cifar-10-val-data.tfrecords")
# for i in range(10000):
#     example = tf.train.Example(
#         features=tf.train.Features(
#             feature={
#                 "data": tf.train.Feature(bytes_list=tf.train.BytesList(value=[data[i].tobytes()])),
#                 "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[labels[i]])),
#             }
#         )
#     )
#     w.write(example.SerializeToString())
# w.close()
from flask import Blueprint
from flask_login import login_required
from flask import (flash, redirect, url_for, render_template, request)
from flask_login import current_user
from appi2c.ext.group.group_forms import (GroupForm, EditGroupForm)
from appi2c.ext.group.group_controller import (create_group,
                                               list_all_group,
                                               list_group_id,
                                               update_group,
                                               delete_group_id,
                                               folder_admin,
                                               upload_files,
                                               allowed_image_filesize,
                                               get_image,
                                               num_group_user)
from appi2c.ext.device.device_controller import (list_num_devices_in_group,
                                                 list_device_in_group)
from appi2c.ext.icon.icon_controller import list_icon_in_device

# Blueprint for all group CRUD and display routes.
bp = Blueprint('groups', __name__, template_folder="appi2c/templates/group")


@bp.route("/register/group", methods=['GET', 'POST'])
@login_required
def register_group():
    """Create a new group owned by the current user, with an image upload."""
    form = GroupForm()
    if request.method == "POST":
        if form.validate_on_submit():
            uploaded_file = request.files['file']
            folder_admin()
            # The client reports the upload size via a cookie; reject > 10MB.
            if "filesize" in request.cookies:
                if not allowed_image_filesize(request.cookies["filesize"]):
                    flash("Filesize exceeded maximum limit of 10MB", "error")
                    return redirect(request.url)
            if upload_files(uploaded_file):
                create_group(name=form.name.data,
                             description=form.description.data,
                             file=uploaded_file.filename,
                             user=current_user.id)
                # Fixed user-facing typo: "has benn created" -> "has been created".
                flash('Group ' + form.name.data + ' has been created!', 'success')
                return redirect(url_for('groups.group_opts'))
            flash('That file extension is not allowed', 'error')
            return redirect(request.url)
    return render_template('group/group_create.html', title='Group Register', form=form)


@bp.route('/edit/group/<int:id>', methods=['GET', 'POST'])
@login_required
def edit_group(id):
    """Edit a group; keeps the previously stored image if no new file is sent."""
    form = EditGroupForm()
    current_group = list_group_id(id)
    img = get_image(id)
    if form.validate_on_submit():
        uploaded_file = request.files['file']
        current_group.id = form.id.data
        current_group.name = form.name.data
        current_group.description = form.description.data
        if "filesize" in request.cookies:
            if not allowed_image_filesize(request.cookies["filesize"]):
                flash("Filesize exceeded maximum limit of 10MB", "error")
                return redirect(request.url)
        if upload_files(uploaded_file):
            update_group(id, current_group.name, current_group.description, uploaded_file.filename)
            flash('Your changes have been saved.', 'success')
            return redirect(url_for('groups.group_opts'))
        else:
            # No valid upload: retain the existing image file name.
            name_img = current_group.file
            update_group(id, current_group.name, current_group.description, name_img)
            flash('Your changes have been saved.', 'success')
            return redirect(url_for('groups.group_opts'))
    elif request.method == 'GET':
        # Pre-populate the form with current values.
        form.id.data = current_group.id
        form.name.data = current_group.name
        form.description.data = current_group.description
        form.file.data = current_group.file
    return render_template('group/group_edit.html', title='Edit Group', form=form, img=img)


@bp.route("/list/group", methods=['GET', 'POST'])
@login_required
def list_group():
    """List the current user's groups with their device counts."""
    groups = list_all_group(current_user)
    num_devices = list_num_devices_in_group(groups)
    if not groups:
        flash('There are no records. Register a Group', 'error')
        return redirect(url_for('groups.group_opts'))
    return render_template("group/group_list.html",
                           title='Group List',
                           obj=zip(groups, num_devices))


@bp.route("/admin/group", methods=['GET', 'POST'])
@login_required
def admin_group():
    """Administration view of the current user's groups."""
    groups = list_all_group(current_user)
    if not groups:
        flash('There are no records. Register a Group', 'error')
        return redirect(url_for('groups.group_opts'))
    return render_template('group/group_admin.html', title='Group Admin', groups=groups)


@bp.route('/delete/group/<int:id>', methods=['GET', 'POST'])
@login_required
def delete_group(id):
    """Delete an empty group; groups that still contain devices are refused."""
    delete = delete_group_id(id)
    if delete:
        if num_group_user(current_user) > 0:
            return redirect(url_for('groups.admin_group'))
        else:
            return redirect(url_for('groups.group_opts'))
    else:
        flash('The group contains devices. First remove the devices.', 'error')
        return redirect(url_for('groups.admin_group'))


@bp.route("/options/group", methods=['GET', 'POST'])
@login_required
def group_opts():
    """Landing page with group-related actions."""
    return render_template("group/group_opts.html", title='Group Options')


@bp.route('/group/blueprint/<int:id>', methods=['GET', 'POST'])
@login_required
def content_group(id):
    """Show a group's image plus its devices and their icons."""
    image = get_image(id)
    group = list_group_id(id)
    devices = list_device_in_group(group)
    icons = list_icon_in_device(devices)
    return render_template('group/group_content.html',
                           image=image,
                           group=group,
                           obj=zip(devices, icons))


@bp.route('/group/controller/<int:id>', methods=['GET', 'POST'])
@login_required
def controller_group(id):
    """Control view for the devices of one group."""
    group = list_group_id(id)
    devices = list_device_in_group(group)
    icons = list_icon_in_device(devices)
    return render_template('group/group_controller.html',
                           group=group,
                           obj=zip(devices, icons))


# Fix: the decorator line below had been mangled into `<EMAIL>(413)` (a
# redaction artifact), which is a syntax error. The handler was evidently
# meant to stay disabled, so the whole thing is restored as comments.
#@bp.errorhandler(413)
#def too_large(e):
#    flash("The size of image Exceeds the 2 MB allowed", 'error')
#    return redirect(url_for('groups.register_group'))


@bp.route('/upload', methods=['POST', 'GET'])
def upload():
    """Simple upload test page."""
    return render_template('testejs.html')
# Contest: Codeforces Round #605 (Div. 3) (https://codeforces.com/contest/1272) # Problem: D: Remove One Element (https://codeforces.com/contest/1272/problem/D) def rint(): return int(input()) def rints(): return list(map(int, input().split())) n = rint() a = rints() rng = [] l = 0 for i in range(1, len(a)): if a[i] <= a[i - 1]: rng.append((l, i)) l = i rng.append((l, n)) mx = max(r - l for l, r in rng) for i in range(len(rng) - 1): l1, r1 = rng[i] l2, r2 = rng[i + 1] if r1 - l1 == 1 or r2 - l2 == 1: continue if a[r1 - 2] < a[l2] or a[r1 - 1] < a[l2 + 1]: mx = max(mx, r2 - l1 - 1) print(mx)
// 005.desktop_record/desktop_recorder.h
#pragma once

#include <thread>
#include <mutex>
#include <atomic>

// Singleton that records the desktop to a video file on a worker thread.
class desktop_recorder {
public:
    // Stops the worker thread before destruction.
    ~desktop_recorder() { stop(); }

    // Meyers singleton accessor; the single instance lives for the program's
    // lifetime.
    static desktop_recorder* getInstance() {
        static desktop_recorder rec;
        return &rec;
    }

    // Begin recording to `fileName` at the given frame rate and output size.
    // NOTE(review): return semantics (e.g. false when already running) are
    // defined in the .cpp -- confirm before relying on them.
    bool start(const char* fileName, int fps = 10, int outWidth = 800, int outHeight = 600);
    void stop();

protected:
    // Worker-thread entry point (capture loop).
    void run();

protected:
    std::thread thread_ = {};
    std::mutex mutex_ = {};
    // Signals the capture loop to keep running; atomic so stop() can flip it
    // from another thread.
    std::atomic_bool running_ = false;

    // Private-ish constructor: instances only via getInstance().
    desktop_recorder() {}
};
def under_sampling(xTrain, yTrain, neighbors=200):
    """Undersample the majority class of a training set with AllKNN.

    Parameters:
        xTrain: pandas DataFrame of features.
        yTrain: pandas DataFrame holding the target column.
        neighbors: `n_neighbors` passed to AllKNN (size of the neighborhood
            used to decide which majority samples to drop).

    Returns:
        (xTrain, yTrain) as DataFrames with the original column names,
        containing only the samples AllKNN kept.
    """
    xTrainNames = xTrain.columns.values.tolist()
    yTrainNames = yTrain.columns.values.tolist()
    model = AllKNN(random_state=42, ratio='majority', n_neighbors=neighbors)
    # fit_sample returns plain arrays, so the column names must be reattached.
    xTrain, yTrain = model.fit_sample(xTrain, yTrain)
    # Pass the name lists directly: the previous `columns=[names]` wrapped the
    # list in another list, which pandas interprets as a one-level MultiIndex
    # instead of ordinary flat columns.
    xTrain = pd.DataFrame(xTrain, columns=xTrainNames)
    yTrain = pd.DataFrame(yTrain, columns=yTrainNames)
    return xTrain, yTrain
Functional Inquiry from the Perspective of Design Criticism — Taking the Practicality and Design of the Power Strip as an Example: Functionality is a criterion for judging the value of a design and is prevalent in all kinds of design criticism. The direct object of design criticism is the design work, and design practice is the basis of design criticism. Design criticism must rest on an in-depth analysis of the design work; a deep feeling for, and an accurate grasp of, the work is the starting point of design criticism. By analyzing the unreasonable aspects of power-strip design encountered in everyday use, and by identifying actual cases in which those problems have been solved, the theoretical value of design criticism can be demonstrated, with a view to promoting the healthy development of design creation.
#!/usr/bin/env python
"""Feature extraction for training SVM relation classifiers over KAF/NAF
opinion annotations: expression->target and expression->holder pairs.

NOTE(review): `write_to_output` concatenates bytes (`.encode('utf-8')`) with
a str newline, which only works on Python 2 — this module appears to be
Python 2 code.
"""
import sys


def write_to_output(my_class, feats, output):
    """Write one SVM training line: `<class>\\t<name>=<value>...` (UTF-8)."""
    my_str = my_class
    for name, value in feats:
        my_str += '\t' + name + '=' + value
    output.write(my_str.encode('utf-8') + '\n')


#########################################################################
# EXTRACTION OF FEATURES FOR TRAINING THE RELATION CLASSIFIER EXP --> TARGET
#########################################################################
def extract_feats_exp_tar(exp_ids, tar_ids, knaf_obj, use_lemmas=True, use_tokens=True, use_dependencies=True):
    """Extract (name, value) features for one expression/target term-id pair.

    Features: lemmas and tokens of each span, the shortest dependency path
    between spans, whether both spans share a sentence, and a bucketed
    token-position distance.
    """
    all_feats = []
    # Index tokens by id: [token_id] -> (word, sentence_id, running position)
    data_for_token = {}
    for num_token, token_obj in enumerate(knaf_obj.get_tokens()):
        word = token_obj.get_text()
        s_id = token_obj.get_sent()
        w_id = token_obj.get_id()
        data_for_token[w_id] = (word, s_id, num_token)

    # Loading data for terms: [term_id] -> (lemma, span_token_ids)
    data_for_term = {}
    for term in knaf_obj.get_terms():
        termid = term.get_id()
        lemma = term.get_lemma()
        span = term.get_span()
        span_token_ids = []
        if span is not None:
            span_token_ids = span.get_span_ids()
        data_for_term[termid] = (lemma, span_token_ids)

    # Lemma/token features for the expression; also accumulate its average
    # token position and first sentence id.
    sentence_for_exp = None
    avg_position_exp = 0
    n_toks = 0
    for my_id in exp_ids:
        lemma, span_tok_ids = data_for_term[my_id]
        if use_lemmas:
            all_feats.append(('lemmaExp', lemma))
        for tok_id in span_tok_ids:
            token, sent_id, num_token = data_for_token[tok_id]
            avg_position_exp += num_token
            n_toks += 1
            if use_tokens:
                all_feats.append(('tokenExp', token))
            if sentence_for_exp is None:
                sentence_for_exp = sent_id
    # NOTE(review): raises ZeroDivisionError if the expression span has no
    # tokens — callers appear to guarantee non-empty spans; confirm.
    avg_position_exp = avg_position_exp * 1.0 / n_toks

    # Lemmas/tokens for the target, same accumulation as above.
    sentence_for_tar = None
    avg_position_tar = 0
    n_toks = 0
    for my_id in tar_ids:
        lemma, span_tok_ids = data_for_term[my_id]
        if use_lemmas:
            all_feats.append(('lemmaTar', lemma))
        for tok_id in span_tok_ids:
            token, sent_id, num_token = data_for_token[tok_id]
            avg_position_tar += num_token
            n_toks += 1
            if use_tokens:
                all_feats.append(('tokenTar', token))
            if sentence_for_tar is None:
                sentence_for_tar = sent_id
    avg_position_tar = avg_position_tar * 1.0 / n_toks

    # Shortest dependency path between the two spans, joined with '#'.
    if use_dependencies:
        dependency_extractor = knaf_obj.get_dependency_extractor()
        if dependency_extractor is not None:
            deps = dependency_extractor.get_shortest_path_spans(exp_ids, tar_ids)
            if deps is not None:
                all_feats.append(('deps-exp-tar', '#'.join(deps)))

    # Sentence co-occurrence feature (compares first sentence of each span).
    if sentence_for_exp is not None and sentence_for_tar is not None and sentence_for_exp == sentence_for_tar:
        all_feats.append(('same_sentence', 'yes'))
    else:
        all_feats.append(('same_sentence', 'no'))

    # Distance between average token positions, bucketed into four bins.
    dist = abs(avg_position_exp - avg_position_tar)
    if dist <= 10:
        my_dist = 'veryclose'
    elif dist <= 20:
        my_dist = 'close'
    elif dist <= 25:
        my_dist = 'far'
    else:
        my_dist = 'veryfar'
    all_feats.append(('distExpTar', my_dist))
    return all_feats


def create_rel_exp_tar_training(knaf_obj, output=sys.stdout, valid_opinions=None, use_dependencies=True, use_tokens=True, use_lemmas=True):
    """Write SVM training data for the expression->target classifier.

    For every opinion with both spans, the aligned (exp, tar) pair is a
    positive example (+1) and every mismatched pairing (exp_i, tar_j), i != j,
    is a negative example (-1). `valid_opinions` optionally restricts the
    opinions by expression polarity.
    """
    # Obtain pairs of term-id spans: [(Exp,Tar), (E,T), (E,T)....]
    pairs = []
    for opinion in knaf_obj.get_opinions():
        opi_id = opinion.get_id()
        opi_exp = opinion.get_expression()
        exp_type = ''
        exp_ids = []
        if opi_exp is not None:
            exp_type = opi_exp.get_polarity()
            span = opi_exp.get_span()
            if span is not None:
                exp_ids = span.get_span_ids()
        opi_tar = opinion.get_target()
        tar_ids = []
        if opi_tar is not None:
            span = opi_tar.get_span()
            if span is not None:
                tar_ids = span.get_span_ids()
        if valid_opinions is not None:
            if exp_type not in valid_opinions:
                continue  ## These opinions will not be used
        if len(tar_ids) != 0 and len(exp_ids) != 0:
            pairs.append((exp_ids, tar_ids))

    for idx1, (exp1, tar1) in enumerate(pairs):
        feats_positive = extract_feats_exp_tar(exp1, tar1, knaf_obj, use_dependencies=use_dependencies, use_tokens=use_tokens, use_lemmas=use_lemmas)
        write_to_output('+1', feats_positive, output)
        # Every other opinion's target yields a negative example for exp1.
        for idx2, (exp2, tar2) in enumerate(pairs):
            if idx1 != idx2:
                feats_negative = extract_feats_exp_tar(exp1, tar2, knaf_obj, use_dependencies=use_dependencies, use_tokens=use_tokens, use_lemmas=use_lemmas)
                write_to_output('-1', feats_negative, output)


def extract_feats_exp_hol(exp_ids, hol_ids, knaf_obj, use_lemmas=True, use_tokens=True, use_dependencies=True):
    """Extract (name, value) features for one expression/holder term-id pair.

    Mirror image of `extract_feats_exp_tar` with Hol-suffixed feature names.
    """
    all_feats = []
    # Index tokens by id: [token_id] -> (word, sentence_id, running position)
    data_for_token = {}
    for num_token, token_obj in enumerate(knaf_obj.get_tokens()):
        word = token_obj.get_text()
        s_id = token_obj.get_sent()
        w_id = token_obj.get_id()
        data_for_token[w_id] = (word, s_id, num_token)

    # Loading data for terms: [term_id] -> (lemma, span_token_ids)
    data_for_term = {}
    for term in knaf_obj.get_terms():
        termid = term.get_id()
        lemma = term.get_lemma()
        span = term.get_span()
        span_token_ids = []
        if span is not None:
            span_token_ids = span.get_span_ids()
        data_for_term[termid] = (lemma, span_token_ids)

    # Lemma/token features for the expression span.
    sentence_for_exp = None
    avg_position_exp = 0
    n_toks = 0
    for my_id in exp_ids:
        lemma, span_tok_ids = data_for_term[my_id]
        if use_lemmas:
            all_feats.append(('lemmaExp', lemma))
        for tok_id in span_tok_ids:
            token, sent_id, num_token = data_for_token[tok_id]
            avg_position_exp += num_token
            n_toks += 1
            if use_tokens:
                all_feats.append(('tokenExp', token))
            if sentence_for_exp is None:
                sentence_for_exp = sent_id
    avg_position_exp = avg_position_exp * 1.0 / n_toks

    # Lemmas/tokens for the HOLDER span.
    sentence_for_hol = None
    avg_position_hol = 0
    n_toks = 0
    for my_id in hol_ids:
        lemma, span_tok_ids = data_for_term[my_id]
        if use_lemmas:
            all_feats.append(('lemmaHol', lemma))
        for tok_id in span_tok_ids:
            token, sent_id, num_token = data_for_token[tok_id]
            avg_position_hol += num_token
            n_toks += 1
            if use_tokens:
                all_feats.append(('tokenHol', token))
            if sentence_for_hol is None:
                sentence_for_hol = sent_id
    avg_position_hol = avg_position_hol * 1.0 / n_toks

    # Shortest dependency path between the two spans, joined with '#'.
    if use_dependencies:
        dependency_extractor = knaf_obj.get_dependency_extractor()
        if dependency_extractor is not None:
            deps = dependency_extractor.get_shortest_path_spans(exp_ids, hol_ids)
            if deps is not None:
                all_feats.append(('deps-exp-hol', '#'.join(deps)))

    # Sentence co-occurrence feature.
    if sentence_for_exp is not None and sentence_for_hol is not None and sentence_for_exp == sentence_for_hol:
        all_feats.append(('same_sentence', 'yes'))
    else:
        all_feats.append(('same_sentence', 'no'))

    # Distance between average token positions, bucketed into four bins.
    dist = abs(avg_position_exp - avg_position_hol)
    if dist <= 10:
        my_dist = 'veryclose'
    elif dist <= 20:
        my_dist = 'close'
    elif dist <= 25:
        my_dist = 'far'
    else:
        my_dist = 'veryfar'
    all_feats.append(('distExpHol', my_dist))
    return all_feats


def create_rel_exp_hol_training(knaf_obj, output=sys.stdout, valid_opinions=None, use_dependencies=True, use_tokens=True, use_lemmas=True):
    """Write SVM training data for the expression->holder classifier.

    Aligned (exp, hol) pairs are positives (+1); mismatched pairings are
    negatives (-1). `valid_opinions` optionally restricts by polarity.
    """
    # Obtain pairs of term-id spans: [(Exp,Hol), (E,H), (E,H)....]
    pairs = []
    for opinion in knaf_obj.get_opinions():
        opi_exp = opinion.get_expression()
        exp_type = ''
        exp_ids = []
        if opi_exp is not None:
            exp_type = opi_exp.get_polarity()
            span = opi_exp.get_span()
            if span is not None:
                exp_ids = span.get_span_ids()
        opi_hol = opinion.get_holder()
        hol_ids = []
        if opi_hol is not None:
            span = opi_hol.get_span()
            if span is not None:
                hol_ids = span.get_span_ids()
        if valid_opinions is not None:
            if exp_type not in valid_opinions:
                continue  ## These opinions will not be used
        if len(exp_ids) != 0 and len(hol_ids) != 0:
            pairs.append((exp_ids, hol_ids))

    for idx1, (expids1, tarids1) in enumerate(pairs):
        feats_positive = extract_feats_exp_hol(expids1, tarids1, knaf_obj, use_dependencies=use_dependencies, use_tokens=use_tokens, use_lemmas=use_lemmas)
        write_to_output('+1', feats_positive, output)
        # Every other opinion's holder yields a negative example for expids1.
        for idx2, (expids2, tarids2) in enumerate(pairs):
            if idx1 != idx2:
                feats_negative = extract_feats_exp_hol(expids1, tarids2, knaf_obj, use_dependencies=use_dependencies, use_tokens=use_tokens, use_lemmas=use_lemmas)
                write_to_output('-1', feats_negative, output)
<reponame>closedsum/core<gh_stars>1-10 // Copyright 2017-2021 Closed Sum Games, LLC. All Rights Reserved. #include "Managers/UserWidget/CsTypes_UserWidget.h" #include "CsSettings_Manager_UserWidget.generated.h" #pragma once // FCsSettings_Manager_UserWidget_PoolParams #pragma region USTRUCT(BlueprintType) struct CSUI_API FCsSettings_Manager_UserWidget_PoolParams { GENERATED_USTRUCT_BODY() public: UPROPERTY(EditAnywhere, BlueprintReadOnly) FECsUserWidgetPooledClass Class; UPROPERTY(EditAnywhere, BlueprintReadOnly) FECsUserWidget Widget; UPROPERTY(EditAnywhere, BlueprintReadOnly, meta = (ClampMin = "4", UIMin = "4")) int32 PoolSize; UPROPERTY(EditAnywhere, BlueprintReadOnly, meta = (ClampMin = "4", UIMin = "4")) int32 PayloadSize; FCsSettings_Manager_UserWidget_PoolParams() : Class(), Widget(), PoolSize(4), PayloadSize(4) { } }; #pragma endregion FCsSettings_Manager_UserWidget_PoolParams // FCsSettings_Manager_UserWidget #pragma region class UObject; class UClass; USTRUCT(BlueprintType) struct CSUI_API FCsSettings_Manager_UserWidget { GENERATED_USTRUCT_BODY() public: /** */ UPROPERTY(EditAnywhere, BlueprintReadOnly) TMap<FECsUserWidgetPooled, FECsUserWidgetPooled> TypeMap; /** */ UPROPERTY(EditAnywhere, BlueprintReadOnly) FECsUserWidgetPooled DefaultType; UPROPERTY(EditAnywhere, BlueprintReadOnly) TMap<FECsUserWidgetPooled, FCsSettings_Manager_UserWidget_PoolParams> PoolParams; FCsSettings_Manager_UserWidget() : TypeMap(), DefaultType(), PoolParams() { } }; #pragma endregion FCsSettings_Manager_UserWidget // FCsSettings_Manager_UserWidget_UnitTest #pragma region class UObject; class UClass; USTRUCT(BlueprintType) struct CSUI_API FCsSettings_Manager_UserWidget_UnitTest { GENERATED_USTRUCT_BODY() public: FCsSettings_Manager_UserWidget_UnitTest() { } }; #pragma endregion FCsSettings_Manager_UserWidget_UnitTest
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.service.center.client; import java.io.IOException; import java.net.URISyntaxException; import java.net.URLEncoder; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.http.HttpStatus; import org.apache.http.client.utils.URIBuilder; import org.apache.servicecomb.http.client.auth.RequestAuthHeaderProvider; import org.apache.servicecomb.http.client.common.HttpConfiguration.SSLProperties; import org.apache.servicecomb.http.client.common.HttpResponse; import org.apache.servicecomb.http.client.common.HttpTransport; import org.apache.servicecomb.http.client.common.HttpTransportFactory; import org.apache.servicecomb.http.client.common.HttpUtils; import org.apache.servicecomb.service.center.client.OperationEvents.UnAuthorizedOperationEvent; import org.apache.servicecomb.service.center.client.exception.OperationException; import org.apache.servicecomb.service.center.client.model.CreateMicroserviceInstanceRequest; import org.apache.servicecomb.service.center.client.model.CreateMicroserviceRequest; import org.apache.servicecomb.service.center.client.model.CreateSchemaRequest; import 
org.apache.servicecomb.service.center.client.model.ErrorMessage; import org.apache.servicecomb.service.center.client.model.FindMicroserviceInstancesResponse; import org.apache.servicecomb.service.center.client.model.GetSchemaListResponse; import org.apache.servicecomb.service.center.client.model.GetSchemaResponse; import org.apache.servicecomb.service.center.client.model.HeartbeatsRequest; import org.apache.servicecomb.service.center.client.model.Microservice; import org.apache.servicecomb.service.center.client.model.MicroserviceInstance; import org.apache.servicecomb.service.center.client.model.MicroserviceInstanceResponse; import org.apache.servicecomb.service.center.client.model.MicroserviceInstanceStatus; import org.apache.servicecomb.service.center.client.model.MicroserviceInstancesResponse; import org.apache.servicecomb.service.center.client.model.MicroserviceResponse; import org.apache.servicecomb.service.center.client.model.MicroservicesResponse; import org.apache.servicecomb.service.center.client.model.ModifySchemasRequest; import org.apache.servicecomb.service.center.client.model.RbacTokenRequest; import org.apache.servicecomb.service.center.client.model.RbacTokenResponse; import org.apache.servicecomb.service.center.client.model.RegisteredMicroserviceInstanceResponse; import org.apache.servicecomb.service.center.client.model.RegisteredMicroserviceResponse; import org.apache.servicecomb.service.center.client.model.SchemaInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.EventBus; public class ServiceCenterClient implements ServiceCenterOperation { private static final Logger LOGGER = LoggerFactory.getLogger(ServiceCenterClient.class); private ServiceCenterRawClient httpClient; private EventBus eventBus; public ServiceCenterClient(ServiceCenterRawClient httpClient) { this.httpClient = httpClient; } public ServiceCenterClient setEventBus(EventBus eventBus) { this.eventBus = eventBus; return this; } public 
ServiceCenterClient(AddressManager addressManager, SSLProperties sslProperties, RequestAuthHeaderProvider requestAuthHeaderProvider, String tenantName, Map<String, String> extraGlobalHeaders) { HttpTransport httpTransport = HttpTransportFactory.createHttpTransport(sslProperties, requestAuthHeaderProvider); httpTransport.addHeaders(extraGlobalHeaders); this.httpClient = new ServiceCenterRawClient.Builder() .setTenantName(tenantName) .setAddressManager(addressManager) .setHttpTransport(httpTransport).build(); } @Override public MicroserviceInstancesResponse getServiceCenterInstances() { try { HttpResponse response = httpClient.getHttpRequest("/registry/health", null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), MicroserviceInstancesResponse.class); } sendUnAuthorizedEvent(response); throw new OperationException( "get service-center instances fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service-center instances fails", e); } } @Override public RegisteredMicroserviceResponse registerMicroservice(Microservice microservice) { try { CreateMicroserviceRequest request = new CreateMicroserviceRequest(); request.setService(microservice); HttpResponse response = httpClient .postHttpRequest("/registry/microservices", null, HttpUtils.serialize(request)); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), RegisteredMicroserviceResponse.class); } sendUnAuthorizedEvent(response); throw new OperationException( "register service fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "register service fails", e); } } @Override public MicroservicesResponse getMicroserviceList() { try { 
HttpResponse response = httpClient.getHttpRequest("/registry/microservices", null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), MicroservicesResponse.class); } sendUnAuthorizedEvent(response); throw new OperationException( "get service List fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service List fails", e); } } @Override public RegisteredMicroserviceResponse queryServiceId(Microservice microservice) { try { URIBuilder uriBuilder = new URIBuilder("/registry/existence"); uriBuilder.setParameter("type", "microservice"); uriBuilder.setParameter("appId", microservice.getAppId()); uriBuilder.setParameter("serviceName", microservice.getServiceName()); uriBuilder.setParameter("version", microservice.getVersion()); uriBuilder.setParameter("env", microservice.getEnvironment()); HttpResponse response = httpClient.getHttpRequest(uriBuilder.build().toString(), null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), RegisteredMicroserviceResponse.class); } sendUnAuthorizedEvent(response); LOGGER.info("Query serviceId fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); return null; } catch (IOException e) { throw new OperationException( "query serviceId fails", e); } catch (URISyntaxException e) { throw new OperationException( "build url failed.", e); } } @Override public Microservice getMicroserviceByServiceId(String serviceId) { try { HttpResponse response = httpClient.getHttpRequest("/registry/microservices/" + serviceId, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { MicroserviceResponse microserviceResponse = HttpUtils .deserialize(response.getContent(), MicroserviceResponse.class); return 
microserviceResponse.getService(); } sendUnAuthorizedEvent(response); throw new OperationException( "get service message fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service message fails", e); } } @Override public RegisteredMicroserviceInstanceResponse registerMicroserviceInstance(MicroserviceInstance instance) { try { CreateMicroserviceInstanceRequest request = new CreateMicroserviceInstanceRequest(); request.setInstance(instance); HttpResponse response = httpClient .postHttpRequest("/registry/microservices/" + instance.getServiceId() + "/instances", null, HttpUtils.serialize(request)); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), RegisteredMicroserviceInstanceResponse.class); } sendUnAuthorizedEvent(response); throw new OperationException( "register service instance fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "register service instance fails", e); } } @Override public FindMicroserviceInstancesResponse findMicroserviceInstance(String consumerId, String appId, String serviceName, String versionRule, String revision) { try { Map<String, String> headers = new HashMap<>(); headers.put("X-ConsumerId", consumerId); HttpResponse response = httpClient .getHttpRequest("/registry/instances?appId=" + URLEncoder.encode(appId, "UTF-8") + "&serviceName=" + HttpUtils.encodeURLParam(serviceName) + "&version=" + HttpUtils.encodeURLParam(versionRule) + "&rev=" + HttpUtils.encodeURLParam(revision) , headers, null); FindMicroserviceInstancesResponse result = new FindMicroserviceInstancesResponse(); if (response.getStatusCode() == HttpStatus.SC_OK) { result.setModified(true); 
result.setRevision(response.getHeader("X-Resource-Revision")); result.setMicroserviceInstancesResponse( HttpUtils.deserialize(response.getContent(), MicroserviceInstancesResponse.class)); return result; } if (response.getStatusCode() == HttpStatus.SC_NOT_MODIFIED) { result.setModified(false); return result; } sendUnAuthorizedEvent(response); throw new OperationException( "get service instances list fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service instances list fails", e); } } @Override public MicroserviceInstancesResponse getMicroserviceInstanceList(String serviceId) { try { HttpResponse response = httpClient .getHttpRequest("/registry/microservices/" + serviceId + "/instances", null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.deserialize(response.getContent(), MicroserviceInstancesResponse.class); } sendUnAuthorizedEvent(response); throw new OperationException( "get service instances list fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service instances list fails", e); } } @Override public MicroserviceInstance getMicroserviceInstance(String serviceId, String instanceId) { try { HttpResponse response = httpClient .getHttpRequest("/registry/microservices/" + serviceId + "/instances/" + instanceId, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { MicroserviceInstanceResponse instanceResponse = HttpUtils .deserialize(response.getContent(), MicroserviceInstanceResponse.class); return instanceResponse.getInstance(); } sendUnAuthorizedEvent(response); throw new OperationException( "get service instance message fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + 
response.getContent()); } catch (IOException e) { throw new OperationException( "get service instance message fails", e); } } @Override public void deleteMicroserviceInstance(String serviceId, String instanceId) { try { HttpResponse response = httpClient .deleteHttpRequest("/registry/microservices/" + serviceId + "/instances/" + instanceId, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { LOGGER.info("Delete service instance successfully."); return; } sendUnAuthorizedEvent(response); throw new OperationException( "delete service instance fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "delete service instance fails", e); } } @Override public boolean updateMicroserviceInstanceStatus(String serviceId, String instanceId, MicroserviceInstanceStatus status) { try { HttpResponse response = httpClient.putHttpRequest( "/registry/microservices/" + serviceId + "/instances/" + instanceId + "/status?value=" + status, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return true; } sendUnAuthorizedEvent(response); throw new OperationException( "update service instance status fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "update service instance status fails", e); } } @Override public void sendHeartBeats(HeartbeatsRequest heartbeatsRequest) { try { HttpResponse response = httpClient .putHttpRequest("/registry/heartbeats", null, HttpUtils.serialize(heartbeatsRequest)); if (response.getStatusCode() == HttpStatus.SC_OK) { return; } sendUnAuthorizedEvent(response); throw new OperationException( "heartbeats fails, statusCode = " + response.getStatusCode() + "; message = " + response.getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new 
OperationException( "heartbeats fails ", e); } } @Override public boolean sendHeartBeat(String serviceId, String instanceId) { try { HttpResponse response = httpClient .putHttpRequest("/registry/microservices/" + serviceId + "/instances/" + instanceId + "/heartbeat", null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { return true; } sendUnAuthorizedEvent(response); throw new OperationException( "heartbeats fails, statusCode = " + response.getStatusCode() + "; message = " + response.getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "heartbeats fails ", e); } } @Override public List<SchemaInfo> getServiceSchemasList(String serviceId, boolean withContent) { String url = "/registry/microservices/" + serviceId + "/schemas"; if (withContent) { url = url + "?withSchema=1"; } try { HttpResponse response = httpClient .getHttpRequest(url, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { GetSchemaListResponse getSchemaResponse = HttpUtils .deserialize(response.getContent(), GetSchemaListResponse.class); return getSchemaResponse.getSchemas(); } sendUnAuthorizedEvent(response); throw new OperationException( "get service schemas list fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service schemas list fails", e); } } /** * Get one schema context of service * * @param serviceId * @param schemaId * @return * @throws OperationException */ public String getServiceSchemaContext(String serviceId, String schemaId) { try { HttpResponse response = httpClient .getHttpRequest("/registry/microservices/" + serviceId + "/schemas/" + schemaId, null, null); if (response.getStatusCode() == HttpStatus.SC_OK) { GetSchemaResponse getSchemaResponse = HttpUtils.deserialize(response.getContent(), GetSchemaResponse.class); return getSchemaResponse.getSchema(); } 
sendUnAuthorizedEvent(response); throw new OperationException( "get service schema context fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "get service schemas context fails", e); } } @Override public boolean registerSchema(String serviceId, String schemaId, CreateSchemaRequest schema) { try { HttpResponse response = httpClient .putHttpRequest("/registry/microservices/" + serviceId + "/schemas/" + schemaId, null, HttpUtils.serialize(schema)); if (response.getStatusCode() == HttpStatus.SC_OK) { return true; } sendUnAuthorizedEvent(response); throw new OperationException( "update service schema fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "update service schema fails", e); } } @Override public boolean updateServiceSchemaContext(String serviceId, SchemaInfo schemaInfo) { try { CreateSchemaRequest request = new CreateSchemaRequest(); request.setSchema(schemaInfo.getSchema()); request.setSummary(schemaInfo.getSummary()); HttpResponse response = httpClient .putHttpRequest("/registry/microservices/" + serviceId + "/schemas/" + schemaInfo.getSchemaId(), null, HttpUtils.serialize(request)); if (response.getStatusCode() == HttpStatus.SC_OK) { return true; } sendUnAuthorizedEvent(response); throw new OperationException( "update service schema fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "update service schema fails", e); } } @Override public boolean batchUpdateServiceSchemaContext(String serviceId, ModifySchemasRequest modifySchemasRequest) { try { HttpResponse response = httpClient .postHttpRequest("/registry/microservices/" + serviceId + "/schemas", 
null, HttpUtils.serialize(modifySchemasRequest)); if (response.getStatusCode() == HttpStatus.SC_OK) { return true; } sendUnAuthorizedEvent(response); throw new OperationException( "update service schema fails, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "update service schema fails", e); } } private void sendUnAuthorizedEvent(HttpResponse response) { if (this.eventBus != null && response.getStatusCode() == HttpStatus.SC_UNAUTHORIZED) { this.eventBus.post(new UnAuthorizedOperationEvent()); } } @Override public RbacTokenResponse queryToken(RbacTokenRequest request) { try { HttpResponse response = httpClient .postHttpRequestAbsoluteUrl("/v4/token", null, HttpUtils.serialize(request)); if (response.getStatusCode() == HttpStatus.SC_OK) { RbacTokenResponse result = HttpUtils.deserialize(response.getContent(), RbacTokenResponse.class); result.setStatusCode(HttpStatus.SC_OK); return result; } if (response.getStatusCode() == HttpStatus.SC_NOT_FOUND) { RbacTokenResponse result = new RbacTokenResponse(); result.setStatusCode(response.getStatusCode()); return result; } if (response.getStatusCode() == HttpStatus.SC_UNAUTHORIZED || response.getStatusCode() == HttpStatus.SC_FORBIDDEN) { RbacTokenResponse result = new RbacTokenResponse(); result.setStatusCode(response.getStatusCode()); ErrorMessage errorMessage = HttpUtils.deserialize(response.getContent(), ErrorMessage.class); result.setErrorCode(errorMessage.getErrorCode()); return result; } throw new OperationException( "query token failed, statusCode = " + response.getStatusCode() + "; message = " + response .getMessage() + "; content = " + response.getContent()); } catch (IOException e) { throw new OperationException( "query token failed", e); } } }
/**
 * Define an API with a couple of different endpoints that allow us to exercise
 * various functionality.
 */
fn paginate_api() -> ApiDescription<usize> {
    let mut api = ApiDescription::new();
    // NOTE(review): the unwraps assume registration of these fixed test
    // endpoints cannot fail at runtime — a panic here indicates a broken
    // endpoint definition, not a recoverable error.
    api.register(api_integers).unwrap();
    api.register(api_empty).unwrap();
    api.register(api_with_extra_params).unwrap();
    api.register(api_with_required_params).unwrap();
    api.register(api_dictionary).unwrap();
    api
}
New BeerSmith 2016 Add-Ons for Hops, Yeast, Miscellaneous Ingredients and Grains I’m happy to announce a series of new add-ons for BeerSmith Desktop. The new add-ons feature over 170 new yeasts, 32 hop varieties, and 48 miscellaneous items, plus updates for Avangard, Bairds, Muntons, and Rahr malts. Thanks largely to the efforts of fellow brewer Brian Young, we now have most of the latest hop, yeast, and miscellaneous-ingredient updates available as easy add-ons in BeerSmith. The names of the new add-ons are listed below. To download the add-ons in BeerSmith Desktop, go to File->Add-ons, click the Add button, then select an add-on and click Install: Hop Update 2016 (48 items) Yeast Update 2016 (178 items) Misc Update 2016 (48 items) Avangard Malts (5 items) Bairds Malts (15 items) Muntons Malts (17 items) Rahr Malts (7 items) Thanks again to Brian Young for championing this effort! Note that if you have not already done so, you can download a free trial of BeerSmith 2 Desktop here, and I have a series of tutorials here on how to use it! Related Beer Brewing Articles from BeerSmith:
package org.jruby.ext.ffi.jffi;

import com.kenai.jffi.ObjectParameterStrategy;
import com.kenai.jffi.ObjectParameterType;

/**
 * Base class for JFFI object-parameter strategies used for pointer arguments.
 * Every constructor simply forwards to the matching
 * {@link ObjectParameterStrategy} constructor; subclasses are expected to
 * supply the actual marshalling behavior.
 */
abstract public class PointerParameterStrategy extends ObjectParameterStrategy {
    /** Forwards the direct flag to the superclass (presumably "direct" means
     * backed by native memory — confirm against jffi documentation). */
    PointerParameterStrategy(boolean isDirect) {
        super(isDirect);
    }

    /** Forwards the direct flag plus an explicit native parameter type. */
    PointerParameterStrategy(boolean isDirect, ObjectParameterType objectType) {
        super(isDirect, objectType);
    }

    /** Forwards a JFFI {@code StrategyType} constant. */
    PointerParameterStrategy(StrategyType type) {
        super(type);
    }

    /** Forwards a {@code StrategyType} plus an explicit parameter type. */
    PointerParameterStrategy(StrategyType type, ObjectParameterType objectType) {
        super(type, objectType);
    }
}
import java.util.*;
import java.io.*;

/**
 * For each query n, prints k (the largest multiplicity of any prime factor of
 * n) followed by a sequence of k integers, each greater than 1, whose product
 * is n and in which every element divides the next.
 *
 * Fixes over the original:
 * - the trial-division counter was an {@code int} compared against
 *   {@code Math.sqrt(n)}; for n with a prime factor above ~2^31 squared the
 *   counter overflowed. The loop now uses a {@code long} with an
 *   overflow-safe integer bound (p <= n / p), avoiding floating point too.
 * - the {@code maxFreqElement} variable was computed but never used; removed.
 * - the monolithic method is decomposed into small, testable helpers while
 *   keeping {@code printSequence}'s printed output byte-identical.
 */
public class Solution {
    static PrintWriter out = new PrintWriter(System.out);

    public static void main(String[] args) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String[] input = br.readLine().trim().split(" ");
        int numTestCases = Integer.parseInt(input[0]);
        while (numTestCases-- > 0) {
            input = br.readLine().trim().split(" ");
            long n = Long.parseLong(input[0]);
            printSequence(n);
        }
        out.flush();
        out.close();
    }

    /**
     * Prints the answer for one query: the sequence length on its own line,
     * then the sequence elements each followed by a single space.
     *
     * @param n the number to decompose (n >= 2 expected; n == 1 prints "0"
     *          and an empty line, matching the original behavior)
     */
    public static void printSequence(long n) {
        List<Long> ans = buildSequence(n);
        out.println(ans.size());
        StringBuilder sb = new StringBuilder();
        for (long value : ans) {
            sb.append(value).append(' ');
        }
        out.println(sb);
    }

    /**
     * Builds the longest sequence of integers > 1 whose product is n and in
     * which each element divides the next. The length equals the maximum
     * multiplicity of any prime factor of n (empty list for n <= 1).
     */
    static List<Long> buildSequence(long n) {
        List<Long> factors = primeFactors(n);
        int maxFreq = maxRunLength(factors);
        List<Long> ans = new ArrayList<>(Collections.nCopies(maxFreq, 1L));
        // Distribute each prime's occurrences over the tail of the sequence:
        // a prime appearing m times multiplies the last m slots once each, so
        // later slots accumulate a superset of primes and divisibility holds.
        int i = 0;
        while (i < factors.size()) {
            int j = i;
            int idx = ans.size() - 1;
            while (j < factors.size() && factors.get(j).equals(factors.get(i))) {
                ans.set(idx, ans.get(idx) * factors.get(j));
                idx--;
                j++;
            }
            i = j;
        }
        return ans;
    }

    /**
     * Returns n's prime factors with multiplicity, in non-decreasing order.
     * Uses p <= n / p as the loop bound: p * p can overflow a long near
     * Long.MAX_VALUE, while the division form cannot.
     */
    static List<Long> primeFactors(long n) {
        List<Long> factors = new ArrayList<>();
        for (long p = 2; p <= n / p; p++) {
            while (n % p == 0) {
                factors.add(p);
                n /= p;
            }
        }
        if (n > 1) {
            factors.add(n); // remaining cofactor is prime
        }
        return factors;
    }

    /** Longest run of equal consecutive values in a sorted list (0 if empty). */
    static int maxRunLength(List<Long> sorted) {
        int best = 0;
        int run = 0;
        for (int i = 0; i < sorted.size(); i++) {
            run = (i > 0 && sorted.get(i).equals(sorted.get(i - 1))) ? run + 1 : 1;
            best = Math.max(best, run);
        }
        return best;
    }
}
#include <iostream>
using namespace std;

// seen[v] > 0 means value v has been read but not yet printed.
// Globals so the array is zero-initialized, as in the original.
int seen[100000 + 5];
int n;

// Reads n values one per step (presumably a permutation of 1..n — confirm
// against the problem statement). After each read, prints one line: if the
// value is the largest not-yet-printed number, it is emitted followed by
// every smaller buffered value in descending order; otherwise the line is
// empty and the value is buffered for later.
int main() {
    cin >> n;
    int next_to_print = n;
    for (int step = 0; step < n; ++step) {
        int value;
        cin >> value;
        if (value != next_to_print) {
            seen[value]++; // hold it until its turn comes
            cout << endl;
            continue;
        }
        cout << value << " ";
        --next_to_print;
        // Drain the buffered run just below the printed value.
        while (seen[next_to_print] > 0) {
            cout << next_to_print << " ";
            --next_to_print;
        }
        cout << endl;
    }
}
California Republicans get no respect. Their numbers have dwindled to the point that no one from the hard-core right can get elected to statewide office, and the Republican governor, Arnold Schwarzenegger, has revealed himself to be more liberal than a lot of Democrats in the state. Palin may have “electrified” California Republicans, but her presence on the ticket has done diddley squat for the GOP ticket in the state. Now even their rock star candidate, vice presidential wannabe Sarah Palin, has dissed them. She has canceled a major trip to the state next week: The change is a shocker, because Palin’s presence had electrified the GOP base in California. Party insiders were distributing 15,000 tickets to her Sept. 26 rally in Orange County — and fundraisers reported an almost instantaneous sell-out of her two $1,000-a-head Sept. 25 fundraising events in Orange County and Santa Clara. Both fundraisers had generated such high ticket sales that the OC Lincoln Club event was moved to the Orange County Performing Arts Center, and the Bay Area event was moved from the Woodside home of Tom Siebel to the huge Santa Clara Convention Center. Palin may have “electrified” California Republicans, but her presence on the ticket has done diddley squat for the GOP ticket in the state. A new Field Poll shows that Obama-Biden is leading McCain-Palin by a whopping 16 points hereabouts. Still, it’s odd that she would cancel a fundraiser. Obama collected $9 million in a quick, one-day visit to Beverly Hills earlier this week.
/* Editor: Abdelrahman Hossam Nickname: Blobo2_Blobo2 IOI next year isA :) */ //#pragma GCC optimize ("O3") //#pragma GCC optimize ("unroll-loops") #include<bits/stdc++.h> using namespace std; #define int long long #define endl "\n" #define all(v) v.begin(),v.end() #define gen(arr,n,nxt) generate(arr,arr+n,nxt) #define Blobo2_el_gamded_gedan_elly_3ayz_yro7_IOI_w_3ayz_yakol_jilaty ios_base::sync_with_stdio(false);cin.tie(0); const int mo=1e9+7; int nxt(){int x;cin>>x;return x;} signed main(){ Blobo2_el_gamded_gedan_elly_3ayz_yro7_IOI_w_3ayz_yakol_jilaty int n=nxt(),k=nxt(),x=nxt(); int arr[n]; gen(arr,n,nxt); vector<pair<int,int> >v; sort(arr,arr+n); int mn=1e18+1,mx=0; for(int i=0;i<n-1;i++){ if(arr[i+1]-arr[i]>x) mn=min(mn,arr[i]),mx=max(mx,arr[i]),v.push_back({mn,mx}),mx=arr[i+1],mn=arr[i+1]; else mx=max({mx,arr[i],arr[i+1]}),mn=min({mn,arr[i],arr[i+1]}); } v.push_back({mn,mx}); int cnt=v.size(); vector<int>num; if(v.size()){ for(int i=0;i<v.size()-1;i++) num.push_back(((v[i+1].first-v[i].second-1)/x)); sort(all(num)); for(int i=0;i<num.size();i++){ if(k<num[i])break; k-=num[i]; cnt--; } } cout<<cnt<<endl; return 0; }
# Reads one line "n a b" and decides the winner by the parity of the
# distance between a and b: odd -> Borys, even -> Alice. (n is read but
# not used by the decision, matching the original.)
values = [int(token) for token in input().split()]
n, a, b = values[0], values[1], values[2]
winner = "Borys" if abs(a - b) % 2 else "Alice"
print(winner)
//
//  macro.h
//  Moline
//
//  Created by l.t.zero on 2019/3/9.
//  Copyright © 2019 sunlantao. All rights reserved.
//

#ifndef macro_h
#define macro_h

// Generates a lazy getter for `property` of type `Class *`: the first call
// runs `block` (expected to assign the backing ivar _property), subsequent
// calls return the cached value. The do/while(0) lets `block` hold multiple
// statements safely.
#define SingletonProperty(Class, property, block)\
- (Class *)property{\
    if (!_##property){\
        do{block;}while(0);\
    }\
    return _##property;\
}\

// Wraps `Stuff` (typically a performSelector: call) while silencing the
// -Warc-performSelector-leaks diagnostic for just that statement.
#define SuppressPerformSelectorLeakWarning(Stuff) \
do { \
    _Pragma("clang diagnostic push") \
    _Pragma("clang diagnostic ignored \"-Warc-performSelector-leaks\"") \
    Stuff; \
    _Pragma("clang diagnostic pop") \
} while (0)

// Declares an NSAttributedStringKey constant whose string value is the key's
// own name (first form) or an explicit value token (second form).
#define AttributedStringKey(key) NSAttributedStringKey const key = @#key
#define AttributedStringKeyValue(key, value) NSAttributedStringKey const key = @#value

// Short alias for SingletonProperty.
#define SP(Class, property, block) SingletonProperty(Class, property, block)

// Property-declaration shorthands (strong/weak/copy/assign object pointers,
// plus a plain-value assign variant).
#define PropertyDeclareNonStrong(Class, name) @property (nonatomic, strong) Class *name;
#define PropertyDeclareNonWeak(Class, name) @property (nonatomic, weak) Class *name;
#define PropertyDeclareNonCopy(Class, name) @property (nonatomic, copy) Class *name;
#define PropertyDeclareNonAssign(Class, name) @property (nonatomic, assign) Class *name;
#define PropertyDeclareNonBaseAssign(Class, name) @property (nonatomic, assign) Class name;

// Disables automatic content-inset adjustment on a scroll view (iOS 11+).
#define AdjustmentContentInsetDisable(obj) \
if (@available(iOS 11.0, *)){\
    obj.contentInsetAdjustmentBehavior = UIScrollViewContentInsetAdjustmentNever;\
}\

#define NSURLString(url) [NSURL URLWithString:url]

// True when `str` is a non-empty string object (guards against NSNull
// leaking out of JSON parsing).
#define ValidateString(str) (![str isKindOfClass:[NSNull class]] && str.length >0)
#define StringSafe(x, placeholder) ValidateString(x)?x:placeholder

#define Tag(str) (NSInteger)str

// Shrinks a CGRect by the given edge insets.
#define CGRectClipInsets(frame, top, left, bottom, right) \
CGRectMake(CGRectGetMinX(frame) + left, CGRectGetMinY(frame) + top, CGRectGetWidth(frame) - left - right, CGRectGetHeight(frame) - top - bottom)

// FIX: the original expanded to [UIImage imageNamed:image], referencing the
// undefined symbol `image` instead of the macro argument `name`, so any use
// of ImageNamed(...) failed to compile (or silently captured an unrelated
// local variable named `image`).
#define ImageNamed(name) [UIImage imageNamed:name]

// Sets UITextField padding via KVC (relies on undocumented padding keys --
// verify these still exist on the deployed iOS versions).
#define TextFieldInsets(ttf,top, left, bottom, right) \
[ttf setValue:@(top) forKey:@"paddingTop"];\
[ttf setValue:@(left) forKey:@"paddingLeft"];\
[ttf setValue:@(bottom) forKey:@"paddingBottom"];\
[ttf setValue:@(right) forKey:@"paddingRight"];

#endif /* macro_h */
# -*- coding: utf-8 -*- from .retry import retry from .retrier import Retrier from .backoff import Backoff from .constants import PY_34 from .errors import ErrorWhitelist, ErrorBlacklist, add_whitelist_error from .strategies import * # noqa from .exceptions import (RetryError, MaxRetriesExceeded, RetryTimeoutError, NotRetriableError) __author__ = '<NAME>' __license__ = 'MIT' # Current package version __version__ = '0.2.6' # Explicit symbols to export __all__ = ( 'retry', 'Retrier', 'Backoff', 'ConstantBackoff', 'FibonacciBackoff', 'ExponentialBackOff', 'ErrorWhitelist', 'ErrorBlacklist', 'add_whitelist_error', 'RetryError', 'MaxRetriesExceeded', 'RetryTimeoutError', 'NotRetriableError' ) # Expose asynchronous retrier if running in Python +3.4 if PY_34: from .async_retrier import AsyncRetrier __all__ += ('AsyncRetrier',)
// FIXME): We shouldn't have to pass the available logical width as argument. The problem is that
// availableLogicalWidth() does always return a value even if we cannot resolve it like when
// computing the intrinsic size (preferred widths). That's why we pass the responsibility to the
// caller who does know whether the available logical width is indefinite or not.
void RenderGrid::placeItemsOnGrid(Grid& grid, std::optional<LayoutUnit> availableSpace) const
{
    // Resolve the auto-repeat track counts for both axes; columns use the
    // caller-supplied available space (see FIXME above), rows use the height
    // available for percentage resolution.
    unsigned autoRepeatColumns = computeAutoRepeatTracksCount(ForColumns, availableSpace);
    unsigned autoRepeatRows = computeAutoRepeatTracksCount(ForRows, availableLogicalHeightForPercentageComputation());
    autoRepeatRows = clampAutoRepeatTracks(ForRows, autoRepeatRows);
    autoRepeatColumns = clampAutoRepeatTracks(ForColumns, autoRepeatColumns);

    // A change in the auto-repeat counts invalidates any cached placement.
    if (autoRepeatColumns != grid.autoRepeatTracks(ForColumns) || autoRepeatRows != grid.autoRepeatTracks(ForRows)) {
        grid.setNeedsItemsPlacement(true);
        grid.setAutoRepeatTracks(autoRepeatRows, autoRepeatColumns);
    }

    // Fast path: the placement computed by a previous pass is still valid.
    if (!grid.needsItemsPlacement())
        return;

    ASSERT(!grid.hasGridItems());
    populateExplicitGridAndOrderIterator(grid);

    // Items without a fully definite position are deferred: those definite in
    // the auto-placement major axis go in one bucket, the rest in another.
    Vector<RenderBox*> autoMajorAxisAutoGridItems;
    Vector<RenderBox*> specifiedMajorAxisAutoGridItems;
    bool hasAnyOrthogonalGridItem = false;
    for (auto* child = grid.orderIterator().first(); child; child = grid.orderIterator().next()) {
        if (grid.orderIterator().shouldSkipChild(*child))
            continue;

        hasAnyOrthogonalGridItem = hasAnyOrthogonalGridItem || GridLayoutFunctions::isOrthogonalChild(*this, *child);

        GridArea area = grid.gridItemArea(*child);
        // Translate definite coordinates to be relative to the smallest track
        // start, so implicit tracks preceding the explicit grid get
        // non-negative indices.
        if (!area.rows.isIndefinite())
            area.rows.translate(std::abs(grid.smallestTrackStart(ForRows)));
        if (!area.columns.isIndefinite())
            area.columns.translate(std::abs(grid.smallestTrackStart(ForColumns)));

        if (area.rows.isIndefinite() || area.columns.isIndefinite()) {
            grid.setGridItemArea(*child, area);
            bool majorAxisDirectionIsForColumns = autoPlacementMajorAxisDirection() == ForColumns;
            if ((majorAxisDirectionIsForColumns && area.columns.isIndefinite()) || (!majorAxisDirectionIsForColumns && area.rows.isIndefinite()))
                autoMajorAxisAutoGridItems.append(child);
            else
                specifiedMajorAxisAutoGridItems.append(child);
            continue;
        }
        // Fully definite item: insert into the grid directly.
        grid.insert(*child, { area.rows, area.columns });
    }
    grid.setHasAnyOrthogonalGridItem(hasAnyOrthogonalGridItem);

#if ENABLE(ASSERT)
    // Sanity check: the grid must cover at least all explicit tracks.
    if (grid.hasGridItems()) {
        ASSERT(grid.numTracks(ForRows) >= GridPositionsResolver::explicitGridRowCount(style(), grid.autoRepeatTracks(ForRows)));
        ASSERT(grid.numTracks(ForColumns) >= GridPositionsResolver::explicitGridColumnCount(style(), grid.autoRepeatTracks(ForColumns)));
    }
#endif

    // Place the deferred items, then record which auto-repeat tracks ended up
    // empty (they may be collapsed later).
    placeSpecifiedMajorAxisItemsOnGrid(grid, specifiedMajorAxisAutoGridItems);
    placeAutoMajorAxisItemsOnGrid(grid, autoMajorAxisAutoGridItems);

    grid.setAutoRepeatEmptyColumns(computeEmptyTracksForAutoRepeat(grid, ForColumns));
    grid.setAutoRepeatEmptyRows(computeEmptyTracksForAutoRepeat(grid, ForRows));

    grid.setNeedsItemsPlacement(false);

#if ENABLE(ASSERT)
    // Postcondition: every placed item has a translated-definite area.
    for (auto* child = grid.orderIterator().first(); child; child = grid.orderIterator().next()) {
        if (grid.orderIterator().shouldSkipChild(*child))
            continue;

        GridArea area = grid.gridItemArea(*child);
        ASSERT(area.rows.isTranslatedDefinite() && area.columns.isTranslatedDefinite());
    }
#endif
}
/** Describes a type that can be created anew. Used by <a href="@org-openide-nodes/org/openide/nodes.Node#getNewTypes">Node.getNewTypes</a>.
*
* @author Jaroslav Tulach
*/
public abstract class NewType extends Object implements HelpCtx.Provider {

    /**
     * Display name of the creation action, suitable for presentation as a
     * menu item.
     *
     * @return the name of the action
     */
    public String getName() {
        return NbBundle.getBundle(NewType.class).getString("Create");
    }

    /**
     * Help context associated with the creation action.
     *
     * @return the help context
     */
    public HelpCtx getHelpCtx() {
        return HelpCtx.DEFAULT_HELP;
    }

    /**
     * Creates the object.
     *
     * @exception IOException if something fails
     */
    public abstract void create() throws IOException;
}