/**
 * Tests whether the given ipAddress exists in the table.
 *
 * @param ipAddress the IP address to look up
 * @return true if the address exists, false otherwise
 * @throws LoggerException if the database is unavailable or the query fails
 */
public boolean ipExist(String ipAddress) throws LoggerException {
    PreparedStatement st = null;
    ResultSet res = null;
    try {
        if (conn == null || conn.isClosed()) {
            throw new LoggerException("Database null or closed");
        }
        // Use a parameterized query instead of string concatenation,
        // so the driver escapes ipAddress and SQL injection is impossible.
        String sqlRequest = "SELECT count(*) FROM " + TABLE_NAME
                + " WHERE " + IP_ADDRESS + " = ?";
        st = conn.prepareStatement(sqlRequest);
        st.setString(1, ipAddress);
        res = st.executeQuery();
        res.next();
        return res.getInt(1) > 0;
    } catch (SQLException e) {
        throw new LoggerException("IPcompanyTable:ipExist(String ipAddress) problem: ("
                + e.getErrorCode() + ") " + e.getMessage());
    } finally {
        try {
            if (res != null) {
                res.close();
            }
            if (st != null) {
                st.close();
            }
        } catch (SQLException e) {
            // Ignore errors on close; the result has already been determined.
        }
    }
}
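The fix above swaps string concatenation for a PreparedStatement so the driver binds the value safely. As a minimal cross-language sketch of the same pattern in Python's sqlite3 (the table and column names are illustrative, not from the original):

import sqlite3

def ip_exists(conn: sqlite3.Connection, ip_address: str) -> bool:
    # The "?" placeholder lets the driver bind ip_address safely,
    # so a crafted input cannot alter the query.
    cur = conn.execute(
        "SELECT count(*) FROM ip_company WHERE ip_address = ?",
        (ip_address,),
    )
    return cur.fetchone()[0] > 0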
import { Uri, workspace } from "vscode";
import * as util from "util";
import * as path from "path";
import * as winreg from "winreg";
import * as fs from "fs";
import * as tmp from "tmp-promise";
import { exec } from "child_process";
import * as vscode from "vscode";
import * as crypto from "crypto";
import { TestInfo, TestSuiteInfo } from "vscode-test-adapter-api";
import { findTests } from "./parser";
import { appendFile as _appendFile } from "fs";
import { TestthatAdapter } from "./adapter";
import { lookpath } from "lookpath";

const appendFile = util.promisify(_appendFile);

let RscriptPath: string | undefined;

export async function runSingleTestFile(
    adapter: TestthatAdapter,
    filePath: string
): Promise<string> {
    let cleanFilePath = filePath.replace(/\\/g, "/");
    let projectDirMatch = cleanFilePath.match(/(.+?)\/tests\/testthat.+?/i);
    let devtoolsCall = `options("testthat.use_colours"=F);devtools::test_file('${cleanFilePath}')`;
    let RscriptCommand = await getRscriptCommand(adapter);
    let command = `${RscriptCommand} -e "${devtoolsCall}"`;
    let cwd = projectDirMatch
        ? projectDirMatch[1]
        : vscode.workspace.workspaceFolders![0].uri.fsPath;
    return new Promise(async (resolve, reject) => {
        let childProcess = exec(command, { cwd }, (err, stdout: string, stderr: string) => {
            adapter.childProcess = undefined;
            if (err) reject(stderr);
            resolve(stdout);
        });
        adapter.childProcess = childProcess;
    });
}

export async function runDescribeTestSuite(adapter: TestthatAdapter, suite: TestSuiteInfo) {
    let documentUri = Uri.file(suite.file!);
    let document = await workspace.openTextDocument(documentUri);
    let source = document.getText();
    let allTests = await findTests(documentUri);

    for (const parsedTest of allTests) {
        const { testSuperLabel, testStartIndex, testEndIndex, testSuperEndIndex } = parsedTest;
        if (testEndIndex >= source.length) break;
        if (testSuperLabel != suite.label) {
            source =
                source.slice(0, testStartIndex) +
                " ".repeat(testEndIndex - testStartIndex) +
                source.slice(testEndIndex!);
        } else {
            source = source.slice(0, testSuperEndIndex!);
            break;
        }
    }

    let randomFileInfix = randomChars();
    let tmpFileName = `test-${randomFileInfix}.R`;
    let tmpFilePath = path.normalize(path.join(path.dirname(suite.file!), tmpFileName));
    adapter.tempFilePaths.add(tmpFilePath);
    // Do not clean up tempFilePaths, not possible to get around the race condition
    // cleanup is not guaranteed to unlink the file immediately
    let tmpFileResult = await tmp.file({
        name: tmpFileName,
        tmpdir: path.dirname(suite.file!),
    });
    await appendFile(tmpFilePath, source);
    return runSingleTestFile(adapter, tmpFilePath)
        .catch(async (err) => {
            await tmpFileResult.cleanup();
            throw err;
        })
        .then(async (value) => {
            await tmpFileResult.cleanup();
            return value;
        });
}

export async function runSingleTest(adapter: TestthatAdapter, test: TestInfo) {
    let documentUri = Uri.file(test.file!);
    let document = await workspace.openTextDocument(documentUri);
    let source = document.getText();
    let allTests = await findTests(documentUri);

    for (const parsedTest of allTests) {
        const { testStartIndex, testEndIndex, testSuperEndIndex, testLabel } = parsedTest;
        if (testEndIndex >= source.length) break;
        if (testLabel != test.label) {
            source =
                source.slice(0, testStartIndex) +
                " ".repeat(testEndIndex - testStartIndex) +
                source.slice(testEndIndex!);
        } else {
            let lastIndex = testSuperEndIndex ? testSuperEndIndex : testEndIndex;
            source = source.slice(0, lastIndex);
        }
    }

    let randomFileInfix = randomChars();
    let tmpFileName = `test-${randomFileInfix}.R`;
    let tmpFilePath = path.normalize(path.join(path.dirname(test.file!), tmpFileName));
    adapter.tempFilePaths.add(tmpFilePath);
    // Do not clean up tempFilePaths, not possible to get around the race condition
    // cleanup is not guaranteed to unlink the file immediately
    let tmpFileResult = await tmp.file({
        name: tmpFileName,
        tmpdir: path.dirname(test.file!),
    });
    await appendFile(tmpFilePath, source);
    return runSingleTestFile(adapter, tmpFilePath)
        .catch(async (err) => {
            await tmpFileResult.cleanup();
            throw err;
        })
        .then(async (value) => {
            await tmpFileResult.cleanup();
            return value;
        });
}

async function getRscriptCommand(adapter: TestthatAdapter) {
    let config = vscode.workspace.getConfiguration("RTestAdapter");
    let configPath: string | undefined = config.get("RscriptPath");
    if (configPath !== undefined && configPath !== null) {
        if ((<string>configPath).length > 0 && fs.existsSync(configPath))
            return Promise.resolve(`"${configPath}"`);
        else {
            adapter.log.warn(
                `Rscript path given in the configuration ${configPath} is invalid. Falling back to defaults.`
            );
        }
    }
    if (RscriptPath !== undefined) return Promise.resolve(`"${RscriptPath}"`);
    RscriptPath = await lookpath("Rscript");
    if (RscriptPath !== undefined) return Promise.resolve(`"${RscriptPath}"`);
    if (process.platform != "win32") {
        let candidates = ["/usr/bin", "/usr/local/bin"];
        for (const candidate of candidates) {
            let possibleRscriptPath = path.join(candidate, "Rscript");
            if (fs.existsSync(possibleRscriptPath)) {
                adapter.log.info(`found Rscript among candidate paths: ${possibleRscriptPath}`);
                RscriptPath = possibleRscriptPath;
                return Promise.resolve(`"${RscriptPath}"`);
            }
        }
    } else {
        try {
            const key = new winreg({
                hive: winreg.HKLM,
                key: "\\Software\\R-Core\\R",
            });
            const item: winreg.RegistryItem = await new Promise((resolve, reject) =>
                key.get("InstallPath", (err, result) => (err ? reject(err) : resolve(result)))
            );
            const rhome = item.value;
            let possibleRscriptPath = rhome + "\\bin\\Rscript.exe";
            if (fs.existsSync(possibleRscriptPath)) {
                adapter.log.info(`found Rscript in registry: ${possibleRscriptPath}`);
                RscriptPath = possibleRscriptPath;
                return Promise.resolve(`"${RscriptPath}"`);
            }
        } catch (e) {}
    }
    throw Error("Rscript could not be found in PATH, cannot run the tests.");
}

function randomChars() {
    const RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
    const count = 12;
    let value = [],
        rnd = null;

    // make sure that we do not fail because we ran out of entropy
    try {
        rnd = crypto.randomBytes(count);
    } catch (e) {
        rnd = crypto.pseudoRandomBytes(count);
    }

    for (var i = 0; i < 12; i++) {
        value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
    }

    return value.join("");
}

export const _unittestable = {
    getRscriptCommand,
    randomChars,
};
def _record_name(self, name):
    if not isinstance(name, str):
        raise ValueError("record name must be a string")
    return self.SEPARATOR.join(
        [morpheme for morpheme in [self.prefix, name, self.suffix]
         if morpheme is not None])
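A minimal sketch of how this joining behaves, with an assumed host class (Recorder and SEPARATOR = "." are illustrative; only the method body comes from the original):

class Recorder:
    SEPARATOR = "."

    def __init__(self, prefix=None, suffix=None):
        self.prefix = prefix
        self.suffix = suffix

    def _record_name(self, name):
        if not isinstance(name, str):
            raise ValueError("record name must be a string")
        return self.SEPARATOR.join(
            [m for m in [self.prefix, name, self.suffix] if m is not None])

# None-valued morphemes are simply dropped, so no stray separators appear:
assert Recorder(prefix="metrics")._record_name("latency") == "metrics.latency"
assert Recorder(suffix="count")._record_name("requests") == "requests.count"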
// graphql-node-post/node_modules/apollo-server-core/src/gql.ts

// This currently provides the ability to have syntax highlighting as well as
// consistency between client and server gql tags
import type { DocumentNode } from 'graphql';
import gqlTag from 'graphql-tag';

export const gql: (
    template: TemplateStringsArray | string,
    ...substitutions: any[]
) => DocumentNode = gqlTag;
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import functools

from newrelic.api.time_trace import current_trace, record_exception
from newrelic.common.object_wrapper import FunctionWrapper, wrap_object


class ErrorTrace(object):

    def __init__(self, ignore_errors=[], **kwargs):
        parent = None
        if kwargs:
            if len(kwargs) > 1:
                raise TypeError("Invalid keyword arguments:", kwargs)
            parent = kwargs['parent']
        if parent is None:
            parent = current_trace()

        self._transaction = parent and parent.transaction
        self._ignore_errors = ignore_errors

    def __enter__(self):
        return self

    def __exit__(self, exc, value, tb):
        if exc is None or value is None or tb is None:
            return

        if self._transaction is None:
            return

        record_exception(exc=exc, value=value, tb=tb,
                ignore_errors=self._ignore_errors)


def ErrorTraceWrapper(wrapped, ignore_errors=[]):
    def wrapper(wrapped, instance, args, kwargs):
        parent = current_trace()
        if parent is None:
            return wrapped(*args, **kwargs)

        with ErrorTrace(ignore_errors, parent=parent):
            return wrapped(*args, **kwargs)

    return FunctionWrapper(wrapped, wrapper)


def error_trace(ignore_errors=[]):
    return functools.partial(ErrorTraceWrapper, ignore_errors=ignore_errors)


def wrap_error_trace(module, object_path, ignore_errors=[]):
    wrap_object(module, object_path, ErrorTraceWrapper, (ignore_errors,))
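As a usage sketch grounded in the code above: error_trace wraps a function so exceptions raised inside it are recorded against the current transaction, and ErrorTrace does the same as a context manager. The import path and do_work are assumptions for illustration (in the New Relic Python agent these names appear to live under newrelic.api.error_trace):

from newrelic.api.error_trace import ErrorTrace, error_trace  # assumed path

@error_trace()
def handle_request(payload):
    # Exceptions raised here are recorded, then propagate as usual.
    return do_work(payload)  # do_work is illustrative

def handle_request_explicit(payload):
    # Same recording behavior, spelled out as a context manager.
    with ErrorTrace():
        return do_work(payload)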
#include "utils.hpp" #include <cstring> #ifndef _WIN32 extern "C" { #include <pwd.h> #include <sys/types.h> #include <unistd.h> } #endif namespace util { std::mt19937_64& rng() { static thread_local std::mt19937_64 generator{std::random_device{}()}; return generator; } uint64_t uniform_distribution_portable(std::mt19937_64& mersenne_twister, uint64_t n) { const uint64_t secure_max = mersenne_twister.max() - mersenne_twister.max() % n; uint64_t x; do x = mersenne_twister(); while (x >= secure_max); return x / (secure_max / n); } int get_fd_limit() { #ifdef _WIN32 return -1; #endif return sysconf(_SC_OPEN_MAX); } std::optional<std::filesystem::path> get_home_dir() { /// TODO: support default dir for Windows #ifndef WIN32 char* home = getenv("HOME"); if (!home || !strlen(home)) if (const auto* pwd = getpwuid(getuid())) home = pwd->pw_dir; if (home && strlen(home)) return std::filesystem::u8path(home); #endif return std::nullopt; } } // namespace util
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package circleland.Attacks;

import circleland.CircleAttack;
import circleland.CircleEntity;
import circleland.CircleLandUtility;
import circleland.CircleMap;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.geom.Point2D;

/**
 *
 * @author Jeff
 */
public class BounceAttack extends CircleAttack {

    private static final Color OUTLINE_COLOR = Color.WHITE;

    private int bounceRadius;
    private double bounceSpeed;
    private Color bulletColor;
    private CircleEntity nextBounce;

    public BounceAttack(CircleEntity owner, int life, int dmg, int pierce,
            double x, double y, double velX, double velY,
            double bSpeed, int bounceR, int bSize, Color bColor) {
        super(owner, life, dmg, pierce, x, y, velX, velY, bSize, bColor);
        hitSound = "sounds/Hit6.wav";
        bounceRadius = bounceR;
        bounceSpeed = bSpeed;
        bulletColor = bColor; // was never assigned; draw() reads this field
    }

    public void update(long deltaTime, CircleMap world) {
        // Move the attack.
        life -= deltaTime;
        if (nextBounce == null) {
            position.x += velocity.x * deltaTime / 1000;
            position.y += velocity.y * deltaTime / 1000;
        } else {
            // Home in on the bounce target: atan2 of the offset gives the
            // heading, which is scaled into velocity components by bounceSpeed.
            double deltaY = nextBounce.position().y - position.y;
            double deltaX = nextBounce.position().x - position.x;
            double heading = Math.atan2(deltaY, deltaX);
            velocity.x = bounceSpeed * Math.cos(heading);
            velocity.y = bounceSpeed * Math.sin(heading);
            position.x += velocity.x * deltaTime / 1000;
            position.y += velocity.y * deltaTime / 1000;
        }

        // Check for collision with player or monster.
        if (team == CircleLandUtility.MONSTER_SIDE) { // monsters attacking players
            boolean hitCircle = false;
            for (CircleEntity player : world.players()) {
                if (intersectsCircle(player)) {
                    if (!hitAlready.contains(player)) {
                        player.hitByAttack(this, world);
                        hitAlready.add(player);
                        piercingLeft -= 1;
                        hitCircle = true;
                        if (nextBounce != null) nextBounce = null;
                        break;
                    }
                }
            }
            if (hitCircle) { // hit a player, target the next player
                for (CircleEntity player : world.players()) {
                    // player is within bounce radius
                    if (distanceBetweenPoints(player.position(), position) <= bounceRadius) {
                        if (!hitAlready.contains(player)) nextBounce = player;
                    }
                }
            }
        }
        if (team == CircleLandUtility.PLAYER_SIDE) { // players attacking monsters
            boolean hitCircle = false;
            for (CircleEntity monster : world.monsters()) {
                if (intersectsCircle(monster)) {
                    if (!hitAlready.contains(monster)) {
                        monster.hitByAttack(this, world);
                        hitAlready.add(monster);
                        piercingLeft -= 1;
                        hitCircle = true;
                        if (nextBounce != null) nextBounce = null;
                        break;
                    }
                }
            }
            if (hitCircle) { // hit a monster, target the next monster
                for (CircleEntity monster : world.monsters()) {
                    // monster is within bounce radius
                    if (distanceBetweenPoints(monster.position(), position) <= bounceRadius) {
                        if (!hitAlready.contains(monster)) nextBounce = monster;
                    }
                }
            }
        }
    }

    public void draw(Graphics2D graphics) {
        graphics.setColor(OUTLINE_COLOR);
        graphics.fillOval((int) (position.x - (size + 4) / 2),
                (int) (position.y - (size + 4) / 2),
                (int) size + 4, (int) size + 4);
        graphics.setColor(bulletColor);
        graphics.fillOval((int) (position.x - size / 2),
                (int) (position.y - size / 2),
                (int) size, (int) size);
    }
}
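The homing step in update() is plain trigonometry: atan2 of the target offset gives a heading angle, and the velocity is that heading decomposed into x/y components at the bounce speed. A minimal Python sketch of the same step:

import math

def bounce_velocity(pos, target, speed):
    # Heading from the projectile toward the bounce target.
    heading = math.atan2(target[1] - pos[1], target[0] - pos[0])
    # Decompose the heading into velocity components at the given speed.
    return (speed * math.cos(heading), speed * math.sin(heading))

vx, vy = bounce_velocity((0.0, 0.0), (3.0, 4.0), 10.0)  # -> (6.0, 8.0)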
# relation_ids, relation_list and relation_get are Juju charm hook helpers
# provided elsewhere in the charm.
def peer_ips(peer_relation='cluster', addr_key='private-address'):
    peers = {}
    for r_id in relation_ids(peer_relation):
        for unit in relation_list(r_id):
            peers[unit] = relation_get(addr_key, rid=r_id, unit=unit)
    return peers
Rigged. (Jae C. Hong/AP Photo)

Bernie Sanders is correct both that the 2016 Democratic presidential nomination was rigged against him and that the chief culprit is Democratic National Committee Chairwoman Debbie Wasserman Schultz.

It goes far beyond the superdelegate party regulars who support her almost unanimously and now make up most of her delegate lead. It goes beyond the near-riot at the Nevada state Democratic convention, or even the closed nominating contests that favored Clinton even when the voters didn't.

Party chairs are supposed to remain neutral in presidential races, but Wasserman Schultz, who served as co-chair of Hillary Clinton's campaign in 2008, has not been neutral at all. She sharply limited the number of debates and scheduled them for times when few were likely to watch, such as Saturday nights, to limit Clinton's exposure to attacks and deny her opponents name recognition. She entered into a joint fundraising agreement with the Clinton campaign in August 2015, then shut off Sanders' access to the party voter database last December, with the first primaries and caucuses just a few weeks out.

If Republicans tried to do these sorts of favors for their establishment candidate, former Gov. Jeb Bush, Republicans would clean house at the national committee, and party chairman Reince Priebus would be looking for a job.

And here's the best part, as far as Republicans are concerned: it's going to hurt the Democrats, and badly.

Sanders has won four of the last five Democratic primaries and is poised to win more on the final big day of primaries on June 7. He's not dropping out, even if Clinton clinches the nomination as expected. He wants to go to Philadelphia, accept the nomination if Clinton is indicted or otherwise can't claim the mantle, and push for progressive planks in the Democratic platform.

He has suggested things could get interesting. Asked if the convention could get rough, Sanders said: "So what? Democracy is messy. Every day my life is messy. But if you want everything to be quiet and orderly and allow, you know, just things to proceed without vigorous debate … that is not what democracy is about."

Meanwhile, Clinton's double-digit lead over Trump has been erased. Some polls even have the mogul ahead. Clinton is not a great campaigner. Republicans have begun to line up behind Trump. And Trump and Sanders have come to have something in common that poses a serious threat to Clinton – both poll well among independents.

Thus, one recent poll said only two-thirds of Sanders' primary supporters planned to vote for Clinton if she is the nominee. In fact, one in six in most polls, but up to 20 percent in others, say they would vote for Trump rather than Clinton.

In short, the winner of the Bernie Sanders-Clinton/Wasserman Schultz/Democratic machine spat is Donald Trump. The Democrats can't unite. Their candidate is uninspiring on the stump. The apparent loser of the primaries has all the momentum. And those disaffected over how he has been treated might well bolt for the Donald.

In which case, Bernie Sanders may not appear on the presidential ballot, but his supporters may well determine the outcome.
package de.sstoehr.harreader.model;

import org.junit.Assert;
import org.junit.Test;

public class HttpStatusTest {

    @Test
    public void testByCode() {
        for (HttpStatus status : HttpStatus.values()) {
            Assert.assertEquals(status, HttpStatus.byCode(status.getCode()));
        }
    }

    @Test
    public void test302() {
        Assert.assertEquals(HttpStatus.FOUND, HttpStatus.byCode(302));
    }

    @Test
    public void testInvalidCode() {
        Assert.assertEquals(HttpStatus.UNKNOWN_HTTP_STATUS, HttpStatus.byCode(0));
        Assert.assertEquals(HttpStatus.UNKNOWN_HTTP_STATUS, HttpStatus.byCode(1000));
        Assert.assertEquals(HttpStatus.UNKNOWN_HTTP_STATUS, HttpStatus.byCode(-999));
    }
}
# app_ids_set and app_names are module-level collections defined elsewhere.
def update_apps(app_id, app_name):
    if app_id not in app_ids_set:
        app_names.append([app_id, app_name])
/**
 * Class for handling the BGP peer sessions.
 * There is one instance per each BGP peer session.
 */
public class BgpSession extends SimpleChannelHandler {
    private static final Logger log =
        LoggerFactory.getLogger(BgpSession.class);

    private final BgpSessionManager bgpSessionManager;

    // Local flag to indicate the session is closed.
    // It is used to avoid the Netty's asynchronous closing of a channel.
    private boolean isClosed = false;

    private SocketAddress remoteAddress;        // Peer IP addr/port
    private Ip4Address remoteIp4Address;        // Peer IPv4 address
    private int remoteBgpVersion;               // 1 octet
    private long remoteAs;                      // 2 octets
    private long remoteHoldtime;                // 2 octets
    private Ip4Address remoteBgpId;             // 4 octets -> IPv4 address
    //
    private SocketAddress localAddress;         // Local IP addr/port
    private Ip4Address localIp4Address;         // Local IPv4 address
    private int localBgpVersion;                // 1 octet
    private long localAs;                       // 2 octets
    private long localHoldtime;                 // 2 octets
    private Ip4Address localBgpId;              // 4 octets -> IPv4 address
    //
    private long localKeepaliveInterval;        // Keepalive interval

    // Timers state
    private Timer timer = new HashedWheelTimer();
    private volatile Timeout keepaliveTimeout;  // Periodic KEEPALIVE
    private volatile Timeout sessionTimeout;    // Session timeout

    // BGP RIB-IN routing entries from this peer
    private ConcurrentMap<Ip4Prefix, BgpRouteEntry> bgpRibIn =
        new ConcurrentHashMap<>();

    /**
     * Constructor for a given BGP Session Manager.
     *
     * @param bgpSessionManager the BGP Session Manager to use
     */
    BgpSession(BgpSessionManager bgpSessionManager) {
        this.bgpSessionManager = bgpSessionManager;
    }

    /**
     * Gets the BGP RIB-IN routing entries.
     *
     * @return the BGP RIB-IN routing entries
     */
    public Collection<BgpRouteEntry> getBgpRibIn() {
        return bgpRibIn.values();
    }

    /**
     * Finds a BGP routing entry in the BGP RIB-IN.
     *
     * @param prefix the prefix of the route to search for
     * @return the BGP routing entry if found, otherwise null
     */
    public BgpRouteEntry findBgpRouteEntry(Ip4Prefix prefix) {
        return bgpRibIn.get(prefix);
    }

    /**
     * Gets the BGP session remote address.
     *
     * @return the BGP session remote address
     */
    public SocketAddress getRemoteAddress() {
        return remoteAddress;
    }

    /**
     * Gets the BGP session remote IPv4 address.
     *
     * @return the BGP session remote IPv4 address
     */
    public Ip4Address getRemoteIp4Address() {
        return remoteIp4Address;
    }

    /**
     * Gets the BGP session remote BGP version.
     *
     * @return the BGP session remote BGP version
     */
    public int getRemoteBgpVersion() {
        return remoteBgpVersion;
    }

    /**
     * Gets the BGP session remote AS number.
     *
     * @return the BGP session remote AS number
     */
    public long getRemoteAs() {
        return remoteAs;
    }

    /**
     * Gets the BGP session remote Holdtime.
     *
     * @return the BGP session remote Holdtime
     */
    public long getRemoteHoldtime() {
        return remoteHoldtime;
    }

    /**
     * Gets the BGP session remote BGP Identifier as an IPv4 address.
     *
     * @return the BGP session remote BGP Identifier as an IPv4 address
     */
    public Ip4Address getRemoteBgpId() {
        return remoteBgpId;
    }

    /**
     * Gets the BGP session local address.
     *
     * @return the BGP session local address
     */
    public SocketAddress getLocalAddress() {
        return localAddress;
    }

    /**
     * Gets the BGP session local BGP version.
     *
     * @return the BGP session local BGP version
     */
    public int getLocalBgpVersion() {
        return localBgpVersion;
    }

    /**
     * Gets the BGP session local AS number.
     *
     * @return the BGP session local AS number
     */
    public long getLocalAs() {
        return localAs;
    }

    /**
     * Gets the BGP session local Holdtime.
     *
     * @return the BGP session local Holdtime
     */
    public long getLocalHoldtime() {
        return localHoldtime;
    }

    /**
     * Gets the BGP session local BGP Identifier as an IPv4 address.
     *
     * @return the BGP session local BGP Identifier as an IPv4 address
     */
    public Ip4Address getLocalBgpId() {
        return localBgpId;
    }

    /**
     * Tests whether the session is closed.
     * <p>
     * NOTE: We use this method to avoid the Netty's asynchronous closing
     * of a channel.
     * </p>
     * @return true if the session is closed
     */
    boolean isClosed() {
        return isClosed;
    }

    /**
     * Closes the session.
     *
     * @param ctx the Channel Handler Context
     */
    void closeSession(ChannelHandlerContext ctx) {
        timer.stop();
        closeChannel(ctx);
    }

    /**
     * Closes the Netty channel.
     *
     * @param ctx the Channel Handler Context
     */
    void closeChannel(ChannelHandlerContext ctx) {
        isClosed = true;
        ctx.getChannel().close();
    }

    @Override
    public void channelOpen(ChannelHandlerContext ctx,
                            ChannelStateEvent channelEvent) {
        bgpSessionManager.addSessionChannel(channelEvent.getChannel());
    }

    @Override
    public void channelClosed(ChannelHandlerContext ctx,
                              ChannelStateEvent channelEvent) {
        bgpSessionManager.removeSessionChannel(channelEvent.getChannel());
    }

    @Override
    public void channelConnected(ChannelHandlerContext ctx,
                                 ChannelStateEvent channelEvent) {
        localAddress = ctx.getChannel().getLocalAddress();
        remoteAddress = ctx.getChannel().getRemoteAddress();

        // Assign the local and remote IPv4 addresses
        InetAddress inetAddr;
        if (localAddress instanceof InetSocketAddress) {
            inetAddr = ((InetSocketAddress) localAddress).getAddress();
            localIp4Address = Ip4Address.valueOf(inetAddr.getAddress());
        }
        if (remoteAddress instanceof InetSocketAddress) {
            inetAddr = ((InetSocketAddress) remoteAddress).getAddress();
            remoteIp4Address = Ip4Address.valueOf(inetAddr.getAddress());
        }

        log.debug("BGP Session Connected from {} on {}",
                  remoteAddress, localAddress);
        if (!bgpSessionManager.peerConnected(this)) {
            log.debug("Cannot setup BGP Session Connection from {}. Closing...",
                      remoteAddress);
            ctx.getChannel().close();
        }
    }

    @Override
    public void channelDisconnected(ChannelHandlerContext ctx,
                                    ChannelStateEvent channelEvent) {
        log.debug("BGP Session Disconnected from {} on {}",
                  ctx.getChannel().getRemoteAddress(),
                  ctx.getChannel().getLocalAddress());
        processChannelDisconnected();
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx,
                                ExceptionEvent e) {
        log.debug("BGP Session Exception Caught from {} on {}: {}",
                  ctx.getChannel().getRemoteAddress(),
                  ctx.getChannel().getLocalAddress(), e);
        processChannelDisconnected();
    }

    /**
     * Processes the channel being disconnected.
     */
    private void processChannelDisconnected() {
        //
        // Withdraw the routes advertised by this BGP peer
        //
        // NOTE: We must initialize the RIB-IN before propagating the withdraws
        //       for further processing. Otherwise, the BGP Decision Process
        //       will use those routes again.
        //
        Collection<BgpRouteEntry> deletedRoutes = bgpRibIn.values();
        bgpRibIn = new ConcurrentHashMap<>();

        // Push the updates to the BGP Merged RIB
        BgpSessionManager.BgpRouteSelector bgpRouteSelector =
            bgpSessionManager.getBgpRouteSelector();
        Collection<BgpRouteEntry> addedRoutes = Collections.emptyList();
        bgpRouteSelector.routeUpdates(this, addedRoutes, deletedRoutes);

        bgpSessionManager.peerDisconnected(this);
    }

    /**
     * Processes BGP OPEN message.
     *
     * @param ctx the Channel Handler Context
     * @param message the message to process
     */
    void processBgpOpen(ChannelHandlerContext ctx, ChannelBuffer message) {
        int minLength =
            BgpConstants.BGP_OPEN_MIN_LENGTH - BgpConstants.BGP_HEADER_LENGTH;
        if (message.readableBytes() < minLength) {
            log.debug("BGP RX OPEN Error from {}: " +
                      "Message length {} too short. Must be at least {}",
                      remoteAddress, message.readableBytes(), minLength);
            //
            // ERROR: Bad Message Length
            //
            // Send NOTIFICATION and close the connection
            ChannelBuffer txMessage = prepareBgpNotificationBadMessageLength(
                message.readableBytes() + BgpConstants.BGP_HEADER_LENGTH);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }

        //
        // Parse the OPEN message
        //
        // Remote BGP version
        remoteBgpVersion = message.readUnsignedByte();
        if (remoteBgpVersion != BgpConstants.BGP_VERSION) {
            log.debug("BGP RX OPEN Error from {}: " +
                      "Unsupported BGP version {}. Should be {}",
                      remoteAddress, remoteBgpVersion,
                      BgpConstants.BGP_VERSION);
            //
            // ERROR: Unsupported Version Number
            //
            // Send NOTIFICATION and close the connection
            int errorCode = OpenMessageError.ERROR_CODE;
            int errorSubcode = OpenMessageError.UNSUPPORTED_VERSION_NUMBER;
            ChannelBuffer data = ChannelBuffers.buffer(2);
            data.writeShort(BgpConstants.BGP_VERSION);
            ChannelBuffer txMessage =
                prepareBgpNotification(errorCode, errorSubcode, data);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }

        // Remote AS number
        remoteAs = message.readUnsignedShort();
        //
        // Verify that the AS number is same for all other BGP Sessions
        // NOTE: This check applies only for our use-case where all BGP
        // sessions are iBGP.
        //
        for (BgpSession bgpSession : bgpSessionManager.getBgpSessions()) {
            if (remoteAs != bgpSession.getRemoteAs()) {
                log.debug("BGP RX OPEN Error from {}: Bad Peer AS {}. " +
                          "Expected {}",
                          remoteAddress, remoteAs, bgpSession.getRemoteAs());
                //
                // ERROR: Bad Peer AS
                //
                // Send NOTIFICATION and close the connection
                int errorCode = OpenMessageError.ERROR_CODE;
                int errorSubcode = OpenMessageError.BAD_PEER_AS;
                ChannelBuffer txMessage =
                    prepareBgpNotification(errorCode, errorSubcode, null);
                ctx.getChannel().write(txMessage);
                closeSession(ctx);
                return;
            }
        }

        // Remote Hold Time
        remoteHoldtime = message.readUnsignedShort();
        if ((remoteHoldtime != 0) &&
            (remoteHoldtime < BgpConstants.BGP_KEEPALIVE_MIN_HOLDTIME)) {
            log.debug("BGP RX OPEN Error from {}: " +
                      "Unacceptable Hold Time field {}. " +
                      "Should be 0 or at least {}",
                      remoteAddress, remoteHoldtime,
                      BgpConstants.BGP_KEEPALIVE_MIN_HOLDTIME);
            //
            // ERROR: Unacceptable Hold Time
            //
            // Send NOTIFICATION and close the connection
            int errorCode = OpenMessageError.ERROR_CODE;
            int errorSubcode = OpenMessageError.UNACCEPTABLE_HOLD_TIME;
            ChannelBuffer txMessage =
                prepareBgpNotification(errorCode, errorSubcode, null);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }

        // Remote BGP Identifier
        remoteBgpId = Ip4Address.valueOf((int) message.readUnsignedInt());

        // Optional Parameters
        int optParamLen = message.readUnsignedByte();
        if (message.readableBytes() < optParamLen) {
            log.debug("BGP RX OPEN Error from {}: " +
                      "Invalid Optional Parameter Length field {}. " +
                      "Remaining Optional Parameters {}",
                      remoteAddress, optParamLen, message.readableBytes());
            //
            // ERROR: Invalid Optional Parameter Length field: Unspecific
            //
            // Send NOTIFICATION and close the connection
            int errorCode = OpenMessageError.ERROR_CODE;
            int errorSubcode = Notifications.ERROR_SUBCODE_UNSPECIFIC;
            ChannelBuffer txMessage =
                prepareBgpNotification(errorCode, errorSubcode, null);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }
        // NOTE: Parse the optional parameters (if needed)
        message.readBytes(optParamLen);             // NOTE: data ignored

        //
        // Copy some of the remote peer's state/setup to the local setup:
        // - BGP version
        // - AS number (NOTE: the peer setup is always iBGP)
        // - Holdtime
        // Also, assign the local BGP ID based on the local setup
        //
        localBgpVersion = remoteBgpVersion;
        localAs = remoteAs;
        localHoldtime = remoteHoldtime;
        localBgpId = bgpSessionManager.getMyBgpId();

        // Set the Keepalive interval
        if (localHoldtime == 0) {
            localKeepaliveInterval = 0;
        } else {
            localKeepaliveInterval = Math.max(localHoldtime /
                     BgpConstants.BGP_KEEPALIVE_PER_HOLD_INTERVAL,
                     BgpConstants.BGP_KEEPALIVE_MIN_INTERVAL);
        }

        log.debug("BGP RX OPEN message from {}: " +
                  "BGPv{} AS {} BGP-ID {} Holdtime {}",
                  remoteAddress, remoteBgpVersion, remoteAs,
                  remoteBgpId, remoteHoldtime);

        // Send my OPEN followed by KEEPALIVE
        ChannelBuffer txMessage = prepareBgpOpen();
        ctx.getChannel().write(txMessage);
        //
        txMessage = prepareBgpKeepalive();
        ctx.getChannel().write(txMessage);

        // Start the KEEPALIVE timer
        restartKeepaliveTimer(ctx);

        // Start the Session Timeout timer
        restartSessionTimeoutTimer(ctx);
    }

    /**
     * Processes BGP UPDATE message.
     *
     * @param ctx the Channel Handler Context
     * @param message the message to process
     */
    void processBgpUpdate(ChannelHandlerContext ctx, ChannelBuffer message) {
        Collection<BgpRouteEntry> addedRoutes = null;
        Map<Ip4Prefix, BgpRouteEntry> deletedRoutes = new HashMap<>();

        int minLength =
            BgpConstants.BGP_UPDATE_MIN_LENGTH - BgpConstants.BGP_HEADER_LENGTH;
        if (message.readableBytes() < minLength) {
            log.debug("BGP RX UPDATE Error from {}: " +
                      "Message length {} too short. Must be at least {}",
                      remoteAddress, message.readableBytes(), minLength);
            //
            // ERROR: Bad Message Length
            //
            // Send NOTIFICATION and close the connection
            ChannelBuffer txMessage = prepareBgpNotificationBadMessageLength(
                message.readableBytes() + BgpConstants.BGP_HEADER_LENGTH);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }

        log.debug("BGP RX UPDATE message from {}", remoteAddress);

        //
        // Parse the UPDATE message
        //

        //
        // Parse the Withdrawn Routes
        //
        int withdrawnRoutesLength = message.readUnsignedShort();
        if (withdrawnRoutesLength > message.readableBytes()) {
            // ERROR: Malformed Attribute List
            actionsBgpUpdateMalformedAttributeList(ctx);
            return;
        }
        Collection<Ip4Prefix> withdrawnPrefixes = null;
        try {
            withdrawnPrefixes = parsePackedPrefixes(withdrawnRoutesLength,
                                                    message);
        } catch (BgpParseException e) {
            // ERROR: Invalid Network Field
            log.debug("Exception parsing Withdrawn Prefixes from BGP peer {}: ",
                      remoteBgpId, e);
            actionsBgpUpdateInvalidNetworkField(ctx);
            return;
        }
        for (Ip4Prefix prefix : withdrawnPrefixes) {
            log.debug("BGP RX UPDATE message WITHDRAWN from {}: {}",
                      remoteAddress, prefix);
            BgpRouteEntry bgpRouteEntry = bgpRibIn.get(prefix);
            if (bgpRouteEntry != null) {
                deletedRoutes.put(prefix, bgpRouteEntry);
            }
        }

        //
        // Parse the Path Attributes
        //
        try {
            addedRoutes = parsePathAttributes(ctx, message);
        } catch (BgpParseException e) {
            log.debug("Exception parsing Path Attributes from BGP peer {}: ",
                      remoteBgpId, e);
            // NOTE: The session was already closed, so nothing else to do
            return;
        }

        // Ignore WITHDRAWN routes that are ADDED
        for (BgpRouteEntry bgpRouteEntry : addedRoutes) {
            deletedRoutes.remove(bgpRouteEntry.prefix());
        }

        // Update the BGP RIB-IN
        for (BgpRouteEntry bgpRouteEntry : deletedRoutes.values()) {
            bgpRibIn.remove(bgpRouteEntry.prefix());
        }
        for (BgpRouteEntry bgpRouteEntry : addedRoutes) {
            bgpRibIn.put(bgpRouteEntry.prefix(), bgpRouteEntry);
        }

        // Push the updates to the BGP Merged RIB
        BgpSessionManager.BgpRouteSelector bgpRouteSelector =
            bgpSessionManager.getBgpRouteSelector();
        bgpRouteSelector.routeUpdates(this, addedRoutes,
                                      deletedRoutes.values());

        // Start the Session Timeout timer
        restartSessionTimeoutTimer(ctx);
    }

    /**
     * Parse BGP Path Attributes from the BGP UPDATE message.
     *
     * @param ctx the Channel Handler Context
     * @param message the message to parse
     * @return a collection of the result BGP Route Entries
     * @throws BgpParseException
     */
    private Collection<BgpRouteEntry> parsePathAttributes(
                        ChannelHandlerContext ctx,
                        ChannelBuffer message)
        throws BgpParseException {
        Map<Ip4Prefix, BgpRouteEntry> addedRoutes = new HashMap<>();

        //
        // Parsed values
        //
        Short origin = -1;                      // Mandatory
        BgpRouteEntry.AsPath asPath = null;     // Mandatory
        Ip4Address nextHop = null;              // Mandatory
        long multiExitDisc =                    // Optional
            BgpConstants.Update.MultiExitDisc.LOWEST_MULTI_EXIT_DISC;
        Long localPref = null;                  // Mandatory
        Long aggregatorAsNumber = null;         // Optional: unused
        Ip4Address aggregatorIpAddress = null;  // Optional: unused

        //
        // Get and verify the Path Attributes Length
        //
        int pathAttributeLength = message.readUnsignedShort();
        if (pathAttributeLength > message.readableBytes()) {
            // ERROR: Malformed Attribute List
            actionsBgpUpdateMalformedAttributeList(ctx);
            String errorMsg = "Malformed Attribute List";
            throw new BgpParseException(errorMsg);
        }
        if (pathAttributeLength == 0) {
            return addedRoutes.values();
        }

        //
        // Parse the Path Attributes
        //
        int pathAttributeEnd = message.readerIndex() + pathAttributeLength;
        while (message.readerIndex() < pathAttributeEnd) {
            int attrFlags = message.readUnsignedByte();
            if (message.readerIndex() >= pathAttributeEnd) {
                // ERROR: Malformed Attribute List
                actionsBgpUpdateMalformedAttributeList(ctx);
                String errorMsg = "Malformed Attribute List";
                throw new BgpParseException(errorMsg);
            }
            int attrTypeCode = message.readUnsignedByte();

            // The Attribute Flags
            boolean optionalBit =       ((0x80 & attrFlags) != 0);
            boolean transitiveBit =     ((0x40 & attrFlags) != 0);
            boolean partialBit =        ((0x20 & attrFlags) != 0);
            boolean extendedLengthBit = ((0x10 & attrFlags) != 0);

            // The Attribute Length
            int attrLen = 0;
            int attrLenOctets = 1;
            if (extendedLengthBit) {
                attrLenOctets = 2;
            }
            if (message.readerIndex() + attrLenOctets > pathAttributeEnd) {
                // ERROR: Malformed Attribute List
                actionsBgpUpdateMalformedAttributeList(ctx);
                String errorMsg = "Malformed Attribute List";
                throw new BgpParseException(errorMsg);
            }
            if (extendedLengthBit) {
                attrLen = message.readUnsignedShort();
            } else {
                attrLen = message.readUnsignedByte();
            }
            if (message.readerIndex() + attrLen > pathAttributeEnd) {
                // ERROR: Malformed Attribute List
                actionsBgpUpdateMalformedAttributeList(ctx);
                String errorMsg = "Malformed Attribute List";
                throw new BgpParseException(errorMsg);
            }

            //
            // Verify the Attribute Flags
            //
            verifyBgpUpdateAttributeFlags(ctx, attrTypeCode, attrLen,
                                          attrFlags, message);

            //
            // Extract the Attribute Value based on the Attribute Type Code
            //
            switch (attrTypeCode) {
            case BgpConstants.Update.Origin.TYPE:
                // Attribute Type Code ORIGIN
                origin = parseAttributeTypeOrigin(ctx, attrTypeCode, attrLen,
                                                  attrFlags, message);
                break;

            case BgpConstants.Update.AsPath.TYPE:
                // Attribute Type Code AS_PATH
                asPath = parseAttributeTypeAsPath(ctx, attrTypeCode, attrLen,
                                                  attrFlags, message);
                break;

            case BgpConstants.Update.NextHop.TYPE:
                // Attribute Type Code NEXT_HOP
                nextHop = parseAttributeTypeNextHop(ctx, attrTypeCode, attrLen,
                                                    attrFlags, message);
                break;

            case BgpConstants.Update.MultiExitDisc.TYPE:
                // Attribute Type Code MULTI_EXIT_DISC
                multiExitDisc = parseAttributeTypeMultiExitDisc(
                    ctx, attrTypeCode, attrLen, attrFlags, message);
                break;

            case BgpConstants.Update.LocalPref.TYPE:
                // Attribute Type Code LOCAL_PREF
                localPref = parseAttributeTypeLocalPref(ctx, attrTypeCode,
                                                        attrLen, attrFlags,
                                                        message);
                break;

            case BgpConstants.Update.AtomicAggregate.TYPE:
                // Attribute Type Code ATOMIC_AGGREGATE
                parseAttributeTypeAtomicAggregate(ctx, attrTypeCode, attrLen,
                                                  attrFlags, message);
                // Nothing to do: this attribute is primarily informational
                break;

            case BgpConstants.Update.Aggregator.TYPE:
                // Attribute Type Code AGGREGATOR
                Pair<Long, Ip4Address> aggregator =
                    parseAttributeTypeAggregator(ctx, attrTypeCode, attrLen,
                                                 attrFlags, message);
                aggregatorAsNumber = aggregator.getLeft();
                aggregatorIpAddress = aggregator.getRight();
                break;

            default:
                // NOTE: Parse any new Attribute Types if needed
                if (!optionalBit) {
                    // ERROR: Unrecognized Well-known Attribute
                    actionsBgpUpdateUnrecognizedWellKnownAttribute(
                        ctx, attrTypeCode, attrLen, attrFlags, message);
                    String errorMsg = "Unrecognized Well-known Attribute: " +
                        attrTypeCode;
                    throw new BgpParseException(errorMsg);
                }

                // Skip the data from the unrecognized attribute
                log.debug("BGP RX UPDATE message from {}: " +
                          "Unrecognized Attribute Type {}",
                          remoteAddress, attrTypeCode);
                message.skipBytes(attrLen);
                break;
            }
        }

        //
        // Verify the Well-known Attributes
        //
        verifyBgpUpdateWellKnownAttributes(ctx, origin, asPath, nextHop,
                                           localPref);

        //
        // Parse the NLRI (Network Layer Reachability Information)
        //
        Collection<Ip4Prefix> addedPrefixes = null;
        int nlriLength = message.readableBytes();
        try {
            addedPrefixes = parsePackedPrefixes(nlriLength, message);
        } catch (BgpParseException e) {
            // ERROR: Invalid Network Field
            log.debug("Exception parsing NLRI from BGP peer {}: ",
                      remoteBgpId, e);
            actionsBgpUpdateInvalidNetworkField(ctx);
            // Rethrow the exception
            throw e;
        }

        // Generate the added routes
        for (Ip4Prefix prefix : addedPrefixes) {
            BgpRouteEntry bgpRouteEntry =
                new BgpRouteEntry(this, prefix, nextHop,
                                  origin.byteValue(), asPath, localPref);
            bgpRouteEntry.setMultiExitDisc(multiExitDisc);
            if (bgpRouteEntry.hasAsPathLoop(localAs)) {
                log.debug("BGP RX UPDATE message IGNORED from {}: {} " +
                          "nextHop {}: contains AS Path loop",
                          remoteAddress, prefix, nextHop);
                continue;
            } else {
                log.debug("BGP RX UPDATE message ADDED from {}: {} nextHop {}",
                          remoteAddress, prefix, nextHop);
            }
            addedRoutes.put(prefix, bgpRouteEntry);
        }

        return addedRoutes.values();
    }

    /**
     * Verifies BGP UPDATE Well-known Attributes.
     *
     * @param ctx the Channel Handler Context
     * @param origin the ORIGIN well-known mandatory attribute
     * @param asPath the AS_PATH well-known mandatory attribute
     * @param nextHop the NEXT_HOP well-known mandatory attribute
     * @param localPref the LOCAL_PREF required attribute
     * @throws BgpParseException
     */
    private void verifyBgpUpdateWellKnownAttributes(
                        ChannelHandlerContext ctx,
                        Short origin,
                        BgpRouteEntry.AsPath asPath,
                        Ip4Address nextHop,
                        Long localPref)
        throws BgpParseException {
        //
        // Check for Missing Well-known Attributes
        //
        if ((origin == null) || (origin == -1)) {
            // Missing Attribute Type Code ORIGIN
            int type = BgpConstants.Update.Origin.TYPE;
            actionsBgpUpdateMissingWellKnownAttribute(ctx, type);
            String errorMsg = "Missing Well-known Attribute: ORIGIN";
            throw new BgpParseException(errorMsg);
        }
        if (asPath == null) {
            // Missing Attribute Type Code AS_PATH
            int type = BgpConstants.Update.AsPath.TYPE;
            actionsBgpUpdateMissingWellKnownAttribute(ctx, type);
            String errorMsg = "Missing Well-known Attribute: AS_PATH";
            throw new BgpParseException(errorMsg);
        }
        if (nextHop == null) {
            // Missing Attribute Type Code NEXT_HOP
            int type = BgpConstants.Update.NextHop.TYPE;
            actionsBgpUpdateMissingWellKnownAttribute(ctx, type);
            String errorMsg = "Missing Well-known Attribute: NEXT_HOP";
            throw new BgpParseException(errorMsg);
        }
        if (localPref == null) {
            // Missing Attribute Type Code LOCAL_PREF
            // NOTE: Required for iBGP
            int type = BgpConstants.Update.LocalPref.TYPE;
            actionsBgpUpdateMissingWellKnownAttribute(ctx, type);
            String errorMsg = "Missing Well-known Attribute: LOCAL_PREF";
            throw new BgpParseException(errorMsg);
        }
    }

    /**
     * Verifies the BGP UPDATE Attribute Flags.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @throws BgpParseException
     */
    private void verifyBgpUpdateAttributeFlags(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        //
        // Assign the Attribute Type Name and the Well-known flag
        //
        String typeName = "UNKNOWN";
        boolean isWellKnown = false;
        switch (attrTypeCode) {
        case BgpConstants.Update.Origin.TYPE:
            isWellKnown = true;
            typeName = "ORIGIN";
            break;
        case BgpConstants.Update.AsPath.TYPE:
            isWellKnown = true;
            typeName = "AS_PATH";
            break;
        case BgpConstants.Update.NextHop.TYPE:
            isWellKnown = true;
            typeName = "NEXT_HOP";
            break;
        case BgpConstants.Update.MultiExitDisc.TYPE:
            isWellKnown = false;
            typeName = "MULTI_EXIT_DISC";
            break;
        case BgpConstants.Update.LocalPref.TYPE:
            isWellKnown = true;
            typeName = "LOCAL_PREF";
            break;
        case BgpConstants.Update.AtomicAggregate.TYPE:
            isWellKnown = true;
            typeName = "ATOMIC_AGGREGATE";
            break;
        case BgpConstants.Update.Aggregator.TYPE:
            isWellKnown = false;
            typeName = "AGGREGATOR";
            break;
        default:
            isWellKnown = false;
            typeName = "UNKNOWN(" + attrTypeCode + ")";
            break;
        }

        //
        // Verify the Attribute Flags
        //
        boolean optionalBit =   ((0x80 & attrFlags) != 0);
        boolean transitiveBit = ((0x40 & attrFlags) != 0);
        boolean partialBit =    ((0x20 & attrFlags) != 0);
        if ((isWellKnown && optionalBit) ||
            (isWellKnown && (!transitiveBit)) ||
            (isWellKnown && partialBit) ||
            (optionalBit && (!transitiveBit) && partialBit)) {
            //
            // ERROR: The Optional bit cannot be set for Well-known attributes
            // ERROR: The Transitive bit MUST be 1 for well-known attributes
            // ERROR: The Partial bit MUST be 0 for well-known attributes
            // ERROR: The Partial bit MUST be 0 for optional non-transitive
            //        attributes
            //
            actionsBgpUpdateAttributeFlagsError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Flags Error for " + typeName + ": " +
                attrFlags;
            throw new BgpParseException(errorMsg);
        }
    }

    /**
     * Parses BGP UPDATE Attribute Type ORIGIN.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed ORIGIN value
     * @throws BgpParseException
     */
    private short parseAttributeTypeOrigin(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.Origin.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        message.markReaderIndex();
        short origin = message.readUnsignedByte();
        switch (origin) {
        case BgpConstants.Update.Origin.IGP:
            // FALLTHROUGH
        case BgpConstants.Update.Origin.EGP:
            // FALLTHROUGH
        case BgpConstants.Update.Origin.INCOMPLETE:
            break;
        default:
            // ERROR: Invalid ORIGIN Attribute
            message.resetReaderIndex();
            actionsBgpUpdateInvalidOriginAttribute(
                ctx, attrTypeCode, attrLen, attrFlags, message, origin);
            String errorMsg = "Invalid ORIGIN Attribute: " + origin;
            throw new BgpParseException(errorMsg);
        }

        return origin;
    }

    /**
     * Parses BGP UPDATE Attribute AS Path.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed AS Path
     * @throws BgpParseException
     */
    private BgpRouteEntry.AsPath parseAttributeTypeAsPath(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        ArrayList<BgpRouteEntry.PathSegment> pathSegments = new ArrayList<>();

        //
        // Parse the message
        //
        while (attrLen > 0) {
            if (attrLen < 2) {
                // ERROR: Malformed AS_PATH
                actionsBgpUpdateMalformedAsPath(ctx);
                String errorMsg = "Malformed AS Path";
                throw new BgpParseException(errorMsg);
            }
            // Get the Path Segment Type and Length (in number of ASes)
            short pathSegmentType = message.readUnsignedByte();
            short pathSegmentLength = message.readUnsignedByte();
            attrLen -= 2;

            // Verify the Path Segment Type
            switch (pathSegmentType) {
            case BgpConstants.Update.AsPath.AS_SET:
                // FALLTHROUGH
            case BgpConstants.Update.AsPath.AS_SEQUENCE:
                // FALLTHROUGH
            case BgpConstants.Update.AsPath.AS_CONFED_SEQUENCE:
                // FALLTHROUGH
            case BgpConstants.Update.AsPath.AS_CONFED_SET:
                break;
            default:
                // ERROR: Invalid Path Segment Type
                //
                // NOTE: The BGP Spec (RFC 4271) doesn't contain Error Subcode
                //       for "Invalid Path Segment Type", hence we return
                //       the error as "Malformed AS_PATH".
                //
                actionsBgpUpdateMalformedAsPath(ctx);
                String errorMsg = "Invalid AS Path Segment Type: " +
                    pathSegmentType;
                throw new BgpParseException(errorMsg);
            }

            // Parse the AS numbers
            if (2 * pathSegmentLength > attrLen) {
                // ERROR: Malformed AS_PATH
                actionsBgpUpdateMalformedAsPath(ctx);
                String errorMsg = "Malformed AS Path";
                throw new BgpParseException(errorMsg);
            }
            attrLen -= (2 * pathSegmentLength);
            ArrayList<Long> segmentAsNumbers = new ArrayList<>();
            while (pathSegmentLength-- > 0) {
                long asNumber = message.readUnsignedShort();
                segmentAsNumbers.add(asNumber);
            }

            BgpRouteEntry.PathSegment pathSegment =
                new BgpRouteEntry.PathSegment((byte) pathSegmentType,
                                              segmentAsNumbers);
            pathSegments.add(pathSegment);
        }

        return new BgpRouteEntry.AsPath(pathSegments);
    }

    /**
     * Parses BGP UPDATE Attribute Type NEXT_HOP.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed NEXT_HOP value
     * @throws BgpParseException
     */
    private Ip4Address parseAttributeTypeNextHop(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.NextHop.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        message.markReaderIndex();
        Ip4Address nextHopAddress =
            Ip4Address.valueOf((int) message.readUnsignedInt());
        //
        // Check whether the NEXT_HOP IP address is semantically correct.
        // As per RFC 4271, Section 6.3:
        //
        //  a) It MUST NOT be the IP address of the receiving speaker
        //  b) In the case of an EBGP ....
        //
        // Here we check only (a), because (b) doesn't apply for us: all our
        // peers are iBGP.
        //
        if (nextHopAddress.equals(localIp4Address)) {
            // ERROR: Invalid NEXT_HOP Attribute
            message.resetReaderIndex();
            actionsBgpUpdateInvalidNextHopAttribute(
                ctx, attrTypeCode, attrLen, attrFlags, message,
                nextHopAddress);
            String errorMsg = "Invalid NEXT_HOP Attribute: " + nextHopAddress;
            throw new BgpParseException(errorMsg);
        }

        return nextHopAddress;
    }

    /**
     * Parses BGP UPDATE Attribute Type MULTI_EXIT_DISC.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed MULTI_EXIT_DISC value
     * @throws BgpParseException
     */
    private long parseAttributeTypeMultiExitDisc(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.MultiExitDisc.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        long multiExitDisc = message.readUnsignedInt();
        return multiExitDisc;
    }

    /**
     * Parses BGP UPDATE Attribute Type LOCAL_PREF.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed LOCAL_PREF value
     * @throws BgpParseException
     */
    private long parseAttributeTypeLocalPref(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.LocalPref.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        long localPref = message.readUnsignedInt();
        return localPref;
    }

    /**
     * Parses BGP UPDATE Attribute Type ATOMIC_AGGREGATE.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @throws BgpParseException
     */
    private void parseAttributeTypeAtomicAggregate(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.AtomicAggregate.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        // Nothing to do: this attribute is primarily informational
    }

    /**
     * Parses BGP UPDATE Attribute Type AGGREGATOR.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message to parse
     * @return the parsed AGGREGATOR value: a tuple of <AS-Number, IP-Address>
     * @throws BgpParseException
     */
    private Pair<Long, Ip4Address> parseAttributeTypeAggregator(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message)
        throws BgpParseException {
        // Check the Attribute Length
        if (attrLen != BgpConstants.Update.Aggregator.LENGTH) {
            // ERROR: Attribute Length Error
            actionsBgpUpdateAttributeLengthError(
                ctx, attrTypeCode, attrLen, attrFlags, message);
            String errorMsg = "Attribute Length Error";
            throw new BgpParseException(errorMsg);
        }

        // The AGGREGATOR AS number
        long aggregatorAsNumber = message.readUnsignedShort();
        // The AGGREGATOR IP address
        Ip4Address aggregatorIpAddress =
            Ip4Address.valueOf((int) message.readUnsignedInt());

        Pair<Long, Ip4Address> aggregator = Pair.of(aggregatorAsNumber,
                                                    aggregatorIpAddress);
        return aggregator;
    }

    /**
     * Parses a message that contains encoded IPv4 network prefixes.
     * <p>
     * The IPv4 prefixes are encoded in the form:
     * <Length, Prefix> where Length is the length in bits of the IPv4 prefix,
     * and Prefix is the IPv4 prefix (padded with trailing bits to the end
     * of an octet).
     *
     * @param totalLength the total length of the data to parse
     * @param message the message with data to parse
     * @return a collection of parsed IPv4 network prefixes
     * @throws BgpParseException
     */
    private Collection<Ip4Prefix> parsePackedPrefixes(int totalLength,
                                                      ChannelBuffer message)
        throws BgpParseException {
        Collection<Ip4Prefix> result = new ArrayList<>();

        if (totalLength == 0) {
            return result;
        }

        // Parse the data
        int dataEnd = message.readerIndex() + totalLength;
        while (message.readerIndex() < dataEnd) {
            int prefixBitlen = message.readUnsignedByte();
            int prefixBytelen = (prefixBitlen + 7) / 8;         // Round-up
            if (message.readerIndex() + prefixBytelen > dataEnd) {
                String errorMsg = "Malformed Network Prefixes";
                throw new BgpParseException(errorMsg);
            }

            long address = 0;
            long extraShift = (4 - prefixBytelen) * 8;
            while (prefixBytelen > 0) {
                address <<= 8;
                address |= message.readUnsignedByte();
                prefixBytelen--;
            }
            address <<= extraShift;
            Ip4Prefix prefix =
                Ip4Prefix.valueOf(Ip4Address.valueOf((int) address),
                                  prefixBitlen);
            result.add(prefix);
        }

        return result;
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Invalid Network Field Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     */
    private void actionsBgpUpdateInvalidNetworkField(
                        ChannelHandlerContext ctx) {
        log.debug("BGP RX UPDATE Error from {}: Invalid Network Field",
                  remoteAddress);

        //
        // ERROR: Invalid Network Field
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.INVALID_NETWORK_FIELD;
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, null);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Malformed Attribute List Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     */
    private void actionsBgpUpdateMalformedAttributeList(
                        ChannelHandlerContext ctx) {
        log.debug("BGP RX UPDATE Error from {}: Malformed Attribute List",
                  remoteAddress);

        //
        // ERROR: Malformed Attribute List
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.MALFORMED_ATTRIBUTE_LIST;
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, null);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Missing Well-known Attribute Error: send NOTIFICATION and close the
     * channel.
     *
     * @param ctx the Channel Handler Context
     * @param missingAttrTypeCode the missing attribute type code
     */
    private void actionsBgpUpdateMissingWellKnownAttribute(
                        ChannelHandlerContext ctx,
                        int missingAttrTypeCode) {
        log.debug("BGP RX UPDATE Error from {}: Missing Well-known Attribute: {}",
                  remoteAddress, missingAttrTypeCode);

        //
        // ERROR: Missing Well-known Attribute
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.MISSING_WELL_KNOWN_ATTRIBUTE;
        ChannelBuffer data = ChannelBuffers.buffer(1);
        data.writeByte(missingAttrTypeCode);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Invalid ORIGIN Attribute Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message with the data
     * @param origin the ORIGIN attribute value
     */
    private void actionsBgpUpdateInvalidOriginAttribute(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message,
                        short origin) {
        log.debug("BGP RX UPDATE Error from {}: Invalid ORIGIN Attribute",
                  remoteAddress);

        //
        // ERROR: Invalid ORIGIN Attribute
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.INVALID_ORIGIN_ATTRIBUTE;
        ChannelBuffer data =
            prepareBgpUpdateNotificationDataPayload(attrTypeCode, attrLen,
                                                    attrFlags, message);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Attribute Flags Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message with the data
     */
    private void actionsBgpUpdateAttributeFlagsError(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message) {
        log.debug("BGP RX UPDATE Error from {}: Attribute Flags Error",
                  remoteAddress);

        //
        // ERROR: Attribute Flags Error
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.ATTRIBUTE_FLAGS_ERROR;
        ChannelBuffer data =
            prepareBgpUpdateNotificationDataPayload(attrTypeCode, attrLen,
                                                    attrFlags, message);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Invalid NEXT_HOP Attribute Error: send NOTIFICATION and close the
     * channel.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message with the data
     * @param nextHop the NEXT_HOP attribute value
     */
    private void actionsBgpUpdateInvalidNextHopAttribute(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message,
                        Ip4Address nextHop) {
        log.debug("BGP RX UPDATE Error from {}: Invalid NEXT_HOP Attribute {}",
                  remoteAddress, nextHop);

        //
        // ERROR: Invalid NEXT_HOP Attribute
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.INVALID_NEXT_HOP_ATTRIBUTE;
        ChannelBuffer data =
            prepareBgpUpdateNotificationDataPayload(attrTypeCode, attrLen,
                                                    attrFlags, message);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Unrecognized Well-known Attribute Error: send NOTIFICATION and close
     * the channel.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message with the data
     */
    private void actionsBgpUpdateUnrecognizedWellKnownAttribute(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message) {
        log.debug("BGP RX UPDATE Error from {}: " +
                  "Unrecognized Well-known Attribute Error: {}",
                  remoteAddress, attrTypeCode);

        //
        // ERROR: Unrecognized Well-known Attribute
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode =
            UpdateMessageError.UNRECOGNIZED_WELL_KNOWN_ATTRIBUTE;
        ChannelBuffer data =
            prepareBgpUpdateNotificationDataPayload(attrTypeCode, attrLen,
                                                    attrFlags, message);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Attribute Length Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     * @param attrTypeCode the attribute type code
     * @param attrLen the attribute length (in octets)
     * @param attrFlags the attribute flags
     * @param message the message with the data
     */
    private void actionsBgpUpdateAttributeLengthError(
                        ChannelHandlerContext ctx,
                        int attrTypeCode,
                        int attrLen,
                        int attrFlags,
                        ChannelBuffer message) {
        log.debug("BGP RX UPDATE Error from {}: Attribute Length Error",
                  remoteAddress);

        //
        // ERROR: Attribute Length Error
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.ATTRIBUTE_LENGTH_ERROR;
        ChannelBuffer data =
            prepareBgpUpdateNotificationDataPayload(attrTypeCode, attrLen,
                                                    attrFlags, message);
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, data);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Applies the appropriate actions after detecting BGP UPDATE
     * Malformed AS_PATH Error: send NOTIFICATION and close the channel.
     *
     * @param ctx the Channel Handler Context
     */
    private void actionsBgpUpdateMalformedAsPath(
                        ChannelHandlerContext ctx) {
        log.debug("BGP RX UPDATE Error from {}: Malformed AS Path",
                  remoteAddress);

        //
        // ERROR: Malformed AS_PATH
        //
        // Send NOTIFICATION and close the connection
        int errorCode = UpdateMessageError.ERROR_CODE;
        int errorSubcode = UpdateMessageError.MALFORMED_AS_PATH;
        ChannelBuffer txMessage =
            prepareBgpNotification(errorCode, errorSubcode, null);
        ctx.getChannel().write(txMessage);
        closeSession(ctx);
    }

    /**
     * Processes BGP NOTIFICATION message.
     *
     * @param ctx the Channel Handler Context
     * @param message the message to process
     */
    void processBgpNotification(ChannelHandlerContext ctx,
                                ChannelBuffer message) {
        int minLength =
            BgpConstants.BGP_NOTIFICATION_MIN_LENGTH -
            BgpConstants.BGP_HEADER_LENGTH;
        if (message.readableBytes() < minLength) {
            log.debug("BGP RX NOTIFICATION Error from {}: " +
                      "Message length {} too short. Must be at least {}",
                      remoteAddress, message.readableBytes(), minLength);
            //
            // ERROR: Bad Message Length
            //
            // NOTE: We do NOT send NOTIFICATION in response to a notification
            return;
        }

        //
        // Parse the NOTIFICATION message
        //
        int errorCode = message.readUnsignedByte();
        int errorSubcode = message.readUnsignedByte();
        int dataLength = message.readableBytes();

        log.debug("BGP RX NOTIFICATION message from {}: Error Code {} " +
                  "Error Subcode {} Data Length {}",
                  remoteAddress, errorCode, errorSubcode, dataLength);

        //
        // NOTE: If the peer sent a NOTIFICATION, we leave it to the peer to
        // close the connection.
        //

        // Start the Session Timeout timer
        restartSessionTimeoutTimer(ctx);
    }

    /**
     * Processes BGP KEEPALIVE message.
     *
     * @param ctx the Channel Handler Context
     * @param message the message to process
     */
    void processBgpKeepalive(ChannelHandlerContext ctx,
                             ChannelBuffer message) {
        if (message.readableBytes() + BgpConstants.BGP_HEADER_LENGTH !=
            BgpConstants.BGP_KEEPALIVE_EXPECTED_LENGTH) {
            log.debug("BGP RX KEEPALIVE Error from {}: " +
                      "Invalid total message length {}. Expected {}",
                      remoteAddress,
                      message.readableBytes() + BgpConstants.BGP_HEADER_LENGTH,
                      BgpConstants.BGP_KEEPALIVE_EXPECTED_LENGTH);
            //
            // ERROR: Bad Message Length
            //
            // Send NOTIFICATION and close the connection
            ChannelBuffer txMessage = prepareBgpNotificationBadMessageLength(
                message.readableBytes() + BgpConstants.BGP_HEADER_LENGTH);
            ctx.getChannel().write(txMessage);
            closeSession(ctx);
            return;
        }

        //
        // Parse the KEEPALIVE message: nothing to do
        //
        log.trace("BGP RX KEEPALIVE message from {}", remoteAddress);

        // Start the Session Timeout timer
        restartSessionTimeoutTimer(ctx);
    }

    /**
     * Prepares BGP OPEN message.
     *
     * @return the message to transmit (BGP header included)
     */
    private ChannelBuffer prepareBgpOpen() {
        ChannelBuffer message =
            ChannelBuffers.buffer(BgpConstants.BGP_MESSAGE_MAX_LENGTH);

        //
        // Prepare the OPEN message payload
        //
        message.writeByte(localBgpVersion);
        message.writeShort((int) localAs);
        message.writeShort((int) localHoldtime);
        message.writeInt(bgpSessionManager.getMyBgpId().toInt());
        message.writeByte(0);               // No Optional Parameters
        return prepareBgpMessage(BgpConstants.BGP_TYPE_OPEN, message);
    }

    /**
     * Prepares BGP KEEPALIVE message.
     *
     * @return the message to transmit (BGP header included)
     */
    private ChannelBuffer prepareBgpKeepalive() {
        ChannelBuffer message =
            ChannelBuffers.buffer(BgpConstants.BGP_MESSAGE_MAX_LENGTH);

        //
        // Prepare the KEEPALIVE message payload: nothing to do
        //
        return prepareBgpMessage(BgpConstants.BGP_TYPE_KEEPALIVE, message);
    }

    /**
     * Prepares BGP NOTIFICATION message.
     *
     * @param errorCode the BGP NOTIFICATION Error Code
     * @param errorSubcode the BGP NOTIFICATION Error Subcode if applicable,
     * otherwise BgpConstants.Notifications.ERROR_SUBCODE_UNSPECIFIC
     * @param data the BGP NOTIFICATION Data if applicable, otherwise null
     * @return the message to transmit (BGP header included)
     */
    ChannelBuffer prepareBgpNotification(int errorCode, int errorSubcode,
                                         ChannelBuffer data) {
        ChannelBuffer message =
            ChannelBuffers.buffer(BgpConstants.BGP_MESSAGE_MAX_LENGTH);

        //
        // Prepare the NOTIFICATION message payload
        //
        message.writeByte(errorCode);
        message.writeByte(errorSubcode);
        if (data != null) {
            message.writeBytes(data);
        }
        return prepareBgpMessage(BgpConstants.BGP_TYPE_NOTIFICATION, message);
    }

    /**
     * Prepares BGP NOTIFICATION message: Bad Message Length.
* * @param length the erroneous Length field * @return the message to transmit (BGP header included) */ ChannelBuffer prepareBgpNotificationBadMessageLength(int length) { int errorCode = MessageHeaderError.ERROR_CODE; int errorSubcode = MessageHeaderError.BAD_MESSAGE_LENGTH; ChannelBuffer data = ChannelBuffers.buffer(2); data.writeShort(length); return prepareBgpNotification(errorCode, errorSubcode, data); } /** * Prepares BGP UPDATE Notification data payload. * * @param attrTypeCode the attribute type code * @param attrLen the attribute length (in octets) * @param attrFlags the attribute flags * @param message the message with the data * @return the buffer with the data payload for the BGP UPDATE Notification */ private ChannelBuffer prepareBgpUpdateNotificationDataPayload( int attrTypeCode, int attrLen, int attrFlags, ChannelBuffer message) { // Compute the attribute length field octets boolean extendedLengthBit = ((0x10 & attrFlags) != 0); int attrLenOctets = 1; if (extendedLengthBit) { attrLenOctets = 2; } ChannelBuffer data = ChannelBuffers.buffer(attrLen + attrLenOctets + 1); data.writeByte(attrTypeCode); if (extendedLengthBit) { data.writeShort(attrLen); } else { data.writeByte(attrLen); } data.writeBytes(message, attrLen); return data; } /** * Prepares BGP message. * * @param type the BGP message type * @param payload the message payload to transmit (BGP header excluded) * @return the message to transmit (BGP header included) */ private ChannelBuffer prepareBgpMessage(int type, ChannelBuffer payload) { ChannelBuffer message = ChannelBuffers.buffer(BgpConstants.BGP_HEADER_LENGTH + payload.readableBytes()); // Write the marker for (int i = 0; i < BgpConstants.BGP_HEADER_MARKER_LENGTH; i++) { message.writeByte(0xff); } // Write the rest of the BGP header message.writeShort(BgpConstants.BGP_HEADER_LENGTH + payload.readableBytes()); message.writeByte(type); // Write the payload message.writeBytes(payload); return message; } /** * Restarts the BGP KeepaliveTimer. */ private void restartKeepaliveTimer(ChannelHandlerContext ctx) { if (localKeepaliveInterval == 0) { return; // Nothing to do } keepaliveTimeout = timer.newTimeout(new TransmitKeepaliveTask(ctx), localKeepaliveInterval, TimeUnit.SECONDS); } /** * Task class for transmitting KEEPALIVE messages. */ private final class TransmitKeepaliveTask implements TimerTask { private final ChannelHandlerContext ctx; /** * Constructor for given Channel Handler Context. * * @param ctx the Channel Handler Context to use */ TransmitKeepaliveTask(ChannelHandlerContext ctx) { this.ctx = ctx; } @Override public void run(Timeout timeout) throws Exception { if (timeout.isCancelled()) { return; } if (!ctx.getChannel().isOpen()) { return; } // Transmit the KEEPALIVE ChannelBuffer txMessage = prepareBgpKeepalive(); ctx.getChannel().write(txMessage); // Restart the KEEPALIVE timer restartKeepaliveTimer(ctx); } } /** * Restarts the BGP Session Timeout Timer. */ private void restartSessionTimeoutTimer(ChannelHandlerContext ctx) { if (remoteHoldtime == 0) { return; // Nothing to do } if (sessionTimeout != null) { sessionTimeout.cancel(); } sessionTimeout = timer.newTimeout(new SessionTimeoutTask(ctx), remoteHoldtime, TimeUnit.SECONDS); } /** * Task class for BGP Session timeout. */ private final class SessionTimeoutTask implements TimerTask { private final ChannelHandlerContext ctx; /** * Constructor for given Channel Handler Context. 
 *
 * @param ctx the Channel Handler Context to use
 */
SessionTimeoutTask(ChannelHandlerContext ctx) {
    this.ctx = ctx;
}

@Override
public void run(Timeout timeout) throws Exception {
    if (timeout.isCancelled()) {
        return;
    }
    if (!ctx.getChannel().isOpen()) {
        return;
    }

    log.debug("BGP Session Timeout: peer {}", remoteAddress);
    //
    // ERROR: Hold Timer Expired
    //
    // Send NOTIFICATION and close the connection
    int errorCode = HoldTimerExpired.ERROR_CODE;
    int errorSubcode = Notifications.ERROR_SUBCODE_UNSPECIFIC;
    ChannelBuffer txMessage =
        prepareBgpNotification(errorCode, errorSubcode, null);
    ctx.getChannel().write(txMessage);
    closeChannel(ctx);
}
}

/**
 * An exception indicating a parsing error of the BGP message.
 */
private static class BgpParseException extends Exception {
    /**
     * Default constructor.
     */
    public BgpParseException() {
        super();
    }

    /**
     * Constructor for a specific exception details message.
     *
     * @param message the message with the exception details
     */
    public BgpParseException(String message) {
        super(message);
    }
}
}
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package httpconv provides OpenTelemetry HTTP semantic conventions for
// tracing telemetry.
package httpconv // import "go.opentelemetry.io/otel/semconv/v1.17.0/httpconv"

import (
	"net/http"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/codes"
	"go.opentelemetry.io/otel/semconv/internal/v2"
	semconv "go.opentelemetry.io/otel/semconv/v1.17.0"
)

var (
	nc = &internal.NetConv{
		NetHostNameKey:     semconv.NetHostNameKey,
		NetHostPortKey:     semconv.NetHostPortKey,
		NetPeerNameKey:     semconv.NetPeerNameKey,
		NetPeerPortKey:     semconv.NetPeerPortKey,
		NetSockPeerAddrKey: semconv.NetSockPeerAddrKey,
		NetSockPeerPortKey: semconv.NetSockPeerPortKey,
		NetTransportOther:  semconv.NetTransportOther,
		NetTransportTCP:    semconv.NetTransportTCP,
		NetTransportUDP:    semconv.NetTransportUDP,
		NetTransportInProc: semconv.NetTransportInProc,
	}

	hc = &internal.HTTPConv{
		NetConv: nc,

		EnduserIDKey:                 semconv.EnduserIDKey,
		HTTPClientIPKey:              semconv.HTTPClientIPKey,
		HTTPFlavorKey:                semconv.HTTPFlavorKey,
		HTTPMethodKey:                semconv.HTTPMethodKey,
		HTTPRequestContentLengthKey:  semconv.HTTPRequestContentLengthKey,
		HTTPResponseContentLengthKey: semconv.HTTPResponseContentLengthKey,
		HTTPRouteKey:                 semconv.HTTPRouteKey,
		HTTPSchemeHTTP:               semconv.HTTPSchemeHTTP,
		HTTPSchemeHTTPS:              semconv.HTTPSchemeHTTPS,
		HTTPStatusCodeKey:            semconv.HTTPStatusCodeKey,
		HTTPTargetKey:                semconv.HTTPTargetKey,
		HTTPURLKey:                   semconv.HTTPURLKey,
		HTTPUserAgentKey:             semconv.HTTPUserAgentKey,
	}
)

// ClientResponse returns trace attributes for an HTTP response received by a
// client from a server. It will return the following attributes if the
// related values are defined in resp: "http.status_code",
// "http.response_content_length".
//
// This does not add all OpenTelemetry required attributes for an HTTP event,
// it assumes ClientRequest was used to create the span with a complete set of
// attributes. A complete set of attributes can be generated using the
// request contained in resp. For example:
//
//	append(ClientResponse(resp), ClientRequest(resp.Request)...)
func ClientResponse(resp *http.Response) []attribute.KeyValue {
	return hc.ClientResponse(resp)
}

// ClientRequest returns trace attributes for an HTTP request made by a client.
// The following attributes are always returned: "http.url", "http.flavor",
// "http.method", "net.peer.name". The following attributes are returned if the
// related values are defined in req: "net.peer.port", "http.user_agent",
// "http.request_content_length", "enduser.id".
func ClientRequest(req *http.Request) []attribute.KeyValue {
	return hc.ClientRequest(req)
}

// ClientStatus returns a span status code and message for an HTTP status code
// value received by a client.
func ClientStatus(code int) (codes.Code, string) {
	return hc.ClientStatus(code)
}

// ServerRequest returns trace attributes for an HTTP request received by a
// server.
//
// The server must be the primary server name if it is known.
// For example, this would be the ServerName directive
// (https://httpd.apache.org/docs/2.4/mod/core.html#servername) for an Apache
// server, and the server_name directive
// (http://nginx.org/en/docs/http/ngx_http_core_module.html#server_name) for an
// nginx server. More generically, the primary server name would be the host
// header value that matches the default virtual host of an HTTP server. It
// should include the host identifier and, if a port is used to route to the
// server, that port identifier should be included as an appropriate port
// suffix.
//
// If the primary server name is not known, server should be an empty string.
// The req Host will be used to determine the server instead.
//
// The following attributes are always returned: "http.method", "http.scheme",
// "http.flavor", "http.target", "net.host.name". The following attributes are
// returned if the related values are defined in req: "net.host.port",
// "net.sock.peer.addr", "net.sock.peer.port", "http.user_agent", "enduser.id",
// "http.client_ip".
func ServerRequest(server string, req *http.Request) []attribute.KeyValue {
	return hc.ServerRequest(server, req)
}

// ServerStatus returns a span status code and message for an HTTP status code
// value returned by a server. Status codes in the 400-499 range are not
// returned as errors.
func ServerStatus(code int) (codes.Code, string) {
	return hc.ServerStatus(code)
}

// RequestHeader returns the contents of h as attributes.
//
// Instrumentation should require an explicit configuration of which headers to
// capture and then prune what they pass here. Including all headers can be a
// security risk - explicit configuration helps avoid leaking sensitive
// information.
//
// The User-Agent header is already captured in the http.user_agent attribute
// from ClientRequest and ServerRequest. Instrumentation may provide an option
// to capture that header here even though it is not recommended. Otherwise,
// instrumentation should filter that out of what is passed.
func RequestHeader(h http.Header) []attribute.KeyValue {
	return hc.RequestHeader(h)
}

// ResponseHeader returns the contents of h as attributes.
//
// Instrumentation should require an explicit configuration of which headers to
// capture and then prune what they pass here. Including all headers can be a
// security risk - explicit configuration helps avoid leaking sensitive
// information.
//
// The User-Agent header is already captured in the http.user_agent attribute
// from ClientRequest and ServerRequest. Instrumentation may provide an option
// to capture that header here even though it is not recommended. Otherwise,
// instrumentation should filter that out of what is passed.
func ResponseHeader(h http.Header) []attribute.KeyValue {
	return hc.ResponseHeader(h)
}
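// Example (not part of this package): a hedged sketch of how these helpers
// could decorate a server span. The middleware wiring, the tracer name, and
// the hard-coded 200 status are assumptions for illustration only.
package example

import (
	"net/http"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/semconv/v1.17.0/httpconv"
	"go.opentelemetry.io/otel/trace"
)

// traced wraps next so every request runs inside a server span annotated with
// the semantic-convention attributes produced by ServerRequest.
func traced(next http.Handler) http.Handler {
	tracer := otel.Tracer("httpconv-example")
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx, span := tracer.Start(r.Context(), r.Method,
			trace.WithSpanKind(trace.SpanKindServer),
			trace.WithAttributes(httpconv.ServerRequest("", r)...))
		defer span.End()

		next.ServeHTTP(w, r.WithContext(ctx))

		// A real middleware would capture the status code from a wrapping
		// ResponseWriter; 200 is assumed here for brevity.
		span.SetStatus(httpconv.ServerStatus(200))
	})
}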
package cat

type scheduleMixin struct {
	isAlive    bool
	signals    chan int
	exitSignal int
}

type scheduleMixer interface {
	GetName() string

	handle(signal int)
	process()

	afterStart()
	beforeStop()

	getScheduleMixin() *scheduleMixin
}

func (p *scheduleMixin) handle(signal int) {
	switch signal {
	case signalShutdown:
		p.isAlive = false
	}
}

func (p *scheduleMixin) process() {
	return
}

func (p *scheduleMixin) afterStart() {
	return
}

func (p *scheduleMixin) beforeStop() {
	return
}

func (p *scheduleMixin) getScheduleMixin() *scheduleMixin {
	return p
}

func background(p scheduleMixer) {
	mixin := p.getScheduleMixin()
	mixin.isAlive = true

	p.afterStart()
	for mixin.isAlive {
		p.process()
	}
	p.beforeStop()

	close(mixin.signals)
	scheduler.signals <- mixin.exitSignal
}

func makeScheduleMixedIn(exitSignal int) scheduleMixin {
	return scheduleMixin{
		isAlive:    false,
		signals:    make(chan int),
		exitSignal: exitSignal,
	}
}

type catScheduler struct {
	signals chan int
}

var scheduler = catScheduler{
	signals: make(chan int),
}

func (p *catScheduler) shutdownAndWaitGroup(items []scheduleMixer) {
	var expectedSignals = make(map[int]string)

	var count = 0
	for _, v := range items {
		mixin := v.getScheduleMixin()
		if mixin.isAlive {
			mixin.signals <- signalShutdown
			expectedSignals[mixin.exitSignal] = v.GetName()
			count++
		}
	}
	if count == 0 {
		return
	}

	for signal := range p.signals {
		if name, ok := expectedSignals[signal]; ok {
			count--
			logger.Info("%s exited.", name)
		} else {
			logger.Warning("Unpredicted signal received: %d", signal)
		}
		if count == 0 {
			break
		}
	}
}

func (p *catScheduler) shutdown() {
	group1 := []scheduleMixer{&router, &monitor}
	group2 := []scheduleMixer{aggregator.transaction, aggregator.event, aggregator.metric}
	group3 := []scheduleMixer{&sender}

	disable()
	logger.Info("Received shutdown request, scheduling...")
	p.shutdownAndWaitGroup(group1)
	p.shutdownAndWaitGroup(group2)
	p.shutdownAndWaitGroup(group3)
	logger.Info("All systems down.")
}
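// Illustrative sketch (same package, not part of the original file): a minimal
// worker built on scheduleMixin, showing how background() drives the mixin
// lifecycle. The exit-signal value 99 and the doWork helper are hypothetical.

type demoWorker struct {
	scheduleMixin
}

func (w *demoWorker) GetName() string { return "demo-worker" }

// process performs one unit of work per loop iteration, draining the signal
// channel so a shutdown request can flip isAlive via handle().
func (w *demoWorker) process() {
	select {
	case sig := <-w.signals:
		w.handle(sig)
	default:
		doWork() // hypothetical unit of work
	}
}

// Usage:
//
//	w := &demoWorker{scheduleMixin: makeScheduleMixedIn(99)}
//	go background(w)
//	...
//	w.signals <- signalShutdown // background() then reports 99 to scheduler.signals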
/*
Assembles an instruction from a string. Returns the number of bytes written to the buffer on success, zero otherwise. Instructions can be separated using either the ';' or '\n' character.
Parameters:
- string         [in]         A pointer to a string that represents an instruction in assembly language.
- buffer         [out]        A pointer to a buffer that receives the encoded instructions.
- bufferSize     [in]         The size of the buffer in bytes.
- runtimeAddress [in]         The instruction's runtime address. You may use 'NMD_X86_INVALID_RUNTIME_ADDRESS'.
- mode           [in]         The architecture mode. 'NMD_X86_MODE_32', 'NMD_X86_MODE_64' or 'NMD_X86_MODE_16'.
- count          [in/out/opt] A pointer to a variable that on input is the maximum number of instructions that can be parsed (or zero for unlimited instructions), and on output is the number of instructions parsed. This parameter may be null (zero).
*/
size_t nmd_x86_assemble(const char* string, void* buffer, size_t bufferSize, uint64_t runtimeAddress, NMD_X86_MODE mode, size_t* count)
{
	char parsedString[128];
	const uint8_t* const bufferEnd = (uint8_t*)buffer + bufferSize;
	uint8_t* b = (uint8_t*)buffer;
	size_t remainingSize;

	uint8_t tempBuffer[NMD_X86_MAXIMUM_INSTRUCTION_LENGTH];

	AssembleInfo ai;
	ai.s = parsedString;
	ai.mode = mode;
	ai.runtimeAddress = runtimeAddress;
	ai.b = tempBuffer;

	size_t numInstructions = 0;
	const size_t numMaxInstructions = (count && *count != 0) ? *count : (size_t)(-1);

	while (string[0] != '\0')
	{
		if (numInstructions == numMaxInstructions)
			break;

		remainingSize = bufferEnd - b;

		/* Copy the current instruction into 'parsedString', lower-casing it
		   and collapsing runs of spaces. */
		size_t length = 0;
		bool allowSpace = false;
		for (; *string; string++)
		{
			const char c = *string;
			if (c == ';' || c == '\n')
				break;
			else if (c == ' ' && !allowSpace)
				continue;

			/* Leave room for the null terminator written below. */
			if (length >= sizeof(parsedString) - 1)
				return 0;

			const char newChar = (c >= 'A' && c <= 'Z') ? c + 0x20 : c;
			parsedString[length++] = newChar;

			/* Allow a following space only when the current character and the
			   character after that space are alphanumeric. Guard against
			   reading past the string's null terminator. */
			const char next = *(string + 1);
			const char nextNext = (next == '\0') ? '\0' : *(string + 2);
			allowSpace = (NMD_IS_LOWERCASE(newChar) || NMD_IS_DECIMAL_NUMBER(newChar)) && (NMD_IS_LOWERCASE(nextNext) || NMD_IS_DECIMAL_NUMBER(nextNext));
		}

		if (*string != '\0')
			string++;

		/* Trim a trailing space. */
		if (length > 0 && parsedString[length - 1] == ' ')
			length--;

		parsedString[length] = '\0';

		const size_t numBytes = assembleSingle(&ai);
		if (numBytes == 0 || numBytes > remainingSize)
			return 0;

		size_t i = 0;
		for (; i < numBytes; i++)
			b[i] = tempBuffer[i];
		b += numBytes;

		numInstructions++;
	}

	if (count)
		*count = numInstructions;

	return (size_t)(b - (uint8_t*)buffer);
}
/** * A factory for creating a {@link ForEnterValue} offset mapping. */ @HashCodeAndEqualsPlugin.Enhance protected static class Factory implements OffsetMapping.Factory<Enter> { /** * The supplied type of the enter advice. */ private final TypeDefinition enterType; /** * Creates a new factory for creating a {@link ForEnterValue} offset mapping. * * @param enterType The supplied type of the enter method. */ protected Factory(TypeDefinition enterType) { this.enterType = enterType; } /** * Creates a new factory for creating a {@link ForEnterValue} offset mapping. * * @param typeDefinition The supplied type of the enter advice. * @return An appropriate offset mapping factory. */ protected static OffsetMapping.Factory<Enter> of(TypeDefinition typeDefinition) { return typeDefinition.represents(void.class) ? new Illegal<Enter>(Enter.class) : new Factory(typeDefinition); } /** * {@inheritDoc} */ public Class<Enter> getAnnotationType() { return Enter.class; } /** * {@inheritDoc} */ public OffsetMapping make(ParameterDescription.InDefinedShape target, AnnotationDescription.Loadable<Enter> annotation, AdviceType adviceType) { if (adviceType.isDelegation() && !annotation.load().readOnly()) { throw new IllegalStateException("Cannot use writable " + target + " on read-only parameter"); } else { return new ForEnterValue(target.getType(), enterType.asGenericType(), annotation.load()); } } }
// NewTooManyRequestsErrorResponse returns an API gateway HTTP error response for // HTTP 429 TooManyRequests message. func NewTooManyRequestsErrorResponse(message string) (*events.APIGatewayV2HTTPResponse, error) { return NewJSONResponse(429, nil, ErrorMessageResponse{ Code: "TooManyRequestsError", Message: "TooManyRequestsError: " + message, }) }
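// Hypothetical wiring (not from this codebase): how the helper might be used
// inside a Lambda handler. rateLimited is an assumed function, and the
// success path reuses NewJSONResponse exactly as the snippet above does.
func handleRequest(ctx context.Context, req events.APIGatewayV2HTTPRequest) (*events.APIGatewayV2HTTPResponse, error) {
	if rateLimited(req.RequestContext.HTTP.SourceIP) {
		return NewTooManyRequestsErrorResponse("rate limit exceeded, retry later")
	}
	return NewJSONResponse(200, nil, map[string]string{"status": "ok"})
}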
#include <string>
#include <fstream>
#include <iostream>
#include <cstdlib>
#include <thread>
#include <chrono>

#include <cpplog/log.hpp>

int main() {
    std::ofstream error_log_file("error_log.txt", std::ofstream::out);

    // Errors go to the file; every severity goes to stdout.
    logging::add_message_sink(error_log_file, logging::Severity::Error);
    logging::add_message_sink(std::cout,
                              logging::Severity::Trace | logging::Severity::Debug |
                              logging::Severity::Warn | logging::Severity::Error |
                              logging::Severity::Info | logging::Severity::Fatal);

    TRACE << "trace log" << std::endl;
    DEBUG << "debug log" << std::endl;
    INFO << "info log" << std::endl;
    WARN << "warn log" << std::endl;
    ERROR << "error log" << std::endl;
    FATAL << "fatal log" << std::endl;

    std::thread t1{[]() {
        std::this_thread::sleep_for(std::chrono::milliseconds(500));
        ERROR << "error in thread" << 1 << std::endl;
    }};
    std::thread t2{[]() {
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
        WARN << "warning in thread" << 2 << std::endl;
    }};

    t2.join();
    t1.join();

    return EXIT_SUCCESS;
}
Statins in the 21st century: end of the simple story? The development of the HMG-CoA reductase inhibitors (the statins) has led to important advances in the management of cardiovascular disease. There have been several landmark mortality and morbidity clinical trials with the statins. The 4S (Scandinavian Simvastatin Survival Study) was the first large-scale randomised cholesterol-lowering trial to show a decrease in mortality. In patients with coronary heart disease and relatively high cholesterol, simvastatin decreased mortality, hospital stays, the risk of undergoing myocardial re-vascularisation, stroke and transient ischaemic attack. The CARE (Cholesterol and Recurrent Events) trial showed that lowering average cholesterol levels after myocardial infarction with pravastatin reduced a composite primary end point of coronary mortality and myocardial infarction, coronary bypass surgery, angioplasty and strokes. The LIPID (Long-term Intervention with Pravastatin in Ischaemic Disease) study showed that lowering average cholesterol levels after previous myocardial infarction or unstable angina reduced mortality. WOSCOPS (The West of Scotland Coronary Prevention Study) was the first trial to demonstrate the benefit of pravastatin, as primary prevention for cardiovascular disease, in men with high cholesterol levels. AFCAPS/TexCAPS (The Air Force/Texas Coronary Atherosclerosis Prevention Study) showed that the benefits of lowering cholesterol levels were also evident in healthy men and women who initially had average cholesterol levels. Rather surprisingly, the reductions in mortality and morbidity with statins are associated with only small improvements in coronary angiographic findings. A preliminary study indicated that lovastatin prevented restenosis, but larger and better-controlled studies indicate that the statins do not have beneficial effects on restenosis. Effects other than lipid-lowering, or effects consequent on lipid-lowering, may contribute to the beneficial effects of statins. These effects include improvement in vascular endothelial function, cardiac remodelling, changes in blood rheology, and anti-oxidant, anti-inflammatory and anti-hypertensive actions.
# Lint as: python3
# Copyright 2020 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""Batch To Space ND tests."""

from absl import app
from iree.tf.support import tf_test_utils
from iree.tf.support import tf_utils
import numpy as np
import tensorflow.compat.v2 as tf


class BatchtoSpaceModule(tf.Module):

  @tf.function(input_signature=[tf.TensorSpec([3, 5, 2], tf.float32)])
  def batch_to_space_nd(self, batched):
    block_shape = [3]
    paddings = [[3, 4]]
    return tf.compat.v1.batch_to_space_nd(batched, block_shape, paddings)


class BatchtoSpaceTest(tf_test_utils.TracedModuleTestCase):

  def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self._modules = tf_test_utils.compile_tf_module(BatchtoSpaceModule)

  def test_space_to_batch_inference(self):

    def space_to_batch_inference(module):
      x = np.linspace(0, 29, 30, dtype=np.float32)
      x = np.reshape(x, [3, 5, 2])
      module.batch_to_space_nd(x)

    self.compare_backends(space_to_batch_inference, self._modules)


def main(argv):
  del argv  # Unused
  if hasattr(tf, 'enable_v2_behavior'):
    tf.enable_v2_behavior()
  tf.test.main()


if __name__ == '__main__':
  app.run(main)
def create_names(self): self.opposite_names = characterDict.CreateDict(os.path.abspath( "names/opposite_names.csv")) self.they_names = characterDict.CreateDict(os.path.abspath( "names/they_names.csv")) self.she_names = characterDict.CreateDict(os.path.abspath( "names/she_names.csv")) self.he_names = characterDict.CreateDict(os.path.abspath( "names/he_names.csv"))
/**
 * Parses a string representation of a realisation (DLRS) into a Realisation object.
 *
 * @throws NewickIOException
 * @throws TopologyException
 */
public static Realisation parseDLRSRealisation(String real) throws NewickIOException, TopologyException {
    PrIMENewickTree tree = PrIMENewickTreeReader.readTree(real, true, true);
    TimesMap times = tree.getTimesMap(real);
    String[] names = new String[tree.getNoOfVertices()];
    boolean[] isdups = new boolean[tree.getNoOfVertices()];
    boolean[] istrans = new boolean[tree.getNoOfVertices()];
    String[] placements = new String[tree.getNoOfVertices()];
    String[] fromTos = new String[tree.getNoOfVertices()];
    String[] speciesEdge = new String[tree.getNoOfVertices()];

    for (int v = 0; v < tree.getNoOfVertices(); v++) {
        names[v] = tree.getVertex(v).getName();
        String meta = tree.getVertex(v).getMeta();
        int vertextype = getVertexType(meta);
        isdups[v] = false;
        istrans[v] = false;
        if (vertextype == 2) {
            isdups[v] = true;
        } else if (vertextype == 3) {
            istrans[v] = true;
        }

        int[] fromtos = {-1, -1, -1};
        if (vertextype == 3) {
            fromtos = getFromToPoints(meta);
            fromTos[v] = "(" + fromtos[0] + "," + fromtos[1] + "," + fromtos[2] + ")";
            int[] specieEdgePlacement = getSpeciesEdge(meta);
            speciesEdge[v] = "(" + specieEdgePlacement[0] + "," + specieEdgePlacement[1] + ")";
        }

        int[] placement = getRealisedPoint(meta);
        placements[v] = "(" + placement[0] + "," + placement[1] + ")";
    }

    NamesMap Names = new NamesMap("GuestTreeNames", names);
    BooleanMap isDups = new BooleanMap("RealisationIsDups", isdups);
    BooleanMap isTrans = new BooleanMap("RealisationIsTrans", istrans);
    StringMap Placements = new StringMap("DiscPts", placements);
    StringMap FromTos = new StringMap("fromToLineage", fromTos);
    StringMap SpeciesEdge = new StringMap("speciesEdge", speciesEdge);

    RBTree rbtree = new RBTree((NewickTree) tree, "");
    Realisation realisation = new Realisation((RootedBifurcatingTree) rbtree, Names, times, isDups, isTrans, Placements, FromTos, SpeciesEdge);

    return realisation;
}
/** * Serialize the given set of tags, using the given document to create elements. * * @param document The target document * @param tags The set of tags * * @return A serialized set of tags */ public static Element ofTags( final Document document, final Set<String> tags) { Objects.requireNonNull(document, "document"); Objects.requireNonNull(tags, "tags"); final var e_tags = document.createElementNS(NAMESPACE, "c:tags"); for (final var tag : tags) { e_tags.appendChild(ofTag(document, tag)); } return e_tags; }
/**
 * Generates a random integer between 1 and max (inclusive).
 */
private int _get_random(int max)
{
    // Math.random() returns a uniform double in [0, 1), so scaling by max
    // and adding 1 yields a uniform integer in [1, max]. The previous
    // rejection loop only ever sampled 0..9, so it could never return a
    // value above 9 regardless of max.
    return (int) (Math.random() * max) + 1;
}
def add_records( self, records: List[SeqRecord], record_group_name: str ) -> Tuple[List[str], List[SeqRecord]]: self.logger.info( "Adding {} records to '{}'".format(len(records), record_group_name) ) clean_records(records) keys, records = BioBlast.add_records( records, self.db, span_origin=self.span_origin ) if record_group_name: self.record_groups.setdefault(record_group_name, list()) self.record_groups[record_group_name] += records return keys, records
//
// NewClient creates and initializes a new connection to the RPC server.
//
func NewClient(url *url.URL, timeout time.Duration) (client *Client, err error) {
	if url == nil {
		return nil, nil
	}
	if timeout == 0 {
		timeout = defaultTimeout
	}

	host, ip, port := libnet.ParseIPPort(url.Host, 0)

	client = &Client{
		url:     url,
		timeout: timeout,
	}

	if url.Scheme == schemeIsHTTPS {
		var insecure bool
		if ip != nil {
			// Certificate verification cannot succeed against a bare IP
			// address, so skip it.
			insecure = true
		}
		// Capture the TLS server name before appending the default port;
		// the SNI value must be a host name without a port.
		serverName := host
		if port == 0 {
			host += ":443"
		}

		config := &tls.Config{
			ServerName:         serverName,
			InsecureSkipVerify: insecure,
		}

		client.conn, err = tls.Dial("tcp", host, config)
	} else {
		if port == 0 {
			host += ":80"
		}
		client.conn, err = net.Dial("tcp", host)
	}
	if err != nil {
		return nil, fmt.Errorf("NewClient: Dial: %w", err)
	}

	return client, nil
}
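// Usage sketch with a hypothetical endpoint: dial an HTTPS RPC server with a
// five-second timeout and close the connection when done.
//
//	u, err := url.Parse("https://rpc.example.com/")
//	if err != nil {
//		log.Fatal(err)
//	}
//	cli, err := NewClient(u, 5*time.Second)
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer cli.conn.Close()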
/*
 * Mckoi Software ( http://www.mckoi.com/ )
 * Copyright (C) 2000 - 2015 Diehl and Associates, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mckoi.network;

import com.mckoi.data.NodeReference;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.zip.InflaterInputStream;

/**
 * A compressed set of nodes.
 *
 * @author <NAME>
 */
public class CompressedNodeSet implements NodeSet {

  /**
   * The set of node_ids stored in this set.
   */
  private final NodeReference[] node_ids;

  /**
   * The compressed encoded form of the node set.
   */
  private final byte[] compressed_form;

  /**
   * Constructor.
   */
  CompressedNodeSet(NodeReference[] node_ids, byte[] encoded_form) {
    this.node_ids = node_ids;
    this.compressed_form = encoded_form;
  }

  public Iterator<NodeItemBinary> getNodeSetItems() {
    return new CompressedIterator();
  }

  public NodeReference[] getNodeIdSet() {
    return node_ids;
  }

  public void writeEncoded(DataOutput dout) throws IOException {
    dout.writeInt(compressed_form.length);
    dout.write(compressed_form);
  }

  // -----

  private class CompressedIterator implements Iterator<NodeItemBinary> {

    /**
     * The inflater stream over the compressed item set.
     */
    private final InflaterInputStream comp_in;
    private final DataInputStream data_in;

    /**
     * The index of the current node.
     */
    private int node_index;

    CompressedIterator() {
      comp_in = new InflaterInputStream(
                          new ByteArrayInputStream(compressed_form));
      data_in = new DataInputStream(comp_in);
      this.node_index = 0;
    }

    public boolean hasNext() {
      return node_index < node_ids.length;
    }

    public NodeItemBinary next() {
      NodeItemBinary b = new CompressedNodeItem(node_ids[node_index], data_in);
      ++node_index;
      return b;
    }

    public void remove() {
      throw new UnsupportedOperationException();
    }

  }

  private static class CompressedNodeItem implements NodeItemBinary {

    private final NodeReference node_id;
    private final DataInputStream data_in;

    CompressedNodeItem(NodeReference node_id, DataInputStream data_in) {
      this.node_id = node_id;
      this.data_in = data_in;
    }

    public byte[] asBinary() {
      return null;
    }

    public InputStream getInputStream() {
      return data_in;
    }

    public NodeReference getNodeId() {
      return node_id;
    }

  }

}
#include <bits/stdc++.h>
using namespace std;

int a, n, i, s = 0, p = 0, x = 0;

int main()
{
    //freopen("coder.in","r",stdin);
    //freopen("coder.out","w",stdout);
    cin >> n;
    for (i = 1; i <= n; i++) {
        cin >> a;
        // s tracks the running surplus carried over from previous elements;
        // x holds the previous element (initially 0).
        s += x - a;
        // If the surplus cannot cover the increase from x to a, pay the
        // shortfall into p and reset the surplus to zero.
        if (s < 0) {
            p += -s;
            s = 0;
        }
        x = a;
    }
    cout << p << endl;
    return 0;
}
// updatePolicyKey updates an entry in the PolicyMap for the provided // PolicyUpdateArgs argument. // Adds the entry to the PolicyMap if add is true, otherwise the entry is // deleted. func updatePolicyKey(pa *PolicyUpdateArgs, add bool) { policyMap, err := policymap.Open(pa.path) if err != nil { Fatalf("Cannot open policymap %q : %s", pa.path, err) } for _, proto := range pa.protocols { u8p := u8proto.U8proto(proto) entry := fmt.Sprintf("%d %d/%s", pa.label, pa.port, u8p.String()) if add { var proxyPort uint16 if err := policyMap.Allow(pa.label, pa.port, u8p, pa.trafficDirection, proxyPort); err != nil { Fatalf("Cannot add policy key '%s': %s\n", entry, err) } } else { if err := policyMap.Delete(pa.label, pa.port, u8p, pa.trafficDirection); err != nil { Fatalf("Cannot delete policy key '%s': %s\n", entry, err) } } } }
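// Hypothetical invocation sketch: the PolicyUpdateArgs field names mirror how
// updatePolicyKey reads them, but the exact field types, the map path, and the
// identity/port values are assumptions, not taken from this codebase.
//
//	pa := &PolicyUpdateArgs{
//		path:             "/sys/fs/bpf/tc/globals/cilium_policy_00042",
//		label:            1234,       // peer security identity
//		port:             80,         // destination port
//		protocols:        []uint8{6}, // 6 = IPPROTO_TCP
//		trafficDirection: trafficdirection.Ingress,
//	}
//	updatePolicyKey(pa, true) // add the key; pass false to delete it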
def delete(self): self._check_exists() self.conn.session.query(model.Match).filter( model.Match.query_id == self.query_id ).delete() self.conn.session.query(model.Query).filter( model.Query.query_id == self.query_id ).delete() self.conn.session.commit()
def play_note(color_string):
    """Append a note to the bard's song, accumulate its direction via
    direction_sum, and release the blast once a five-note song is complete."""
    global bard_song, directioner
    bard_song += color_string
    direction_sum(color_string)
    render_all()
    libtcod.console_flush()
    winsound.PlaySound("SystemExit", winsound.SND_ASYNC)
    # Reset if the accumulated notes are not found in song_of_world.
    if bard_song not in song_of_world:
        bard_song = ''
        directioner = [0, 0]
    # A complete five-note song triggers the blast, then resets the state.
    if len(bard_song) == 5:
        blast_effect(check_direct())
        blast_damage(check_direct())
        bard_song = ''
        directioner = [0, 0]
// maybeSkipKeys checks if any keys can be skipped by using a time-bound
// iterator. If keys can be skipped, it will update the main iterator to point
// to the earliest version of the next candidate key.
// It is expected that TBI is at a key <= main iterator key when calling
// maybeSkipKeys().
void DBIncrementalIterator::maybeSkipKeys() {
  if (time_bound_iter == nullptr) {
    return;
  }

  rocksdb::Slice tbi_key;
  if (!extractKey(time_bound_iter->rep->key(), &tbi_key)) {
    return;
  }
  rocksdb::Slice iter_key;
  if (!extractKey(iter->rep->key(), &iter_key)) {
    return;
  }

  if (iter_key.compare(tbi_key) > 0) {
    // The fast path, after the Next() below, is when the TBI and the main
    // iterator are in lockstep (iterKey == tbiKey). In this case, the main
    // iterator was referencing the next key that would be visited by the
    // TBI. This means that for the incremental iterator to perform a Next or
    // NextKey will require only 1 extra NextKey invocation while they remain
    // in lockstep. This could be common if most keys are modified or the
    // modifications are clustered in keyspace.
    //
    // NB: The Seek() below is expensive, so we aim to avoid it if both
    // iterators remain in lockstep as described above.
    auto state = DBIterNext(time_bound_iter.get(), true /* skip_current_key_versions */);
    if (!state.valid) {
      status = state.status;
      valid = false;
      return;
    }
    if (!extractKey(time_bound_iter->rep->key(), &tbi_key)) {
      return;
    }

    auto cmp = iter_key.compare(tbi_key);
    if (cmp > 0) {
      // If the tbiKey is still behind the iterKey, the TBI key may be seeing
      // phantom MVCCKey.Keys. These keys may not be seen by the main iterator
      // due to aborted transactions and keys which have been subsumed due to
      // range tombstones. In this case we can SeekGE() the TBI to the main
      // iterator.
      DBKey seek_to = {};
      // NB: We don't use ToDBKey as iter_key is already split.
      seek_to.key = ToDBSlice(iter_key);
      state = DBIterSeek(time_bound_iter.get(), seek_to);
      if (!state.valid) {
        status = state.status;
        valid = false;
        return;
      }
      if (!extractKey(time_bound_iter->rep->key(), &tbi_key)) {
        return;
      }
      cmp = iter_key.compare(tbi_key);
    }

    if (cmp < 0) {
      // In the case that the next MVCC key that the TBI observes is not the
      // same as the main iterator, we may be able to skip over a large group
      // of keys. The main iterator is seeked to the TBI in hopes that many
      // keys were skipped. Note that a Seek() is an order of magnitude more
      // expensive than a Next().
      DBKey seek_to = {};
      // NB: We don't use ToDBKey as tbi_key is already split.
      seek_to.key = ToDBSlice(tbi_key);
      state = DBIterSeek(iter.get(), seek_to);
      if (!state.valid) {
        status = state.status;
        valid = false;
        return;
      }
    }
  }
}
package kubeflowpipelines

import (
	"fmt"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model"
)

func resourceKubeflowPipelinesExperiment() *schema.Resource {
	return &schema.Resource{
		Create: resourceKubeflowPipelinesExperimentCreate,
		Read:   resourceKubeflowPipelinesExperimentRead,
		Delete: resourceKubeflowPipelinesExperimentDelete,
		Schema: map[string]*schema.Schema{
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validation.StringIsNotEmpty,
			},
			"description": {
				Type:         schema.TypeString,
				Optional:     true,
				ForceNew:     true,
				ValidateFunc: validation.StringIsNotEmpty,
			},
			"created_at": {
				Type:     schema.TypeString,
				Computed: true,
			},
		},
	}
}

func resourceKubeflowPipelinesExperimentCreate(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*Meta).Experiment
	context := meta.(*Meta).Context

	name := d.Get("name").(string)
	description := d.Get("description").(string)

	apiExperiment := experiment_model.APIExperiment{Name: name}

	if description != "" {
		apiExperiment.Description = description
	}

	experimentParams := experiment_service.CreateExperimentParams{
		Body:    &apiExperiment,
		Context: context,
	}

	resp, err := client.ExperimentService.CreateExperiment(&experimentParams, nil)
	if err != nil {
		return fmt.Errorf("unable to create experiment: %s", err)
	}

	d.Set("name", resp.Payload.Name)
	d.Set("description", resp.Payload.Description)
	d.Set("created_at", time.Time(resp.Payload.CreatedAt).Format(time.RFC3339))
	d.SetId(resp.Payload.ID)

	return resourceKubeflowPipelinesExperimentRead(d, meta)
}

func resourceKubeflowPipelinesExperimentRead(d *schema.ResourceData, meta interface{}) error {
	experiment, err := readExperiment(meta, d.Id(), "")
	if err != nil {
		return fmt.Errorf("%s", err)
	}

	d.SetId(experiment.ID)
	d.Set("name", experiment.Name)
	d.Set("description", experiment.Description)
	d.Set("created_at", experiment.CreatedAt)

	return nil
}

func resourceKubeflowPipelinesExperimentDelete(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*Meta).Experiment
	context := meta.(*Meta).Context

	id := d.Id()

	experimentParams := experiment_service.DeleteExperimentParams{
		ID:      id,
		Context: context,
	}

	_, err := client.ExperimentService.DeleteExperiment(&experimentParams, nil)
	if err != nil {
		return fmt.Errorf("unable to delete experiment: %s", err)
	}

	d.SetId("")

	return nil
}
/* { dg-additional-options "-O2" } */ /* { dg-additional-options "-fdump-tree-parloops1-all" } */ /* { dg-additional-options "-fdump-tree-optimized" } */ #include <stdlib.h> #define N 500 unsigned int a[N][N]; void __attribute__((noinline,noclone)) foo (void) { int i, j; unsigned int sum = 1; #pragma acc kernels copyin (a[0:N]) copy (sum) { for (i = 0; i < N; ++i) for (j = 0; j < N; ++j) sum += a[i][j]; } if (sum != 5001) abort (); } /* Check that only one loop is analyzed, and that it can be parallelized. */ /* { dg-final { scan-tree-dump-times "SUCCESS: may be parallelized" 1 "parloops1" } } */ /* { dg-final { scan-tree-dump-not "FAILED:" "parloops1" } } */ /* { dg-final { scan-tree-dump-times "parallelizing outer loop" 1 "parloops1" } } */ /* Check that the loop has been split off into a function. */ /* { dg-final { scan-tree-dump-times "(?n);; Function .*foo.*._omp_fn.0" 1 "optimized" } } */ /* { dg-final { scan-tree-dump-times "(?n)oacc function \\(0," 1 "parloops1" } } */
Factors Influencing the Properties of Extrusion-Based 3D-Printed Alkali-Activated Fly Ash-Slag Mortar

The mix proportioning of extrusion-based 3D-printed cementitious material should balance printability and hardened properties. This paper investigated the effects of three key mix proportion parameters of 3D-printed alkali-activated fly ash/slag (3D-AAFS) mortar, i.e., the sand-to-binder (s/b) ratio, the fly ash/ground granulated blast-furnace slag (FA/GGBS) ratio, and the silicate modulus (Ms) of the activator, on extrudability, buildability, interlayer strength, and drying shrinkage. The results showed that the loss of extrudability and the development of buildability were accelerated by increasing the s/b ratio, decreasing the FA/GGBS ratio, or using a lower-Ms activator. A rise in the s/b ratio improved the interlayer strength and reduced the drying shrinkage. Although increasing the FA/GGBS mass ratio from 1 to 3 led to a reduction of 35% in the interlayer bond strength, it decreased the shrinkage strain by half. A larger silicate modulus was beneficial to the interlayer bond strength, but it made shrinkage more serious. Moreover, a simple centroid design method was developed for optimizing the mix proportion of 3D-AAFS mortar to simultaneously meet the requirements of printability and hardened properties.

Unlike the conventional building process, 3D printing of concrete is a formwork-free construction technique. Hence, the first challenge of this technology is to prepare concrete materials that are compatible with the printing process. That is, the mixture needs good fluidity and self-supporting capacity to ensure that it can be extruded continuously through the nozzle and can bear the load generated by subsequent concrete layers. The printability of concrete is mainly characterized by extrudability and buildability.

Trial-and-error or single-factor-variable methods are often used to optimize the mix proportion of 3D-printed concrete. Obviously, these design methods cannot take printability, interlayer bonding, and volume stability into consideration simultaneously, even though these three properties are the most important ones for 3D-printed geopolymers. In addition, the properties of geopolymers may vary greatly due to the regional nature of geopolymer raw materials, which makes the design of 3D-printed geopolymers considerably more difficult. The interlayer bond strength and volume stability, which are also extremely important for the engineering application of 3D-printing materials, are usually not considered in mix proportioning. Therefore, it is necessary to develop a mix proportioning method for 3D-printed geopolymer mixtures that takes printability, interlayer bond strength, and volume stability into consideration at the same time. This will greatly help to promote the application of this eco-friendly material in 3D printing technology.

For a full understanding of the relationship between the composition of 3D-printed alkali-activated fly ash/slag (3D-AAFS) mortar and its properties, this study comprehensively investigated, for the first time, the influences of the sand-to-binder ratio, the relative proportion of FA-GGBS precursors, and the silicate modulus of the activator on the printability, interlayer bond strength, and volume stability of 3D-AAFS mortar.
Moreover, a simple centroid design method was developed for the mix proportioning of extrusion-based 3D-AAFS mortar to strike a balance among printability, interlayer bond strength, and volume stability. This study enriched the mix design concept for 3D-printed alkali-activated materials.

Raw Materials and Sample Preparation

In this study, Grade I fly ash (FA) powder in compliance with GB/T 1596-2017 and Grade 95 ground granulated blast-furnace slag powder (GGBS) in accordance with GB/T 18046-2017 were used to prepare 3D-printed alkali-activated fly ash/slag (3D-AAFS) mortars. Their chemical compositions were determined by X-ray fluorescence (Axios mAX, PANalytical, Almelo, The Netherlands) and are given in Table 1. The density and Blaine specific surface area were 2350 kg/m³ and 263 m²/kg for fly ash and 2860 kg/m³ and 487 m²/kg for GGBS. The fine aggregate used was river sand with a fineness modulus of 2.47. The particle size distributions of the powder materials and sand are shown in Figure 1. The alkaline activator was prepared using sodium hydroxide pellets (analytical grade, purity ≥ 98%), liquid sodium silicate with an original silicate modulus (Ms = SiO₂/Na₂O) of 3.1 (water content of 62%), and distilled water to achieve different silicate moduli (i.e., 0, 0.5, and 1).

3D-AAFS mortars with a water-to-binder (w/b) ratio of 0.35 and a sand-to-binder (s/b) ratio varying from 0.8 to 1.2 were prepared. The water contained in the liquid sodium silicate was included in the total mixing water. The mix proportions of the 3D-AAFS mortars are presented in Table 2. According to the designed silicate modulus shown in Table 2, the alkaline solution was prepared 2 h before the experiment and cooled to room temperature (25 ± 3 °C). The powder materials were mixed thoroughly in a Turbula shaker (WAB AG, Basel, Switzerland) for 24 h. Before preparing the 3D-AAFS mortars, the dry-mixed powder and sand were introduced into the mixer, and then the alkaline solution was added and stirred at 500 rpm for 4 min.

Rheological Tests for AAFS Mortar

Rheological tests were performed with a Rheolab QC rheometer (Anton Paar, Graz, Austria) with a cylindrical geometry of 41.94 mm inner diameter. The type of rotator was ST22-4 V-40-SN20452, and the height and width of each blade were 40.00 mm and 22.00 mm, respectively.
During the test, the temperature was kept at 25 °C using a water bath.

Dynamic Test

The procedure of the dynamic yield stress test consisted of preshearing at 100 s⁻¹ for 60 s, resting for 15 s, ramping up from 0 to 100 s⁻¹ in 60 s, stopping for 30 s, and then ramping down from 100 to 0 s⁻¹ in 60 s. The Herschel-Bulkley model (H-B model, Equation (1)) was used to characterize the rheological behavior of the samples:

τ = τ_{d,0} + K·γ̇ⁿ    (1)

where τ is the shear stress (Pa), τ_{d,0} is the dynamic yield stress (Pa), γ̇ is the shear rate (s⁻¹), K is the consistency coefficient (Pa·s), and n is the dimensionless fluidity index.

Static Test

Before the static yield stress test, the sample was presheared at 100 s⁻¹ for 60 s to reach a consistent initial state. The test was run at a constant shear rate of 0.02 s⁻¹ for 60 s, and the peak value of the measured shear stress was taken as the static yield stress. The test was repeated every 5 min until the yield stress reached the upper limit of the instrument. In this paper, the model proposed by Roussel (Equation (2)) was used to fit the static yield stress data, defined as the measured peak shear stress:

τ_0(t) = τ_{0,0} + A_{thix}·t    (2)

where τ_{0,0} and A_{thix} are the initial static yield stress and the structural build-up rate, respectively.

Flow Table Test

The flowability of the AAFS mortar was evaluated using a flow table test on the basis of ASTM C230. All specimens were tested every 10 min after mixing. It has been proven that the spread diameter determined by the flow table test is related to pumpability and extrudability. In this work, it was found that a mixture with a spread diameter of less than 200 mm cannot be continuously extruded from the 3D printer.

Buildability Test

A self-designed device was used to measure the buildability of the 3D-AAFS mortar. The details of the device and its operation can be found in our previous work. In this method, a demolded AAFS mortar specimen with a diameter of 50 mm was subjected to a load equal to 20 times the weight of the specimen. The deformation under this 20-layer load was recorded and used to evaluate the buildability. The buildability of a mortar is acceptable when its deformation is smaller than 0.2%.

Preparation of 3D-AAFS Mortar Specimens

For mortar printing, a lab-scale 3D concrete printer with a round nozzle of 30 mm diameter, introduced in earlier works, was employed in this study. The printing speed was set at 30 mm/s, and the printing interval time was controlled at 5 min. All the samples for the mechanical tests and drying shrinkage tests were cut from the printed specimens. Blocks of 300 × 75 × 75 mm³ were printed. Immediately after printing, both ends of each printed block were cut off; the middle part of the block (285 × 75 × 75 mm³) was used for the drying shrinkage test, and the sample was cured at 25 ± 2 °C and 50 ± 5% relative humidity. For the interlayer bond strength test, the printed blocks were cured at 25 °C and 98% relative humidity for 2 days and then cut into 70 mm × 70 mm × 70 mm cubes. The cubes were then cured at 25 °C and 98% relative humidity.

Interlayer Bond Strength Test

The interlayer bond strengths of the specimens at 3, 7, 28, and 90 days were measured by the slant shear bond strength test, as shown in Figure 2. In the slant shear strength test, the load was applied directly to the mold, and the inclined angle of the specimen was set to 60 degrees. The loading rate was 2.4 kN/s. The interlayer shear stress (τ) was calculated by Equation (3).
where τ (Pa) is the interfacial shear stress, P (N) is the critical load of interface sliding, and A is the bond area of the specimen.

Drying Shrinkage

The drying shrinkage test in this study was carried out according to ASTM C157. For the measurement of drying shrinkage strains, the 3D-AAFS mortar specimen was placed in the shrinkage frame in the vertical position, and an electronic dial gauge was mounted on the top, as shown in Figure 3. The specimens were kept at a controlled temperature of 25 ± 2 °C and a relative humidity of 50 ± 5%. The dial gauge reading was recorded at the ages of 1, 2, 3, 5, 7, 9, 11, 14, 21, 28, 56, and 90 days. The shrinkage strain of each mix reported in the paper is the average of three specimens.

Rheological Behavior

Figure 4a reveals that the τ_{d,0} value of AAFS mortar is sensitive to the s/b ratio. A rise in the s/b ratio leads to a significantly higher τ_{d,0}; the τ_{d,0} of P1-S1.2-M0 is nearly twice the value of P1-S0.8-M0. The increase in the s/b ratio results in a higher solid volume fraction and greater particle friction, which is mainly responsible for the greater dynamic yield stress. Moreover, Figure 4b indicates that the FA/GGBS ratio also affects the τ_{d,0} of the mortar. The value shows a slight decline when the FA/GGBS ratio increases from 1 to 3, owing to the spherical geometry and smooth surface of the FA particles. Figure 4c shows that τ_{d,0} has a 40% reduction when the silicate modulus of the activator solution increases from 0 to 0.5. This suggests that the presence of sodium silicate in the activator solution greatly lowers τ_{d,0} compared with the AAFS mortar activated only by NaOH (P1-S1-M0). An increase in the silicate modulus of the NaOH + sodium silicate activator solution can further reduce the τ_{d,0} of the AAFS mortar. This is attributed to the stronger repulsive electrostatic force between particles caused by the increase in the silicate modulus of the activator solution.
The Roussel model (Equation (2)) was employed to fit these data, and the results are given in Table 3. The results show that mortar with a larger s/b ratio has a greater A_{thix}, indicating that a higher s/b ratio is beneficial for the structural build-up of AAFS mortar. By comparing the A_{thix} values of P1-S0.8-M0, P2-S0.8-M0, and P3-S0.8-M0, it can be found that the increase in FA proportion dramatically slows down the structural build-up rate of the mortar. The A_{thix} value of the mortar with an FA/GGBS mass ratio of 3 is only one quarter of the value for the mortar with a ratio of 1, owing to the lower dissolution rate of the glassy structure of FA compared with GGBS. In addition, the structural build-up rates of the mortars activated by NaOH + sodium silicate are much slower than those activated by NaOH alone. Increasing the silicate modulus of the activator from 0.5 to 1 slightly decreased A_{thix}. This means that an increase in the silicate modulus is unfavorable for the structure formation of AAFS mortar, owing to the retardation effect of the larger Ms on the formation of reaction products.

Figure 6a,b exhibit the changes in fluidity and buildability of AAFS mortar with time, respectively. The spread diameter measured by the flow table has been proven to be useful for predicting the extrudability of cementitious materials; a larger spread diameter corresponds to a better extrudability. It can be seen from Figure 6a that the initial spread diameters of all mortars (at 5 min) are larger than 200 mm, and all of them can be continuously extruded from the nozzle. The initial spread diameter of the AAFS mortar increases with a decreasing s/b ratio, an increasing FA/GGBS ratio, and an increasing silicate modulus of the alkaline solution. As seen in Figure 7, the spread diameter of the mortar shows a good negative correlation with its dynamic yield stress.
Figure 6a,b exhibit the changes in fluidity and buildability of AAFS mortar with time, respectively. The spread diameter measured by the flow table has been proven to be useful for predicting the extrudability of cementitious materials; a larger spread diameter corresponds to better extrudability. It can be seen from Figure 6a that the initial spread diameters of all mortars (at 5 min) are larger than 200 mm, and all of them can be continuously extruded from the nozzle. The initial spread diameter of the AAFS mortar increases with a decreasing s/b ratio, an increasing FA/GGBS ratio, and an increasing silicate modulus of the alkaline solution. As seen in Figure 7, the spread diameter of mortar shows a good negative correlation with its dynamic yield stress. Therefore, the effects of these three factors on the dynamic yield stress of AAFS mortar should be responsible for their influences on the initial extrudability. Moreover, the spread diameter of mortar with a larger s/b ratio, a lower FA proportion, or a smaller silicate modulus of the activator solution decreases faster and falls below 200 mm (the critical value corresponding to acceptable extrudability) earlier.

Figure 6b shows that the initial deformations of all AAFS mortars are greater than 0.2%, which indicates that the buildability levels of all specimens are initially not acceptable. The deformation value decreases with time, reflecting the improvement of buildability. The increase in the s/b ratio has a positive effect on the buildability of mortar, and mortar with a high sand content reaches an acceptable buildability quicker. The buildability shows an insignificant difference as the FA/GGBS mass ratio increases from 1 to 2 during the whole measurement period. However, the buildability of mortar with an FA/GGBS ratio of 3 is slightly poorer than that for a lower FA/GGBS ratio. The increase in the silicate modulus of the activator solution greatly harms the initial value and the development rate of buildability.

Figure 8 plots the static yield strengths of AAFS mortars vs. their deformation. The result shows that the buildability of mortar correlates well with its static yield strength: a higher static yield strength corresponds to a greater capacity to resist the weight of subsequently extruded layers. The effects of the s/b ratio, FA/GGBS ratio, and silicate modulus on the buildability should therefore be attributed to their effects on the static yield stress. According to the result shown in Figure 8, the buildability of AAFS mortar is acceptable when the static yield stress is larger than ~3.7 kPa. The higher structuration rate (Athix) resulting from an increase in the s/b ratio, a decrease in the FA/GGBS ratio, or a decrease in the silicate modulus accelerates the development of AAFS mortar buildability to meet the requirements of 3D printing manufacturing.
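Taken together with the linear structuration model assumed in the earlier sketch, the ~3.7 kPa criterion gives a quick estimate of when a mix becomes buildable: solve τs,0 + Athix·t ≥ 3.7 kPa for t. The numbers below are again illustrative, not fitted values from this study.

# When does the static yield stress reach the ~3.7 kPa buildability threshold?
TAU_CRITICAL_PA = 3700.0
tau_s0_pa = 1500.0      # illustrative fitted intercept
athix_pa_per_s = 1.4    # illustrative fitted structuration rate

t_build_s = max(0.0, (TAU_CRITICAL_PA - tau_s0_pa) / athix_pa_per_s)
print(f"buildability acceptable after ~{t_build_s / 60:.0f} min of resting")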
In this study, tE was defined as the duration for which the AAFS mortar maintains extrudability, which can be estimated as the time at which the spread diameter becomes smaller than 200 mm. tB was defined as the time needed to obtain good buildability, corresponding to the time at which the deformation under a 20-layer load decreased to less than 0.2%. Table 4 lists the tE and tB of the AAFS mortars estimated from Figure 6a,b. Both tE and tB determine the open time for the 3D printing process. The total operation time of 3D printing should be shorter than tE owing to the extrudability requirement. Moreover, to ensure that the bottom layers, especially the first layer, can endure the weight of the subsequent layers deposited on top without excessive distortion and failure, the start time of 3D printing is suggested to be later than tB. Table 4 shows that an increase in the s/b ratio not only shortens tE but also reduces tB. Increasing the FA/GGBS ratio of the binder or the silicate modulus of the activator retards the formation of reaction products, which slows the structural buildup of the mortar and thus extends tE and prolongs tB, as seen in Table 4.
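Both bounds of the open time can be read off the measured curves by linear interpolation: tE where the spread diameter first falls below 200 mm, and tB where the deformation under the 20-layer load first falls below 0.2%. A sketch with made-up measurement series follows; the helper function and all data are illustrative.

def crossing_time(times, values, threshold):
    """Interpolate the first time a falling series crosses `threshold`."""
    points = list(zip(times, values))
    for (t0, v0), (t1, v1) in zip(points, points[1:]):
        if v0 >= threshold > v1:
            return t0 + (threshold - v0) * (t1 - t0) / (v1 - v0)
    return None  # never crossed within the measurement window

# Illustrative measurement series (minutes), not data from this study.
t_min = [5, 20, 35, 50, 65, 80, 95, 110]
spread_mm = [240, 231, 222, 213, 205, 198, 190, 183]           # flow table
deform_pct = [0.55, 0.42, 0.33, 0.26, 0.21, 0.17, 0.14, 0.11]  # 20-layer load

t_E = crossing_time(t_min, spread_mm, 200.0)  # extrudability lost below 200 mm
t_B = crossing_time(t_min, deform_pct, 0.2)   # buildability reached below 0.2%
print(f"t_E ~ {t_E:.0f} min, t_B ~ {t_B:.0f} min")

With series like these, printing would have to start after tB and finish before tE, which is exactly the open-time window discussed above.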
Interlayer Bond Strength

The development of the interlayer bond strength of the 3D-AAFS mortars is shown in Figure 9. A rapid increase in the interlayer bond strength of 3D-AAFS mortar can be seen at early stages: most of the mortars reach more than 60% of their 90-day strength within 7 days. The early interlayer bond strength is enhanced with an increase in the s/b ratio, and an increase in the s/b ratio from 0.8 to 1.2 leads to an increase in the strength of 1 MPa at 7 days. However, the influence of the s/b ratio on the interlayer strength of 3D-AAFS mortar at 90 days is very small. Conversely, the FA/GGBS mass ratio shows a significant impact on the interlayer bond strength of the printed specimen: the strength of the specimen has a 50% reduction at 7 days and a 40% reduction at 90 days due to the increase in the FA/GGBS ratio from 1 to 3. Figure 9c reveals that the printed mortar with a greater silicate modulus has a much higher interlayer bond strength after 7 days. As the silicate modulus increases from 0 to 1, the strength of the printed specimen is finally increased by ~2 MPa.

In summary, the increase in the s/b ratio only improves the interlayer bond strength at the early stage, but it insignificantly influences the final strength. Increasing the FA/GGBS mass ratio negatively affects the interlayer bond strength, while enlarging the silicate modulus improves the strength.

Drying Shrinkage

Figure 10a-c display the changes in the drying shrinkage strain of 3D-AAFS mortars with time. The results demonstrate that the drying shrinkage of the printed specimen greatly depends on the s/b ratio and the FA/GGBS mass ratio, whereas the effect of the silicate modulus on the drying shrinkage is comparatively slight. The strains caused by drying shrinkage for P1-S0.8-M0 and P1-S1-M0 were similar within 90 days. This means that increasing the s/b ratio from 0.8 to 1 does not influence the drying shrinkage of the mortar.
However, further increasing the s/b ratio to 1.2 effectively reduces the drying shrinkage after 14 days: at 90 days, the drying shrinkage strain decreased from 0.12% to 0.08%. The 3D-AAFS mortar with a larger FA content shows a clear advantage in reducing drying shrinkage. Figure 10b shows that the drying shrinkage of the printed specimen with an FA/GGBS ratio of 1 is nearly 1.5 times larger than that for an FA/GGBS ratio of 2 and three times greater than that for an FA/GGBS ratio of 3 after 21 days. The variation in silicate modulus barely changes the drying shrinkage level of 3D-AAFS mortar before 28 days; after 28 days, the drying shrinkage increases slightly with increasing silicate modulus.

Mixture Design of 3D-AAFS Mortar Using the Simplex Centroid Design Method

Generally, the sand-to-binder ratio, the mass ratio of FA/GGBS, and the silicate modulus of the activator are the three most important parameters considered in the mix design process of alkali-activated fly ash/slag materials. These three parameters have been proven by many studies to determine the workability, mechanical properties, and durability of mixtures for traditional engineering applications.

The mix design of alkali-activated fly ash/slag materials for 3D printing manufacturing is more complex than that for conventional applications. Printable alkali-activated fly ash/slag materials should meet extra fresh-state and hardened-state requirements, including pumpability, extrudability, buildability, and interlayer strength. Moreover, the drying shrinkage of alkali-activated materials is more serious than that of Portland cement-based materials. The cracks caused by drying shrinkage will threaten the appearance, quality, and even safety of 3D-printed alkali-activated material construction. Therefore, drying shrinkage should be a crucial hardened-state requirement if alkali-activated materials are used for 3D printing.

According to Sections 3.2-3.4, the mix proportion parameters, including the s/b ratio, FA/GGBS ratio, and silicate modulus, exert great influences on the printability, interlayer bond strength, and drying shrinkage of the 3D-printed alkali-activated fly ash/slag mortar. An increase in the s/b ratio shortens tE and tB, improves the interlayer bond strength, and reduces the drying shrinkage of the 3D-AAFS mortar. The rise of the FA/GGBS mass ratio of the binder leads to longer tE and tB, a weaker interlayer bond strength, and a smaller drying shrinkage.
In addition, the increase in the silicate modulus of the activator extends tE, prolongs tB, enhances the interlayer bond strength, and enlarges the drying shrinkage.

Given that all the properties of 3D-AAFS mortar are closely related to the three mix proportion parameters, it is difficult to optimize the mix proportion to satisfy all the requirements. The simplex centroid design method allows the properties of mixtures simultaneously controlled by three factors to be investigated. In the simplex centroid design method, if there are n variables, 2^n − 1 groups of tests are needed to obtain the corresponding contour map. For 3D-AAFS mortar, the factors s/b ratio, FA/GGBS ratio, and silicate modulus are independent of each other. Therefore, a linear substitution calculation method is proposed, which transforms the actual values of the variables into equivalent values that sum to 100%. The corresponding calculation formulas and values are shown in Table 5 and Equations (4)-(6), where xi, zi, and Δzi are the equivalent value, the actual value of the variable, and the maximum difference between actual values, respectively. Figure 11 describes the seven test points of the simplex centroid design method with three variables, namely, the vertices, the edge midpoints, and the center of the triangle.

Table 5. The actual value and linear substitution of three factors.
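As an illustration of how the seven runs are laid out, the sketch below generates the simplex centroid points (three vertices, three edge midpoints, and the centroid) and maps each equivalent fraction back to an actual parameter value over its range. The (min, max) ranges are assumptions standing in for Table 5, and the back-substitution shown is one plausible reading of Equations (4)-(6), which are not reproduced here.

# Simplex centroid design for three factors: 2**3 - 1 = 7 runs.
ranges = {  # assumed stand-ins for the ranges in Table 5
    "s/b ratio": (0.8, 1.2),
    "FA/GGBS ratio": (1.0, 3.0),
    "silicate modulus": (0.0, 1.0),
}

design_points = [
    (1, 0, 0), (0, 1, 0), (0, 0, 1),              # vertices
    (0.5, 0.5, 0), (0.5, 0, 0.5), (0, 0.5, 0.5),  # edge midpoints
    (1 / 3, 1 / 3, 1 / 3),                        # centroid
]

for point in design_points:
    # x_i is the equivalent fraction; actual value z_i = z_min + x_i * dz_i.
    mix = {name: lo + x * (hi - lo)
           for x, (name, (lo, hi)) in zip(point, ranges.items())}
    print({name: round(v, 2) for name, v in mix.items()})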
Based on the experimental results of the properties shown in Table 6, the contours of tE, tB, the 90-day interlayer bond strength, and the 90-day drying shrinkage of 3D-AAFS mortar related to the s/b ratio, FA/GGBS mass ratio, and silicate modulus are drawn in Figure 12a-d. These contours can not only be used to predict the performance of 3D-AAFS mortar from its mix proportion, but can also be used to optimize the mix proportion to achieve the designed properties. For example, the interlayer bond strength of 3D printing material was suggested to be higher than ~6 MPa, and the drying shrinkage of alkali-activated materials should be lower than ~0.09%. In practice, tE is always required to be larger than 1 h. From the contours of each property, three critical lines could be acquired to meet these required values, as shown in Figure 13. The overlapping area of the three regions in Figure 13 is regarded as the optimal mix proportion of AAFS mortar for 3D printing manufacturing, i.e., the optimum s/b ratio, FA/GGBS mass ratio, and silicate modulus are 0.8~1.0, 2.2~2.6, and 0.5~1.0, respectively.

Figure 13. Optimization of the mix proportion of AAFS mortar for 3D printing manufacture.

Conclusions

The following conclusions can be summarized from the presented findings:

(1) The composition of alkali-activated fly ash/slag (AAFS) mortar exerts a tremendous influence on the printability of AAFS mortar, which relates closely to the rheological properties. The increase in the s/b ratio enlarges the dynamic yield stress and accelerates the structural buildup of mortar, resulting in a faster loss rate of extrudability and a quicker growth rate of buildability. Conversely, increasing the FA/GGBS mass ratio or the silicate modulus reduces both the dynamic yield stress and the structuration rate, which extends the duration of extrudability and slows down the development of buildability. The printability of AAFS mortar is most sensitive to the silicate modulus of the activator.

(2) The hardened-state properties of 3D-printed AAFS mortar also depend on its mix proportion. Increasing the s/b ratio is conducive to improving the interlayer bond strength and diminishing the drying shrinkage. The rise in the FA/GGBS mass ratio weakens the interlayer bond strength and reduces the drying shrinkage. The use of an activator with a larger silicate modulus is beneficial to the interlayer bond strength, but it causes a slightly larger drying shrinkage.

(3) A simplex centroid design method was developed for the mix proportioning of extrusion-based 3D-printed AAFS mortar for the first time, taking printability, interlayer bond strength, and drying shrinkage into consideration at the same time. By restricting the fresh-state and hardened-state requirements, the optimum mix proportion of 3D-AAFS mortar can be obtained using this method.

Informed Consent Statement: Not applicable.

Data Availability Statement: The data presented in this study are available on request from the corresponding author.
Conflicts of Interest: The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper.
// Print the smallest and largest n-digit numbers whose digit sum is exactly k,
// or "-1 -1" if no such numbers exist.
#include <bits/stdc++.h>
using namespace std;

char ans[101];

int main() {
    int n, k;
    cin >> n >> k;
    int num = k / 9;  // how many full '9' digits the sum allows
    int rem = k % 9;  // leftover digit sum
    // Special case: a single digit (k == 0 gives "0 0" here).
    if (n == 1 && k < 10) {
        cout << k << " " << k;
        return 0;
    }
    // Impossible: digit sum too large, or zero sum with more than one digit.
    if (k > n * 9 || k == 0) {
        cout << "-1 -1";
        return 0;
    }
    // Smallest number: push the 9s to the end and keep a leading '1',
    // borrowed from the remainder digit.
    if (rem) {
        for (int i = 0; i < num; i++) ans[n - i] = '9';
        ans[1] = '1';
        if (n - num != 1)
            ans[n - num] = '0' + (rem - 1);
        else
            ans[1] = '0' + rem;
        for (int i = 2; i < n - num; i++) ans[i] = '0';
    } else {
        // rem == 0: borrow 1 from the last full '9', turning it into '8'.
        for (int i = 0; i < num - 1; i++) ans[n - i] = '9';
        ans[1] = '1';
        if (n - num + 1 != 1)
            ans[n - num + 1] = '0' + 8;
        else
            ans[1] = '0' + 9;
        for (int i = 2; i < n - num + 1; i++) ans[i] = '0';
    }
    for (int i = 1; i <= n; i++) cout << ans[i];
    cout << " ";
    // Largest number: greedily place the 9s at the front.
    for (int i = 1; i <= num; i++) ans[i] = '9';
    ans[num + 1] = '0' + rem;
    for (int i = num + 2; i <= n; i++) ans[i] = '0';
    for (int i = 1; i <= n; i++) cout << ans[i];
    return 0;
}
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import * as assert from 'assert'; import URI from 'vs/base/common/uri'; import {Range} from 'vs/editor/common/core/range'; import {IMode, IndentAction} from 'vs/editor/common/modes'; import {TokenSelectionSupport} from 'vs/editor/contrib/smartSelect/common/tokenSelectionSupport'; import {createMockModelService} from 'vs/editor/test/common/servicesTestUtils'; import {MockTokenizingMode} from 'vs/editor/test/common/mocks/mockMode'; import {LanguageConfigurationRegistry} from 'vs/editor/common/modes/languageConfigurationRegistry'; class MockJSMode extends MockTokenizingMode { constructor() { super('js-tokenSelectionSupport', 'mock-js'); LanguageConfigurationRegistry.register(this.getId(), { brackets: [ ['(', ')'], ['{', '}'], ['[', ']'] ], onEnterRules: [ { // e.g. /** | */ beforeText: /^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/, afterText: /^\s*\*\/$/, action: { indentAction: IndentAction.IndentOutdent, appendText: ' * ' } }, { // e.g. /** ...| beforeText: /^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/, action: { indentAction: IndentAction.None, appendText: ' * ' } }, { // e.g. * ...| beforeText: /^(\t|(\ \ ))*\ \*(\ ([^\*]|\*(?!\/))*)?$/, action: { indentAction: IndentAction.None, appendText: '* ' } }, { // e.g. */| beforeText: /^(\t|(\ \ ))*\ \*\/\s*$/, action: { indentAction: IndentAction.None, removeText: 1 } }, { // e.g. *-----*/| beforeText: /^(\t|(\ \ ))*\ \*[^/]*\*\/\s*$/, action: { indentAction: IndentAction.None, removeText: 1 } } ] }); } } suite('TokenSelectionSupport', () => { let modelService = createMockModelService(); let tokenSelectionSupport = new TokenSelectionSupport(modelService); let _mode: IMode = new MockJSMode(); function assertGetRangesToPosition(text:string[], lineNumber:number, column:number, ranges:Range[]): void { let uri = URI.file('test.js'); modelService.createModel(text.join('\n'), _mode, uri); let actual = tokenSelectionSupport.getRangesToPositionSync(uri, { lineNumber: lineNumber, column: column }); let actualStr = actual.map(r => new Range(r.range.startLineNumber, r.range.startColumn, r.range.endLineNumber, r.range.endColumn).toString()); let desiredStr = ranges.map(r => String(r)); assert.deepEqual(actualStr, desiredStr); modelService.destroyModel(uri); } test('getRangesToPosition #1', () => { assertGetRangesToPosition([ 'function a(bar, foo){', '\tif (bar) {', '\t\treturn (bar + (2 * foo))', '\t}', '}' ], 3, 20, [ new Range(1, 1, 5, 2), new Range(1, 21, 5, 2), new Range(2, 1, 4, 3), new Range(2, 11, 4, 3), new Range(3, 1, 4, 2), new Range(3, 1, 3, 27), new Range(3, 10, 3, 27), new Range(3, 11, 3, 26), new Range(3, 17, 3, 26), new Range(3, 18, 3, 25), // new Range(3, 19, 3, 20) ]); }); });
<filename>internal/store/sqlstore/freelancerrep.go package sqlstore import ( "github.com/go-park-mail-ru/2019_2_Comandus/internal/model" ) type FreelancerRepository struct { store *Store } func (r *FreelancerRepository) Create(f *model.Freelancer) error { return r.store.db.QueryRow( "INSERT INTO freelancers (accountId, registrationDate, country, city, address, phone, tagLine, " + "overview, experienceLevelId, specialityId) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id", f.AccountId, f.RegistrationDate, f.Country, f.City, f.Address, f.Phone, f.TagLine, f.Overview, f.ExperienceLevelId, f.SpecialityId, ).Scan(&f.ID) } func (r *FreelancerRepository) Find(id int64) (*model.Freelancer, error) { f := &model.Freelancer{} if err := r.store.db.QueryRow( "SELECT id, accountId, registrationDate, country, city, address, phone, tagLine, " + "overview, experienceLevelId, specialityId FROM freelancers WHERE id = $1", id, ).Scan( &f.ID, &f.AccountId, &f.RegistrationDate, &f.Country, &f.City, &f.Address, &f.Phone, &f.TagLine, &f.Overview, &f.ExperienceLevelId, &f.SpecialityId, ); err != nil { return nil, err } return f, nil } func (r *FreelancerRepository) FindByUser(accountId int64) (*model.Freelancer, error) { f := &model.Freelancer{} if err := r.store.db.QueryRow( "SELECT id, accountId, registrationDate, country, city, address, phone, tagLine, " + "overview, experienceLevelId, specialityId FROM freelancers WHERE accountId = $1", accountId, ).Scan( &f.ID, &f.AccountId, &f.RegistrationDate, &f.Country, &f.City, &f.Address, &f.Phone, &f.TagLine, &f.Overview, &f.ExperienceLevelId, &f.SpecialityId, ); err != nil { return nil, err } return f, nil } func (r *FreelancerRepository) Edit(f * model.Freelancer) error { return r.store.db.QueryRow("UPDATE freelancers SET country = $1, city = $2, address = $3, " + "phone = $4, tagLine = $5, overview = $6, experienceLevelId = $7, specialityId = $8 WHERE id = $9 RETURNING id", f.Country, f.City, f.Address, f.Phone, f.TagLine, f.Overview, f.ExperienceLevelId, f.SpecialityId, f.ID, ).Scan(&f.ID) }
package classpath

import (
	"fmt"
	"github.com/yuntao84/Civet/tool"
	"os"
	"path/filepath"
	"strings"
)

// Classpath resolves class files from a list of sources (directories,
// jar/zip archives, and wildcard entries) parsed from a classpath string.
type Classpath struct {
	sources []javaSource
	path    string
}

func NewClasspath(path string) (*Classpath, error) {
	cp := &Classpath{path: path}
	err := cp.parseClasspath()
	if err != nil {
		return nil, err
	}
	return cp, nil
}

// ReadClass returns the bytes of the named class, or a non-nil error if no
// source contains it.
func (self *Classpath) ReadClass(className string) ([]byte, error) {
	has, data, err := self.read(self.sources, className)
	if has {
		return data, err
	}
	if err == nil {
		err = fmt.Errorf("class not found: %s", className)
	}
	return nil, err
}

func (self *Classpath) read(sources []javaSource, className string) (has bool, data []byte, err error) {
	for _, source := range sources {
		has, data, err = source.readClass(className)
		if has {
			return has, data, err
		}
	}
	return
}

func (self *Classpath) parseClasspath() (err error) {
	for _, path := range strings.Split(self.path, string(os.PathListSeparator)) {
		if strings.TrimSpace(path) == "" {
			continue
		}
		// Wildcard entry, e.g. "lib/*": expand to all jars in the directory.
		if tool.StringHasSuffix(path, string(os.PathSeparator)+"*") {
			dir := path[:len(path)-1]
			if !tool.FileExists(dir) || !tool.FileIsDir(dir) {
				continue
			}
			source, err := self.parseWildcardSource(path)
			if err != nil {
				return err
			}
			self.sources = append(self.sources, source)
			continue
		}
		if !tool.FileExists(path) {
			continue
		}
		// Archive entry (.jar or .zip file).
		if tool.StringHasSuffix(path, ".jar", ".zip") {
			if tool.FileIsDir(path) {
				continue
			}
			source, err := self.parseJarSource(path)
			if err != nil {
				return err
			}
			self.sources = append(self.sources, source)
			continue
		}
		// Plain directory entry.
		if !tool.FileIsDir(path) {
			continue
		}
		source, err := self.parseDirSource(path)
		if err != nil {
			return err
		}
		self.sources = append(self.sources, source)
	}
	return
}

func (self *Classpath) parseJarSource(path string) (source javaSource, err error) {
	_, err = filepath.Abs(path)
	if err != nil {
		return
	}
	source = &javaSourceJar{p: path}
	return
}

func (self *Classpath) parseDirSource(path string) (source javaSource, err error) {
	_, err = filepath.Abs(path)
	if err != nil {
		return
	}
	source = &javaSourceDir{p: path}
	return
}

func (self *Classpath) parseWildcardSource(path string) (source javaSource, err error) {
	baseDir := path[:len(path)-1]
	_, err = filepath.Abs(baseDir)
	if err != nil {
		return
	}
	source, err = newWildcardJavaSource(path)
	return
}
def execute_move(self, move, color):
    """Place a stone of the given color at move = (y, x) on the board.

    Assumes the board object supports ``self[y][x]`` item assignment and
    that ``color == 1`` denotes the white player.
    """
    move_y, move_x = move
    self[move_y][move_x] = color
    # Record the last move per color on the instance so later code can
    # read it (plain local assignments would be lost on return).
    if color == 1:
        self.lastMoveW = move
    else:
        self.lastMoveB = move
/** * Gary is an avid hiker. He tracks his hikes meticulously, paying close attention to small details like topography. During his last * hike he took exactly n steps. For every step he took, he noted if it was an uphill, U, or a downhill, D step. Gary's hikes start * and end at sea level and each step up or down represents a 1 unit change in altitude. We define the following terms: * * A mountain is a sequence of consecutive steps above sea level, starting with a step up from sea level and ending with a step down * to sea level. * * A valley is a sequence of consecutive steps below sea level, starting with a step down from sea level and ending with a step up to * sea level. * * Given Gary's sequence of up and down steps during his last hike, find and print the number of valleys he walked through. For example, * if Gary's path is s = [DDUUUUDD], he first enters a valley 2 units deep. Then he climbs out an up onto a mountain 2 units high. * Finally, he returns to sea level and ends his hike. * * Function Description * * Complete the countingValleys function in the editor below. It must return an integer that denotes the number of valleys Gary traversed. * * countingValleys has the following parameter(s): * * n: the number of steps Gary takes * s: a string describing his path * * Input Format * * The first line contains an integer , the number of steps in Gary's hike. * The second line contains a single string , of characters that describe his path. * * Constraints * * 2<=n<=10^6 * s[i] E {UD} * * Output Format * * Print a single integer that denotes the number of valleys Gary walked through during his hike. * * Sample Input * * 8 * UDDDUDUU * * Sample Output * 1 * * Explanation * * If we represent _ as sea level, a step up as /, and a step down as \, Gary's hike can be drawn as: * *_/\ _ * \ / * \/\/ * * He enters and leaves one valley. * * @author Mariana Azevedo * @since 15/11/2019 * */ public class CountingValleys { private static final Logger logger = LoggerFactory.getLogger(CountingValleys.class); public static void main(String[] args) { logger.info(String.valueOf(countingValleys(8, "UDDDUDUU"))); //1 logger.info(String.valueOf(countingValleys(12, "DDUUDDUDUUUD"))); //2 logger.info(String.valueOf(countingValleys(10, "UDUUUDUDDD"))); //0 logger.info(String.valueOf(countingValleys(10, "DUDDDUUDUU"))); //2 } // Complete the countingValleys function below. static int countingValleys(int n, String s) { int numValleys = 0; int seaLvl = 0; for(int i = 0; i < n; i++) { char c = s.charAt(i); if(c == 'D') { seaLvl -= 1; } else { seaLvl += 1; } if(seaLvl == 0 && c == 'U') numValleys += 1; } return numValleys; } }
/** * @author Gary O'Neall * */ public class TestSpdxListedLicense { Model model; IModelContainer modelContainer = new IModelContainer() { @Override public String getNextSpdxElementRef() { return null; } @Override public Model getModel() { return model; } @Override public String getDocumentNamespace() { return "http://testNameSPace#"; } @Override public boolean spdxElementRefExists(String elementRef) { return false; } @Override public void addSpdxElementRef(String elementRef) throws InvalidSPDXAnalysisException { } @Override public String documentNamespaceToId(String externalNamespace) { return null; } @Override public String externalDocumentIdToNamespace(String docId) { return null; } @Override public Resource createResource(Resource duplicate, String uri, Resource type, IRdfModel modelObject) { if (duplicate != null) { return duplicate; } else if (uri == null) { return model.createResource(type); } else { return model.createResource(uri, type); } } @Override public boolean addCheckNodeObject(Node node, IRdfModel rdfModelObject) { return false; } }; /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { } @Test public void testCreate() throws InvalidSPDXAnalysisException, InvalidLicenseTemplateException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url1", "source url2"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String template = "template"; String licenseHtml = "<html>html</html>"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, true, true, licenseHtml, false, null); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(id, compLic.getLicenseId()); assertEquals(text, compLic.getLicenseText()); List<String> verify = stdl.verify(); assertEquals(0, verify.size()); verify = compLic.verify(); assertEquals(0, verify.size()); assertEquals(name, compLic.getName()); assertEquals(sourceUrls.length, compLic.getSeeAlso().length); assertEquals(notes, compLic.getComment()); assertEquals(standardLicenseHeader, compLic.getStandardLicenseHeader()); assertEquals(template, compLic.getStandardLicenseTemplate()); assertTrue(compLic.isFsfLibre()); assertTrue(compLic.isOsiApproved()); assertFalse(compLic.isDeprecated()); } @Test public void testSetComment() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url2", "source url3"}; String comments = "comments1"; String comments2 = "comments2"; String standardLicenseHeader = "Standard license header"; String template = "template"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, comments, standardLicenseHeader, template, true); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(comments, compLic.getComment()); compLic.setComment(comments2); assertEquals(comments2, compLic.getComment()); SpdxListedLicense compLic2 = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(comments2, compLic2.getComment()); StringWriter writer = new 
StringWriter(); model.write(writer); @SuppressWarnings("unused") String rdfstring = writer.toString(); List<String> verify = stdl.verify(); assertEquals(0, verify.size()); verify = compLic.verify(); assertEquals(0, verify.size()); } @Test public void testSetFsfLibre() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url1", "source url2"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String template = "template"; String licenseHtml = "<html>html</html>"; String deprecatedVersion = "3.2"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, false, false, licenseHtml, false, null); assertFalse(stdl.isFsfLibre()); stdl.setFsfLibre(true); assertTrue(stdl.isFsfLibre()); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertTrue(stdl.isFsfLibre()); compLic.setFsfLibre(false); assertFalse(compLic.isFsfLibre()); SpdxListedLicense compLic2 = new SpdxListedLicense(modelContainer, licResource.asNode()); assertFalse(compLic2.isFsfLibre()); List<String> verify = stdl.verify(); assertEquals(0, verify.size()); verify = compLic.verify(); assertEquals(0, verify.size()); // Test for null value SpdxListedLicense stdl2 = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, false, null, licenseHtml, true, deprecatedVersion); assertTrue(stdl2.getFsfLibre() == null); assertFalse(stdl2.isFsfLibre()); assertFalse(stdl2.isNotFsfLibre()); Resource licResource2 = stdl2.createResource(modelContainer); SpdxListedLicense compLic3 = new SpdxListedLicense(modelContainer, licResource2.asNode()); assertTrue(compLic3.getFsfLibre() == null); assertFalse(compLic3.isFsfLibre()); assertFalse(compLic3.isNotFsfLibre()); compLic3.setFsfLibre(false); assertFalse(compLic3.getFsfLibre() == null); assertFalse(compLic3.isFsfLibre()); assertTrue(compLic3.isNotFsfLibre()); SpdxListedLicense compLic4 = new SpdxListedLicense(modelContainer, licResource2.asNode()); assertFalse(compLic4.getFsfLibre() == null); assertFalse(compLic4.isFsfLibre()); assertTrue(compLic4.isNotFsfLibre()); } @Test public void testSetDeprecated() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url2", "source url3"}; String comments = "comments1"; String standardLicenseHeader = "Standard license header"; String template = "template"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, comments, standardLicenseHeader, template, true); stdl.setDeprecated(true); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(true, compLic.isDeprecated()); compLic.setDeprecated(false); assertEquals(false, compLic.isDeprecated()); SpdxListedLicense compLic2 = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(false, compLic2.isDeprecated()); List<String> verify = stdl.verify(); assertEquals(0, verify.size()); verify = compLic.verify(); assertEquals(0, verify.size()); } @Test public void testSetIDandText() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = 
"AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url2", "source url3"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String template = "template"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, true); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(id, compLic.getLicenseId()); assertEquals(text, compLic.getLicenseText()); String newID = "newID"; String newText = "new Text"; compLic.setLicenseId(newID); compLic.setLicenseText(newText); assertEquals(newID, compLic.getLicenseId()); assertEquals(newText, compLic.getLicenseText()); SpdxListedLicense compLic2 = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(newID, compLic2.getLicenseId()); assertEquals(newText, compLic2.getLicenseText()); List<String> verify = stdl.verify(); assertEquals(1, verify.size()); // verify will fail since this is not a valid listed license ID verify = compLic.verify(); assertEquals(1, verify.size()); // verify will fail since this is not a valid listed license ID } @Test public void testCreateMultile() { // test to make sure if we create a node with the same id, we // get back the same URI model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls1 = new String[] {"source url1", "source url2"}; String[] sourceUrls2 = new String[] {"source url3", "source url4"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String template = "template"; String id2 = "Apache-1.0"; String name2 = "name2"; try { SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls1, notes, standardLicenseHeader, template, true); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense stdl3 = new SpdxListedLicense(name2, id2, text, sourceUrls2, notes, standardLicenseHeader, template, true); @SuppressWarnings("unused") Resource compResource3 = stdl3.createResource(modelContainer); SpdxListedLicense stdl2 = new SpdxListedLicense(name2, id, text, sourceUrls2, notes, standardLicenseHeader, template, true); Resource compResource = stdl2.createResource(modelContainer); assertTrue(licResource.equals(compResource)); assertEquals(licResource.getURI(), compResource.getURI()); } catch (InvalidSPDXAnalysisException e) { throw new RuntimeException(e); } } @Test public void testClone() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url1", "source url2"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String template = "template"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, true); stdl.setDeprecated(true); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); SpdxListedLicense lic2 = (SpdxListedLicense)compLic.clone(); assertEquals(id, lic2.getLicenseId()); assertEquals(text, lic2.getLicenseText()); assertEquals(notes, lic2.getComment()); assertEquals(name, lic2.getName()); assertTrue(compareArrayContent(sourceUrls, lic2.getSeeAlso())); assertEquals(standardLicenseHeader, lic2.getStandardLicenseHeader()); 
assertEquals(template, lic2.getStandardLicenseTemplate()); assertTrue(lic2.getResource() == null); assertEquals(true, lic2.isDeprecated()); } /** * @param strings1 * @param strings2 * @return true if both arrays contain the same content independent of order */ private boolean compareArrayContent(String[] strings1, String[] strings2) { if (strings1.length != strings2.length) { return false; } for (int i = 0; i < strings1.length; i++) { boolean found = false; for (int j = 0; j < strings2.length; j++) { if (strings1[i].equals(strings2[j])) { found = true; break; } } if (!found) { return false; } } return true; } @Test public void testEquivalent() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String name2 = "name2"; String id = "AFL-3.0"; String text = "text"; String text2 = "text2"; String[] sourceUrls = new String[] {"source url1", "source url2"}; String[] sourceUrls2 = new String[] {"source url2"}; String notes = "notes"; String notes2 = "notes2"; String standardLicenseHeader = "Standard license header"; String standardLicenseHeader2 = "Standard license header2"; String template = "template"; String template2 = "template2"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, true); assertTrue(stdl.equivalent(stdl)); SpdxListedLicense stdl2 = new SpdxListedLicense(name2, id, text2, sourceUrls2, notes2, standardLicenseHeader2, template2, false); assertTrue(stdl2.equivalent(stdl)); stdl2.setLicenseId("Apache-2.0"); assertFalse(stdl.equivalent(stdl2)); } @Test public void testSetHeaderTemplate() throws InvalidSPDXAnalysisException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url2", "source url3"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String standardLicenseHeaderTemplate = "Standard license<<beginOptional>>optional<<endOptional>> header"; String template = "template"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, standardLicenseHeaderTemplate, true, true); assertEquals(standardLicenseHeaderTemplate, stdl.getStandardLicenseHeaderTemplate()); Resource licResource = stdl.createResource(modelContainer); SpdxListedLicense compLic = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(standardLicenseHeaderTemplate, compLic.getStandardLicenseHeaderTemplate()); String newHeaderTemplate = "New standard license template"; compLic.setStandardLicenseHeaderTemplate(newHeaderTemplate); assertEquals(newHeaderTemplate, compLic.getStandardLicenseHeaderTemplate()); SpdxListedLicense compLic2 = new SpdxListedLicense(modelContainer, licResource.asNode()); assertEquals(newHeaderTemplate, compLic2.getStandardLicenseHeaderTemplate()); List<String> verify = stdl.verify(); assertEquals(0, verify.size()); verify = compLic.verify(); assertEquals(0, verify.size()); } @Test public void testSetHeaderTemplateHtml() throws InvalidSPDXAnalysisException, InvalidLicenseTemplateException { model = ModelFactory.createDefaultModel(); String name = "name"; String id = "AFL-3.0"; String text = "text"; String[] sourceUrls = new String[] {"source url2", "source url3"}; String notes = "notes"; String standardLicenseHeader = "Standard license header"; String standardLicenseHeaderTemplate = "Standard license<<beginOptional>>optional<<endOptional>> header"; String template = 
"template"; String standardLicenseHeaderHtml = "<h1>licenseHeader</h1>"; String textHtml = "<h1>text</h1>"; SpdxListedLicense stdl = new SpdxListedLicense(name, id, text, sourceUrls, notes, standardLicenseHeader, template, standardLicenseHeaderTemplate, true, true, textHtml, standardLicenseHeaderHtml); assertEquals(textHtml, stdl.getLicenseTextHtml()); assertEquals(standardLicenseHeaderHtml, stdl.getLicenseHeaderHtml()); String newStandardLicenseHeaderHtml = "<h2>licenseHeader2</h2>"; String newTextHtml = "<h2>text2</h2>"; stdl.setLicenseTextHtml(newTextHtml); stdl.setLicenseHeaderHtml(newStandardLicenseHeaderHtml); assertEquals(newTextHtml, stdl.getLicenseTextHtml()); assertEquals(newStandardLicenseHeaderHtml, stdl.getLicenseHeaderHtml()); } }
// Package nrdb provides a programmatic API for interacting with NRDB, New Relic's Datastore package nrdb import "context" func (n *Nrdb) Query(accountID int, query NRQL) (*NRDBResultContainer, error) { return n.QueryWithContext(context.Background(), accountID, query) } // QueryWithContext facilitates making a NRQL query. func (n *Nrdb) QueryWithContext(ctx context.Context, accountID int, query NRQL) (*NRDBResultContainer, error) { respBody := gqlNrglQueryResponse{} vars := map[string]interface{}{ "accountId": accountID, "query": query, } if err := n.client.NerdGraphQueryWithContext(ctx, gqlNrqlQuery, vars, &respBody); err != nil { return nil, err } return &respBody.Actor.Account.NRQL, nil } func (n *Nrdb) QueryHistory() (*[]NRQLHistoricalQuery, error) { return n.QueryHistoryWithContext(context.Background()) } func (n *Nrdb) QueryHistoryWithContext(ctx context.Context) (*[]NRQLHistoricalQuery, error) { respBody := gqlNrglQueryHistoryResponse{} vars := map[string]interface{}{} if err := n.client.NerdGraphQueryWithContext(ctx, gqlNrqlQueryHistoryQuery, vars, &respBody); err != nil { return nil, err } return &respBody.Actor.NRQLQueryHistory, nil } const ( gqlNrqlQueryHistoryQuery = `{ actor { nrqlQueryHistory { accountId nrql timestamp } } }` gqlNrqlQuery = `query($query: Nrql!, $accountId: Int!) { actor { account(id: $accountId) { nrql(query: $query) { currentResults otherResult previousResults results totalResult metadata { eventTypes facets messages timeWindow { begin compareWith end since until } } } } } }` ) type gqlNrglQueryResponse struct { Actor struct { Account struct { NRQL NRDBResultContainer } } } type gqlNrglQueryHistoryResponse struct { Actor struct { NRQLQueryHistory []NRQLHistoricalQuery } }
Carcinosarcoma of the Uterus: An Elusive Diagnosis

Introduction: Carcinosarcoma or Malignant Mixed Mullerian Tumor (MMMT) of the uterus is a rare malignant tumor comprising both carcinomatous and sarcomatous components. Worldwide, it accounts for two to five percent of all uterine malignancies. However, there is a paucity of reports in the Nepalese literature.

Case report: A 62-year-old postmenopausal woman with diabetes and hypertension presented with urinary symptoms for two months and passage of a fleshy mass per vagina for two days. Ultrasound was inconclusive, whereas Magnetic Resonance Imaging (MRI) showed an endocervical mass. Pre-operative biopsy of the mass suggested leiomyosarcoma. Abdominal hysterectomy with bilateral salpingo-oophorectomy and pelvic and para-aortic lymphadenectomy was performed. Per-operatively, an atrophic uterus, a dimple in the fundus, and ballooning of the lower uterine segment and vagina were noted. The cut section showed an exophytic polypoidal mass with its base in the fundus. Histopathology revealed Stage IA carcinosarcoma with aggressive mitotic figures. Immunohistochemistry confirmed the diagnosis of carcinosarcoma with a heterologous rhabdomyosarcomatous component. Adjuvant chemotherapy and radiation therapy were advised.

Conclusion: The preoperative diagnosis of carcinosarcoma or MMMT is difficult, and the tumor may be missed on biopsy as well. Owing to its aggressive nature and high rate of post-surgical recurrence, carcinosarcoma accounts for around one-fifth of deaths due to uterine malignancies. Surgery is the primary treatment modality, yet much study is needed before evidence-based adjuvant management for improving its outcome is established.

Introduction

Uterine malignancies constitute 7% of all female malignancies. Carcinosarcoma, also known as Malignant Mixed Mullerian Tumor (MMMT), accounts for 2-5% of all uterine malignancies.1,2 During embryogenesis, the mullerian duct is formed from the invagination of the coelomic epithelium lateral to the mesonephros. The mullerian duct comprises both epithelial and mesenchymal cell types.3 In the female, the duct gives rise to the fallopian tubes, uterus, cervix, and upper part of the vagina. Carcinosarcomas are rare but highly aggressive, biphasic tumors with mixed epithelial and mesenchymal components, and they have also been reported to occur in the vagina, cervix, and ovary.4,5,6 However, the Nepalese literature is sparse in its reporting.
// NewTestBlockTestService creates and returns a new TestBlockTestService instance func NewTestBlockTestService(cfg *config.NucleusConfig, logger lumber.Logger) (*TestBlockTestService, error) { return &TestBlockTestService{ cfg: cfg, logger: logger, requests: requestutils.New(logger), endpoint: global.NeuronHost + "/blocktest", blockTestEntities: make(map[string][]blocktest), errChan: make(chan error, 1), }, nil }
/** * Create <plugins> node under a given parent node name if not already exists * (either <build><plugins> or <build><pluginManagement><plugins>) * Parent node would be created under the root node of the pom if not exists. * * @param xml Pom file as an org.w3c.Document object * @param parentNodeName Under which parent this plugins node should be created * @return 'plugins' node in a pom file */ private static Node createPluginsNode(Document xml, String parentNodeName) { Node root = xml.getDocumentElement(); NodeList rootChildren = root.getChildNodes(); boolean parentExists = false; Node parent = null; for (int i = 0; i < rootChildren.getLength(); i++) { if (rootChildren.item(i).getNodeName().equals(Constants.Maven.MAVEN_TAG_BUILD)) { parentExists = true; parent = rootChildren.item(i); break; } } if (!parentExists) { Node parentNode = xml.createElement(Constants.Maven.MAVEN_TAG_BUILD); root.appendChild(parentNode); parent = parentNode; } if (parentNodeName.equals(Constants.Maven.MAVEN_TAG_PLUGIN_MANAGEMENT)) { NodeList parentChildren = parent.getChildNodes(); boolean pluginManagementExists = false; for (int i = 0; i < parentChildren.getLength(); i++) { if (parentChildren.item(i).getNodeName().equals(Constants.Maven.MAVEN_TAG_PLUGIN_MANAGEMENT)) { pluginManagementExists = true; parent = parentChildren.item(i); break; } } if (!pluginManagementExists) { Node pluginManagement = xml.createElement(Constants.Maven.MAVEN_TAG_PLUGIN_MANAGEMENT); parent.appendChild(pluginManagement); parent = pluginManagement; } } NodeList parentChildren = parent.getChildNodes(); boolean pluginsExists = false; Node plugins = null; for (int i = 0; i < parentChildren.getLength(); i++) { if (parentChildren.item(i).getNodeName().equals(Constants.Maven.MAVEN_TAG_PLUGINS)) { pluginsExists = true; plugins = parentChildren.item(i); break; } } if (!pluginsExists) { plugins = xml.createElement(Constants.Maven.MAVEN_TAG_PLUGINS); parent.appendChild(plugins); } return plugins; }
import java.util.ArrayList;
import java.util.Collections;
import java.util.Scanner;

// Brute-force subset enumeration: count subsets of at least two elements
// whose sum lies in [l, r] and whose max-min difference is at least x.
public class shere {
    static ArrayList<Integer> list = new ArrayList<>();
    static int contor = 0, n, l, r, x;

    // Check whether the subset selected by sol[0..k-1] meets all constraints.
    static void verify(int sol[], int k) {
        int nr = 0, suma = 0;
        int begin = -1, end = -1;
        for (int i = 0; i < k; i++) {
            if (sol[i] != 0) {
                nr++;
                suma += list.get(i);
                if (begin < 0) begin = i;
                else end = i;
            }
        }
        // list is sorted, so list[end] - list[begin] is the max-min difference.
        if (nr >= 2 && suma >= l && suma <= r && list.get(end) - list.get(begin) >= x)
            contor++;
    }

    // Backtracking over all 2^n inclusion/exclusion choices.
    static void bkt(int sol[], int k) {
        if (k == list.size())
            verify(sol, k);
        else
            for (int i = 0; i <= 1; i++) {
                sol[k] = i;
                bkt(sol, k + 1);
            }
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        n = sc.nextInt();
        l = sc.nextInt();
        r = sc.nextInt();
        x = sc.nextInt();
        for (int i = 0; i < n; i++)
            list.add(sc.nextInt());
        int sol[] = new int[100100];
        Collections.sort(list);
        bkt(sol, 0);
        System.out.println(contor);
    }
}
/* * There is some PCI addresses that are not mapped to any register. * Accessing some of them lead to BUS error. So it is unreasonable to access * the invalid addresses. */ static int soc_cpureg_address_is_invalid(int unit, soc_regaddrinfo_t *ainfo) { int off; off = SOC_REG_BASE(unit, ainfo->reg); if (soc_feature(unit, soc_feature_time_v3_no_bs)){ if (off >= CMIC_BS0_CONFIG_OFFSET && off <= CMIC_BS1_INITIAL_CRC_OFFSET){ return TRUE; } } if (off >= CMIC_MMU_COSLC_COUNT_ADDR_OFFSET && off < CMIC_TIM0_TIMER1LOAD_OFFSET) { return TRUE; } switch (ainfo->reg) { case CMIC_CMC0_FSCHAN_DATA32r : case CMIC_CMC0_FSCHAN_DATA64_LOr : case CMIC_FSCHAN_DATA32r : case CMIC_FSCHAN_DATA64_LOr : case CMIC_CMC1_FSCHAN_DATA32r : case CMIC_CMC1_FSCHAN_DATA64_LOr : case CMIC_CMC2_FSCHAN_DATA32r : case CMIC_CMC2_FSCHAN_DATA64_LOr : return TRUE; default: break; } return FALSE; }
/** * Tries to create a PKCS#12 file with a DSTU4145 cert. * This test requires a patched BouncyCastle. */ @Test public void testP12KeystoreDSTU4145() throws Exception { log.debug("DSTU4145 configured: "+(AlgorithmTools.isDstu4145Enabled() ? "YES" : "NO")); assumeTrue(AlgorithmTools.isDstu4145Enabled()); log.trace(">testP12KeystoreDSTU4145()"); String keyspec = CesecoreConfiguration.getExtraAlgSubAlgName("dstu4145", "233"); assertNotNull("curve 233 is not configued!", keyspec); AlgorithmParameterSpec spec = KeyTools.dstuOidToAlgoParams(keyspec); assertNotNull(spec); testAlgorithm("DSTU4145", AlgorithmConstants.KEYALGORITHM_DSTU4145, AlgorithmConstants.SIGALG_GOST3411_WITH_DSTU4145, spec); log.trace("<testP12KeystoreDSTU4145()"); }
/** * GUI for the Fractaliser. Main method is the entry point for the program. */ public class GUIFrame extends JFrame implements IFSDefFrameListener { private static final int DETERMINISTIC = 0; private static final int RANDOM = 1; private int algorithmType; private boolean usingCustom; private int initPixX; private int initPixY; private int depth; private int threshold; private IFS ifs; private IFS customIFS; private JPanel panel; private FractalView fv; // menubar items private JMenuBar menuBar; private JMenu algorithmMenu; private JRadioButtonMenuItem aDetMenuItem; private JRadioButtonMenuItem aRandMenuItem; private JMenu ifsMenu; private JRadioButtonMenuItem iFernMenuItem; private JRadioButtonMenuItem iGasketMenuItem; private JRadioButtonMenuItem iSquareMenuItem; private JRadioButtonMenuItem iCustomMenuItem; private JMenuItem iDefCustomMenuItem; private JMenu renderingMenu; private JMenuItem rInitPixMenuItem; private JMenuItem rRecurDepthMenuItem; private JMenuItem rRenderThresMenuItem; private JMenuItem rRenderMenuItem; /** * Entry point for the program. No arguments used. * @param args the command line arguments */ public static void main(String[] args) { java.awt.EventQueue.invokeLater(new Runnable() { public void run() { new GUIFrame().setVisible(true); } }); } /** * Constructor for GUIFrames. */ public GUIFrame() { super("Fractaliser"); // set up default values... algorithmType = DETERMINISTIC; usingCustom = false; initPixX = 500; initPixY = 500; depth = 50; threshold = 25; ifs = new Fern(); customIFS = new IFS(); initComponents(); } /** * Sets up GUI Frame components. */ private void initComponents() { // create menubar menuBar = new JMenuBar(); // create Algorithms menu algorithmMenu = new JMenu("Algorithm"); ButtonGroup algorithmButtonGroup = new ButtonGroup(); aDetMenuItem = new JRadioButtonMenuItem("Deterministic"); aDetMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { aDetMenuItemActionPerformed(evt); } }); aDetMenuItem.setSelected(true); aRandMenuItem = new JRadioButtonMenuItem("Random"); aRandMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { aRandMenuItemActionPerformed(evt); } }); algorithmButtonGroup.add(aDetMenuItem); algorithmButtonGroup.add(aRandMenuItem); algorithmMenu.add(aDetMenuItem); algorithmMenu.add(aRandMenuItem); // create IFS menu ifsMenu = new JMenu("IFS"); ButtonGroup ifsButtonGroup = new ButtonGroup(); iFernMenuItem = new JRadioButtonMenuItem("Fern"); iFernMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { iFernMenuItemActionPerformed(evt); } }); iFernMenuItem.setSelected(true); iGasketMenuItem = new JRadioButtonMenuItem("Gasket"); iGasketMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { iGasketMenuItemActionPerformed(evt); } }); iSquareMenuItem = new JRadioButtonMenuItem("Square"); iSquareMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { iSquareMenuItemActionPerformed(evt); } }); iCustomMenuItem = new JRadioButtonMenuItem("Custom"); iCustomMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { iCustomMenuItemActionPerformed(evt); } }); iDefCustomMenuItem = new JMenuItem("Define custom..."); 
iDefCustomMenuItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(java.awt.event.ActionEvent evt) {
        iDefCustomMenuItemActionPerformed(evt);
    }
});
ifsButtonGroup.add(iFernMenuItem);
ifsButtonGroup.add(iGasketMenuItem);
ifsButtonGroup.add(iSquareMenuItem);
ifsButtonGroup.add(iCustomMenuItem);
ifsMenu.add(iFernMenuItem);
ifsMenu.add(iGasketMenuItem);
ifsMenu.add(iSquareMenuItem);
ifsMenu.add(iCustomMenuItem);
ifsMenu.addSeparator();
ifsMenu.add(iDefCustomMenuItem);

// create Rendering Menu
renderingMenu = new JMenu("Rendering");
rInitPixMenuItem = new JMenuItem("Initial pixel...");
rInitPixMenuItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(java.awt.event.ActionEvent evt) {
        rInitPixMenuItemActionPerformed(evt);
    }
});
rRecurDepthMenuItem = new JMenuItem("Recursion depth...");
rRecurDepthMenuItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(java.awt.event.ActionEvent evt) {
        rRecurDepthMenuItemActionPerformed(evt);
    }
});
rRenderThresMenuItem = new JMenuItem("Rendering threshold...");
rRenderThresMenuItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(java.awt.event.ActionEvent evt) {
        rRenderThresMenuItemActionPerformed(evt);
    }
});
rRenderMenuItem = new JMenuItem("Render");
rRenderMenuItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(java.awt.event.ActionEvent evt) {
        rRenderMenuItemActionPerformed(evt);
    }
});
rRenderMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_R, 0));
renderingMenu.add(rInitPixMenuItem);
renderingMenu.add(rRecurDepthMenuItem);
renderingMenu.add(rRenderThresMenuItem);
renderingMenu.addSeparator();
renderingMenu.add(rRenderMenuItem);

// bring it all together
menuBar.add(algorithmMenu);
menuBar.add(ifsMenu);
menuBar.add(renderingMenu);
setJMenuBar(menuBar);

panel = new JPanel();
panel.setLayout(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 0;
c.fill = GridBagConstraints.BOTH;
c.weightx = 1;
c.weighty = 1;
c.anchor = GridBagConstraints.CENTER;
fv = new FractalView();
panel.add(fv, c);
setContentPane(panel);
setResizable(false);
pack();
setLocationRelativeTo(null); // Centre on the screen
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
}

/**
 * Reacts to the Algorithm/Deterministic button being clicked.
 * Sets the current algorithm to Deterministic.
 * @param evt
 */
public void aDetMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    algorithmType = DETERMINISTIC;
}

/**
 * Reacts to the Algorithm/Random button being clicked.
 * Sets the current algorithm to Random.
 * @param evt
 */
public void aRandMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    algorithmType = RANDOM;
}

/**
 * Reacts to the IFS/Fern button being clicked.
 * Sets the current IFS to Fern.
 * @param evt
 */
public void iFernMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    ifs = new Fern();
    usingCustom = false;
}

/**
 * Reacts to the IFS/Gasket button being clicked.
 * Sets the current IFS to Gasket.
 * @param evt
 */
public void iGasketMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    ifs = new Gasket();
    usingCustom = false;
}

/**
 * Reacts to the IFS/Square button being clicked.
 * Sets the current IFS to Square.
 * @param evt
 */
public void iSquareMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    ifs = new Square();
    usingCustom = false;
}

/**
 * Reacts to the IFS/Custom button being clicked.
 * Sets the current IFS to Custom.
 * @param evt
 */
public void iCustomMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    ifs = customIFS;
    usingCustom = true;
}

/**
 * Reacts to the IFS/Define Custom... button being clicked.
 * Displays IFSDefFrame to define a custom IFS.
 * @param evt
 */
public void iDefCustomMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    new IFSDefFrame(this).setVisible(true);
}

/**
 * Reacts to the Rendering/Initial Pixel... button being clicked.
 * Displays dialog box to get the initial pixel co-ordinates.
 * @param evt
 */
public void rInitPixMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    String input = JOptionPane.showInputDialog(this, "Enter co-ordinates, each between 0 and 999 inclusive (format: x, y)...");
    if (input != null) {
        String[] inputs = input.split(",");
        if (inputs.length == 2) {
            try {
                int xvalue = Integer.parseInt(inputs[0].trim());
                int yvalue = Integer.parseInt(inputs[1].trim());
                if (xvalue >= 0 && xvalue < 1000 && yvalue >= 0 && yvalue < 1000) {
                    initPixX = xvalue;
                    initPixY = yvalue;
                } else {
                    JOptionPane.showMessageDialog(this, "Both numbers must be between 0 and 999 inclusive.");
                }
            } catch (Exception e) {
                JOptionPane.showMessageDialog(this, "One or both values are not valid integers.");
            }
        } else {
            JOptionPane.showMessageDialog(this, "The format is x, y - two numbers separated by a comma.");
        }
    }
}

/**
 * Reacts to the Rendering/Recursion Depth... button being clicked.
 * Displays a dialog box to get the recursion depth.
 * @param evt
 */
public void rRecurDepthMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    String input = JOptionPane.showInputDialog(this, "Recursion depth...");
    if (input != null) {
        try {
            int value = Integer.parseInt(input);
            if (value >= 0) {
                depth = value;
            } else {
                JOptionPane.showMessageDialog(this, "Recursion depth must be non-negative.");
            }
        } catch (Exception e) {
            JOptionPane.showMessageDialog(this, "That is not a valid number.");
        }
    }
}

/**
 * Reacts to the Rendering/Rendering Threshold... button being clicked.
 * Displays a dialog box to get the rendering threshold.
 * @param evt
 */
public void rRenderThresMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    String input = JOptionPane.showInputDialog(this, "Rendering threshold...");
    if (input != null) {
        try {
            int value = Integer.parseInt(input);
            if (value >= 0) {
                threshold = value;
            } else {
                JOptionPane.showMessageDialog(this, "Rendering threshold must be non-negative.");
            }
        } catch (Exception e) {
            JOptionPane.showMessageDialog(this, "That is not a valid number.");
        }
    }
}

/**
 * Reacts to the Rendering/Render button being clicked.
 * Renders the currently-set IFS and options.
 * @param evt
 */
public void rRenderMenuItemActionPerformed(java.awt.event.ActionEvent evt) {
    if (algorithmType == RANDOM && threshold >= depth) {
        JOptionPane.showMessageDialog(this, "Rendering threshold equals or exceeds recursion depth.\nPrepare for an underwhelming prefractal.");
    }
    setTitle("Fractaliser - Rendering...");
    if (algorithmType == DETERMINISTIC) {
        fv.renderDeterministic(ifs, depth, initPixX, initPixY);
    } else if (algorithmType == RANDOM) {
        fv.renderRandom(ifs, depth, threshold, initPixX, initPixY);
    } else {
        // well, this shouldn't happen
    }
    setTitle("Fractaliser");
}

/**
 * Sets the custom IFS.
 * @param customIFS The IFS to set.
 */
public void setCustomIFS(IFS customIFS) {
    this.customIFS = customIFS;
    if (usingCustom) {
        ifs = customIFS;
    }
}
}
/************************************************************************************
 * MIT License                                                                      *
 *                                                                                  *
 * Copyright (c) 2019 <NAME>                                                        *
 *                                                                                  *
 * Permission is hereby granted, free of charge, to any person obtaining a copy    *
 * of this software and associated documentation files (the "Software"), to deal   *
 * in the Software without restriction, including without limitation the rights    *
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell       *
 * copies of the Software, and to permit persons to whom the Software is           *
 * furnished to do so, subject to the following conditions:                        *
 *                                                                                  *
 * The above copyright notice and this permission notice shall be included in all  *
 * copies or substantial portions of the Software.                                 *
 *                                                                                  *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR      *
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,        *
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE     *
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER          *
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,   *
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE   *
 * SOFTWARE.                                                                        *
 ************************************************************************************/
#ifdef EXTENDED_FEATURE
#include "fa.h"
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <iomanip>

void StateTable::addEntry(int from, char sym, int to) {
    if(from >= row.size()) row.resize(from+1); // +1 may be wrong
    if(to >= row.size()) row.resize(to+1);
    row[from].sym[sym].push_back(to);
    if(row[from].tag.empty()) {
        row[from].tag += 'q';
        row[from].tag += std::to_string(from);
    }
    if(row[to].tag.empty()) {
        row[to].tag += 'q';
        row[to].tag += std::to_string(to);
    }
}

#ifdef EXTENDED_FEATURE
Metrics StateTable::calcMetrics() const {
    Metrics m;
    for(int i=0; i<129; ++i) {
        m.len[i] = 0;
        m.is_filled[i] = false;
    }
    int max_len=0, s;
    m.len[0] = 5; /* strlen(STATE) */
    for(auto r: row) {
        int tag_size = r.tag.size();
        if(r.final_state) tag_size += 2; /* for round bracket */
        if(tag_size > m.len[0]) m.len[0] = tag_size;
        int len = m.len[0]+1; /* +1 for | */
        for(int i=1; i<128; ++i) {
            s=0;
            if(r.sym[i].size()) {
                m.is_filled[i] = true;
                for(auto a: r.sym[i]) {
                    s += std::to_string(a).size();
                }
                s += r.sym[i].size()-1;
                if(s>m.len[i]) m.len[i] = s;
            }
            if(m.is_filled[i]) len += m.len[i]+1; /* +1 for | */
        }
        len += 1; /* +1 for | */
        if(len > max_len) max_len = len;
    }
    m.max_len = max_len;
    return m;
}

template<typename T>
void col(Metrics& m, int i, T t) {
    std::cout << '|';
    std::cout.width(m.len[i]);
    std::cout << t;
    std::cout.width(0);
}

void line(int len) {
    for(int i=0; i<len; ++i) std::cout << '-';
    std::cout << '\n';
}

void StateTable::print() const {
    Metrics m = calcMetrics();
    line(m.max_len);
    col(m, 0, "State");
    for(int i=32; i<128; ++i) {
        if(!m.is_filled[i]) continue;
        switch(i) {
            case 92:
            case 127: col(m, i, ' '); break;
            default:  col(m, i, char(i));
        }
    }
    std::cout << '|';
    std::cout << '\n';
    line(m.max_len);
    for(auto r: row) {
        if(r.final_state) {
            std::string s;
            s = '(';
            s += r.tag;
            s += ')';
            col(m, 0, s);
        }
        else col(m,0,r.tag);
        for(int i=32; i<128; ++i) {
            if(!m.is_filled[i]) continue;
            if(!r.sym[i].size()) {
                col(m, i, ' ');
            } else {
                std::string st;
                for(auto a: r.sym[i]) {
                    st += std::to_string(a);
                    st += ',';
                }
                st.resize(st.size()-1);
                col(m,i,st);
            }
        }
        std::cout << "|\n";
        line(m.max_len);
    }
}

void StateTable::printDot(std::ofstream& file) const {
    /*file << "digraph
fa {\n" << " rankdir=LR;\n"; for(int r=0; r<100; ++r) { for(int c=0; c<100; ++c) { if(!state_entry[r][c].empty()) file << " {"; if(is_final[r]) file << "node [shape=doublecircle] "; file << entry->tag << "} -> {"; if(is_final[c]) file << "node [shape=doublecircle] "; file << state_entry[transition.state_id]->tag << "} [label=\"" << transition.sym << "\"]\n"; } } } file << "}\n";*/ } void FA::printTable() const { table.print(); } void FA::printDot(const char* file_stem) const { std::ofstream dotfile(file_stem); table.printDot(dotfile); dotfile.close(); /*std::string command; command = "dot -Tpng "; command += file_stem; command += " > "; command += file_stem; command += ".png"; std::system(command.c_str());*/ } int FA::addTransition(int from, char sym, int to) { if(to == -1) to = ++state_id; table.addEntry(from, sym, to); return to; } #endif int FA::addTransition(const std::vector<int>& from, char sym, int to) { if(to == -1) to = ++state_id; for(auto f: from) table.addEntry(f, sym, to); return to; } #endif
/* C++ program to run BFS on an Undirected Graph
   Functional for both weighted and unweighted graphs
   Contributed to Data-Science-Community-SRM on Github
   Contributor: Santhosh-Vardhan */
#include <bits/stdc++.h>
using namespace std;

class graph
{
    int v;
    vector<pair<int,int>> *adj; // Adjacency list that holds both vertex and weights
public:
    graph(int ver) // When a graph is created, memory is allocated
    {
        v = ver;
        adj = new vector<pair<int,int>>[v+1];
    }
    void addEdge(int st, int nd, int wt);
    void BFS(int st);
};

void graph::addEdge(int st, int nd, int wt = 0)
{
    // If an unweighted graph is required, don't pass in third argument while adding edges
    adj[st].push_back(make_pair(nd,wt)); // Adds the edges to the adjacency list
    adj[nd].push_back(make_pair(st,wt));
}

void graph::BFS(int st) // this bfs only prints the traversal, st - starting vertex
{
    deque<int> dq; // Queue for the BFS
    vector<bool> visited(v+1, false); // setting all visited to false (a variable-length array cannot be initialized in standard C++, so a vector is used)
    dq.push_back(st);
    cout<<"--BFS--"<<endl;
    cout<<"STARTING VERTEX "<<st<<endl;
    while(!dq.empty())
    {
        visited[*dq.begin()] = true;
        for(auto it = adj[*dq.begin()].begin(); it != adj[*dq.begin()].end(); it++)
        {
            int vert = (*it).first; // Storing the current node and weight in separate variables for readability
            int weit = (*it).second;
            if(visited[vert]==true)
                continue;
            else
            {
                cout<<"VERTEX :"<<vert<<endl; // Printing the vertex
                // cout<<"WEIGHT :"<<weit<<endl; // Uncomment this if you want to print weights
                visited[vert] = true;
                dq.push_back(vert);
            }
        }
        dq.pop_front(); // Removing the first vertex from Queue after performing BFS on it
    }
}

int main()
{
    // graph g(n); creates a graph with n vertices
    // Small demo: build an unweighted graph on 5 vertices and run BFS from vertex 1.
    graph g(5);
    g.addEdge(1, 2);
    g.addEdge(1, 3);
    g.addEdge(2, 4);
    g.addEdge(3, 5);
    g.BFS(1);
    return 0;
}
/**
 * Talk with Spotify using the official API
 *
 * @author shamalaya
 */
@Log4j
public class SpotyWeb {

    /**
     * Authorization sync between threads
     */
    public static final CountDownLatch authorized = new CountDownLatch(1);

    /**
     * Keep track of expiry time
     */
    private long expires = 0L;

    /**
     * Spotify Application secrets and redirect url
     */
    private String clientId;
    private String clientSecret;
    private URI redirectUri;

    /**
     * SpotifyApi
     */
    private SpotifyApi spotifyApi;

    /**
     * Configuration
     */
    private ParseJSONConfig conf;

    /**
     * Single instance
     */
    private static SpotyWeb spotyweb;

    /**
     * Spotify Scopes
     */
    private final static String SCOPES =
            "user-read-currently-playing, " +
            "user-read-playback-state, " +
            "user-library-modify, " +
            "user-library-read";

    // INITIALIZE
    private SpotyWeb(ParseJSONConfig conf) {
        // set configuration
        this.conf = conf;

        // set secrets
        clientId = conf.getConfiguration().getAuth().getAuth_clientId();
        clientSecret = conf.getConfiguration().getAuth().getAuth_clientSecret();
        redirectUri = SpotifyHttpManager.makeUri(
                "http://" + conf.getConfiguration().getServer().getServer_ip() + ":"
                        + conf.getConfiguration().getServer().getServer_port() + "/"
                        + conf.getConfiguration().getServer().getServer_path());

        // set api
        spotifyApi = new SpotifyApi.Builder()
                .setClientId(clientId)
                .setClientSecret(clientSecret)
                .setRedirectUri(redirectUri)
                .build();
    }

    public static final SpotyWeb initialize(ParseJSONConfig conf) {
        if( spotyweb==null ) {
            spotyweb = new SpotyWeb(conf);
        }
        return spotyweb;
    }

    public static final SpotyWeb getInstance() {
        return spotyweb;
    }

    // GET URI
    public void authorizationCodeUri() throws Exception {
        final AuthorizationCodeUriRequest authorizationCodeUriRequest = spotifyApi.authorizationCodeUri()
                .state("x4xkmn9pu3j6ukrs8n")
                .scope( SCOPES )
                .show_dialog(true)
                .build();

        final URI uri = authorizationCodeUriRequest.execute();
        if (Desktop.isDesktopSupported()) {
            Desktop.getDesktop().browse(new URI(uri.toString()));
        } else {
            log.info("Please manually open this url: " + uri.toString());
        }
    }

    // AUTHORIZE
    public void authorizationCode(String code) throws Exception {
        final AuthorizationCodeRequest authorizationCodeRequest = spotifyApi.authorizationCode(code).build();
        final AuthorizationCodeCredentials authorizationCodeCredentials = authorizationCodeRequest.execute();

        // Set access and refresh token for further "spotifyApi" object usage
        spotifyApi.setAccessToken(authorizationCodeCredentials.getAccessToken());
        spotifyApi.setRefreshToken(authorizationCodeCredentials.getRefreshToken());
        log.info("Token expires in: " + authorizationCodeCredentials.getExpiresIn());
        log.info("Refresh Token: " + authorizationCodeCredentials.getRefreshToken());

        // set expiry time
        expires = System.currentTimeMillis() + ( authorizationCodeCredentials.getExpiresIn() * 1000 );

        // save refresh token in configuration
        if( authorizationCodeCredentials.getRefreshToken() != null ) {
            conf.getConfiguration().getAuth().setAuth_refreshToken( authorizationCodeCredentials.getRefreshToken() );
            conf.writeConfiguration();
        }
    }

    // REFRESH
    public void authorizationCodeRefresh() throws Exception {
        // reload refresh token
        if( spotifyApi.getRefreshToken() == null ) {
            spotifyApi.setRefreshToken(conf.getConfiguration().getAuth().getAuth_refreshToken());
        }

        final AuthorizationCodeRefreshRequest authorizationCodeRefreshRequest = spotifyApi.authorizationCodeRefresh().build();
        final AuthorizationCodeCredentials authorizationCodeCredentials = authorizationCodeRefreshRequest.execute();
spotifyApi.setAccessToken(authorizationCodeCredentials.getAccessToken()); spotifyApi.setRefreshToken(authorizationCodeCredentials.getRefreshToken()); log.info("Renew Expires in: " + authorizationCodeCredentials.getExpiresIn()); log.info("Renew Refresh Token: " + authorizationCodeCredentials.getRefreshToken()); // set expire time expires = System.currentTimeMillis() + ( authorizationCodeCredentials.getExpiresIn() * 1000 ); // save refresh token in configuration if( authorizationCodeCredentials.getRefreshToken() != null ) { conf.getConfiguration().getAuth().setAuth_refreshToken( authorizationCodeCredentials.getRefreshToken() ); conf.writeConfiguration(); } } // CHECK EXPIRED private void checkExpired() throws Exception { if( System.currentTimeMillis() >= expires ) { authorizationCodeRefresh(); } } /**************** * ACTION CALLS * ****************/ /** * Current song * * @return * @throws Exception * @throws SpotifyWebApiException */ public CurrentlyPlayingContext getInformationAboutUsersCurrentPlayback() throws Exception, SpotifyWebApiException { long start = System.currentTimeMillis(); checkExpired(); GetInformationAboutUsersCurrentPlaybackRequest getInformationAboutUsersCurrentPlaybackRequest = spotifyApi.getInformationAboutUsersCurrentPlayback() .market(CountryCode.IT) .additionalTypes("track,episode") .build(); CurrentlyPlayingContext ris = getInformationAboutUsersCurrentPlaybackRequest.execute(); log.info("getInformationAboutUsersCurrentPlayback(): info retrieved in " + (System.currentTimeMillis()-start) + " ms" ); return ris; } /** * Save track in bookmark * * @param id * @throws SpotifyWebApiException * @throws IOException */ public void saveTracksForUser(String id) throws Exception { checkExpired(); SaveTracksForUserRequest saveTracksForUserRequest = spotifyApi.saveTracksForUser( new String[] { id } ).build(); String ris = saveTracksForUserRequest.execute(); log.info("saveTracksForUser(): " + ris); } /** * Remove track from bookmark * * @param id * @throws SpotifyWebApiException * @throws IOException */ public void removeTracksForUser(String id) throws Exception { checkExpired(); RemoveUsersSavedTracksRequest saveTracksForUserRequest = spotifyApi.removeUsersSavedTracks( new String[] { id } ).build(); String ris = saveTracksForUserRequest.execute(); log.info("removeTracksForUser(): " + ris); } /** * Check if track is saved in user's music * * @param id * @return * @throws Exception */ public boolean checkTracksForUser(String id) throws Exception { checkExpired(); CheckUsersSavedTracksRequest checkTracksForUserRequest = spotifyApi.checkUsersSavedTracks( new String[] { id } ).build(); Boolean[] ris = checkTracksForUserRequest.execute(); log.info("checkTracksForUser(): " + ris[0]); return ris[0]; } }
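The expiry bookkeeping above (stamp an absolute deadline when the token is granted, then refresh lazily in checkExpired() before each API call) is a pattern worth isolating. Below is a minimal Python sketch of the same idea; TokenHolder and refresh_fn are hypothetical names standing in for the SpotifyApi plumbing, and a production version would typically refresh a little before the deadline to absorb clock skew.

import time

class TokenHolder:
    def __init__(self, refresh_fn):
        # refresh_fn is an assumption: it stands in for authorizationCodeRefresh()
        # and is expected to return (token, expires_in_seconds).
        self.refresh_fn = refresh_fn
        self.token = None
        self.expires_at_ms = 0

    def get_token(self):
        # Mirror checkExpired(): refresh once the stored deadline has passed.
        now_ms = time.time() * 1000
        if now_ms >= self.expires_at_ms:
            self.token, expires_in = self.refresh_fn()
            self.expires_at_ms = now_ms + expires_in * 1000
        return self.token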
import sys

flush = sys.stdout.flush

# Interactive problem: ask the judge "? a b" and read back a value for the pair (a, b).
def leEntrada(a, b):
    print('?', a, b)
    flush()
    entrada = int(input())
    return entrada

# Report the answer as "! x1 x2 ... xn".
def imprimeSaida(a):
    saida = ''
    for i in a:
        saida += str(i) + " "
    print('!', saida)
    flush()

tamanho = int(input())
lista = []
# Query the three pairwise values among elements 1, 2 and 3.
entrada_a = leEntrada(1, 2)
entrada_b = leEntrada(2, 3)
entrada_c = leEntrada(1, 3)
# Solve for the first hidden value; each query apparently returns the sum of two hidden values.
aux1 = (entrada_a + entrada_c - entrada_b)//2
lista.append(aux1)
lista.append((entrada_a + entrada_b - entrada_c)//2)
lista.append((entrada_b + entrada_c - entrada_a)//2)
# Every remaining element follows from one query against element 1.
for i in range(4, tamanho + 1):
    aux2 = leEntrada(1, i)
    lista.append(aux2 - aux1)
imprimeSaida(lista)
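For reference, the recovery step follows from simple algebra, assuming each "? a b" query returns the sum of the two hidden values (an inference from the code, not stated in it): q(1,2) + q(1,3) - q(2,3) = 2*x1, and x_i = q(1,i) - x1 for i >= 4. A tiny offline check of those identities:

# Sanity check with invented hidden values; q stands in for leEntrada.
x = {1: 5, 2: 7, 3: 11, 4: 2}
q = lambda a, b: x[a] + x[b]
x1 = (q(1, 2) + q(1, 3) - q(2, 3)) // 2
assert x1 == x[1]
assert q(1, 4) - x1 == x[4]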
/**
 * The makeRequest method allows the user to create a search request with the optional useragent field
 * @param mx The MX of the search is the maximum wait time (in seconds) that the program should wait for responses
 * @param st The search target of the search must be in a set format as outlined in page 31 of <a href="http://www.upnp.org/specs/arch/UPnP-arch-DeviceArchitecture-v1.1.pdf">The UPnP Device Architecture</a>
 * @param useragent The useragent of the search in format OS/version UPnP/1.1 product/version
 * @return String - a formatted SSDP M-SEARCH request built from the given MX, ST, and useragent parameters
 */
public static String makeRequest(int mx, String st, String useragent) {
    String request = REQUEST_HEADER + HOST_HEADER + MAN_HEADER
            + String.format(MX_HEADER.toString(), mx)
            + String.format(ST_HEADER, st)
            + String.format(USERAGNT_HEADER, useragent)
            + LINE_BREAK;
    return request;
}
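The header constants (REQUEST_HEADER, HOST_HEADER, and so on) are defined elsewhere, so the exact output here is an assumption; per the UPnP 1.1 spec, a request assembled this way, for example makeRequest(3, "ssdp:all", "Linux/5.10 UPnP/1.1 demo/1.0"), would typically read:

M-SEARCH * HTTP/1.1
HOST: 239.255.255.250:1900
MAN: "ssdp:discover"
MX: 3
ST: ssdp:all
USER-AGENT: Linux/5.10 UPnP/1.1 demo/1.0

A blank line terminates the request, which is presumably what LINE_BREAK supplies.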
"""Refactor. Add unique constraints

Revision ID: ca836029635c
Revises: 769871c38a82
Create Date: 2021-12-04 23:48:59.660636

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = 'ca836029635c'
down_revision = '769871c38a82'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('classes', 'number', existing_type=sa.INTEGER(), nullable=False, existing_comment='The year the students went to first grade ')
    op.alter_column('marks', 'creation_date', existing_type=postgresql.TIMESTAMP(), nullable=False)
    op.alter_column('students', 'class_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('subjects', 'name', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('users', 'type', existing_type=sa.INTEGER(), comment='1 - admin, 2 - teacher, 3 - student ...', existing_nullable=False)
    op.alter_column('users', 'birthday', existing_type=sa.DATE(), nullable=True)
    op.alter_column('work_types', 'name', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('workplaces', 'class_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('workplaces', 'subject_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('workplaces', 'teacher_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('works', 'workplace_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('works', 'headline', existing_type=postgresql.TIMESTAMP(), nullable=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('works', 'headline', existing_type=postgresql.TIMESTAMP(), nullable=True)
    op.alter_column('works', 'workplace_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('workplaces', 'teacher_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('workplaces', 'subject_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('workplaces', 'class_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('work_types', 'name', existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('users', 'birthday', existing_type=sa.DATE(), nullable=False)
    op.alter_column('users', 'type', existing_type=sa.INTEGER(), comment=None, existing_comment='1 - admin, 2 - teacher, 3 - student ...', existing_nullable=False)
    op.alter_column('subjects', 'name', existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('students', 'class_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('marks', 'creation_date', existing_type=postgresql.TIMESTAMP(), nullable=True)
    op.alter_column('classes', 'number', existing_type=sa.INTEGER(), nullable=True, existing_comment='The year the students went to first grade ')
    # ### end Alembic commands ###
/** * Represents the minor tick on the axis line. * * @author Syam */ public static class MinorTicks extends AbstractTicks { private int divisions = 0; /** * Constructor. */ public MinorTicks() { } @Override protected void buildProperties() { super.buildProperties(); property("splitNumber", divisions, divisions > 0); } /** * Get number of divisions. * * @return Number of divisions. */ public final int getDivisions() { return divisions; } /** * Set number of divisions. * * @param divisions Number of divisions. */ public void setDivisions(int divisions) { this.divisions = divisions; } }
//! Uses the `nom` library to parse the in memory format of perf data structures and
//! transforms them into more rust-like data-structures.
//!
//! # References
//! The code is inspired by the following articles and existing parser to make sense of the
//! (poorly documented) format:
//!
//! * https://lwn.net/Articles/644919/
//! * http://man7.org/linux/man-pages/man2/perf_event_open.2.html
//! * https://github.com/andikleen/pmu-tools/tree/master/parser
//!
//! # Current limitations
//! * Only version 2 of the data format
//! * No support for AUX stuff
//! * Sample ID at the end of records is currently ignored
//! * I'm not sure if I'm parsing the BuildId correctly, it seems it can not be recognized
//! * Only support little endian machines
//!
//! # See also
//! * `perf_file.rs` -- as an example on how to use the parser function to parse a perf.data file
//! * `perf_format.rs` -- for all the struct definitions that are parsed here
//!
use super::perf_format::*;
use nom::*;

fn is_nul_byte(c: u8) -> bool {
    c == 0x0
}

named!(parse_c_string, take_till!(is_nul_byte));

named!(parse_vec_u64<&[u8], Vec<u64> >,
    do_parse!(
        len: le_u64 >>
        vec: count!(le_u64, len as usize) >>
        (vec)
    )
);

named!(parse_vec_u32_u8<&[u8], Vec<u8> >,
    do_parse!(
        len: le_u32 >>
        vec: count!(le_u8, len as usize) >>
        (vec)
    )
);

fn parse_vec_u64_variable(input: &[u8], count: usize) -> IResult<&[u8], Vec<u64>> {
    count!(input, le_u64, count)
}

fn parse_vec_u8_variable(input: &[u8], count: usize) -> IResult<&[u8], Vec<u8>> {
    count!(input, le_u8, count)
}

fn no_event(input: &[u8]) -> IResult<&[u8], EventData> {
    Ok((input, EventData::None))
}

// TODO: Needs sample flags!
named!(pub parse_sample_id<&[u8], SampleId>,
    do_parse!(
        ptid: parse_thread_id >>
        time: le_u64 >>
        id: le_u64 >>
        stream_id: le_u64 >>
        cpu: parse_cpu >>
        identifier: le_u64 >>
        (SampleId { ptid: ptid, time: time, id: id, stream_id: stream_id, cpu: cpu, identifier: identifier })
    )
);

named!(pub parse_thread_id<&[u8], ThreadId>,
    do_parse!(
        pid: le_i32 >>
        tid: le_i32 >>
        (ThreadId { pid: pid, tid: tid })
    )
);

named!(pub parse_cpu<&[u8], Cpu>,
    do_parse!(
        cpu: le_u32 >>
        res: le_u32 >>
        (Cpu { cpu: cpu, res: res })
    )
);

named!(pub parse_fork_record<&[u8], ForkRecord>,
    do_parse!(
        pid: le_u32 >>
        ppid: le_u32 >>
        tid: le_u32 >>
        ptid: le_u32 >>
        time: le_u64 >>
        (ForkRecord { pid: pid, ppid: ppid, tid: tid, ptid: ptid, time: time, })
    )
);

named!(pub parse_exit_record<&[u8], ExitRecord>,
    do_parse!(
        pid: le_u32 >>
        ppid: le_u32 >>
        tid: le_u32 >>
        ptid: le_u32 >>
        time: le_u64 >>
        (ExitRecord { pid: pid, ppid: ppid, tid: tid, ptid: ptid, time: time, })
    )
);

named!(pub parse_throttle_record<&[u8], ThrottleRecord>,
    do_parse!(
        time: le_u64 >>
        id: le_u64 >>
        stream_id: le_u64 >>
        (ThrottleRecord { time: time, id: id, stream_id: stream_id, })
    )
);

named!(pub parse_unthrottle_record<&[u8], UnthrottleRecord>,
    do_parse!(
        time: le_u64 >>
        id: le_u64 >>
        stream_id: le_u64 >>
        (UnthrottleRecord { time: time, id: id, stream_id: stream_id, })
    )
);

named!(pub parse_event_header<&[u8], EventHeader>,
    do_parse!(
        event_type: le_u32 >>
        misc: le_u16 >>
        size: le_u16 >>
        (EventHeader { event_type: EventType::new(event_type), misc: misc, size: size })
    )
);

named!(pub parse_mmap_record<&[u8], MMAPRecord>,
    do_parse!(
        pid: le_i32 >>
        tid: le_u32 >>
        addr: le_u64 >>
        len: le_u64 >>
        pgoff: le_u64 >>
        filename: parse_c_string >>
        (MMAPRecord {
            pid: pid,
            tid: tid,
            addr: addr,
            len: len,
            pgoff: pgoff,
            filename: unsafe { String::from_utf8_unchecked(filename.to_vec()) }
        })
    )
);

named!(pub parse_mmap2_record<&[u8],
MMAP2Record>, do_parse!( ptid: parse_thread_id >> addr: le_u64 >> len: le_u64 >> pgoff: le_u64 >> maj: le_u32 >> min: le_u32 >> ino: le_u64 >> ino_generation: le_u64 >> prot: le_u32 >> flags: le_u32 >> filename: parse_c_string >> // TODO: sample_id: parse_sample_id, (MMAP2Record { ptid: ptid, addr: addr, len: len, pgoff: pgoff, maj: maj, min: min, ino: ino, ino_generation: ino_generation, prot: prot, flags: flags, filename: unsafe { String::from_utf8_unchecked(filename.to_vec()) } }) ) ); pub fn parse_read_value( input: &[u8], flags: ReadFormatFlags, ) -> IResult<&[u8], (u64, Option<u64>)> { do_parse!( input, value: le_u64 >> id: cond!(flags.has_id(), le_u64) >> (value, id) ) } pub fn parse_read_format(input: &[u8], flags: ReadFormatFlags) -> IResult<&[u8], ReadFormat> { if flags.has_group() { do_parse!( input, nr: le_u64 >> time_enabled: cond!(flags.has_total_time_enabled(), le_u64) >> time_running: cond!(flags.has_total_time_running(), le_u64) >> values: count!(call!(parse_read_value, flags), nr as usize) >> (ReadFormat { time_enabled: time_enabled, time_running: time_running, values: values }) ) } else { do_parse!( input, value: le_u64 >> time_enabled: cond!(flags.has_total_time_enabled(), le_u64) >> time_running: cond!(flags.has_total_time_running(), le_u64) >> id: cond!(flags.has_id(), le_u64) >> (ReadFormat { time_enabled: time_enabled, time_running: time_running, values: vec![(value, id)] }) ) } } named!(pub parse_branch_entry<&[u8], BranchEntry>, do_parse!( from: le_u64 >> to: le_u64 >> flags: le_u64 >> (BranchEntry { from: from, to: to, flags: flags, }) ) ); pub fn parse_branch_entries( input: &[u8], flags: SampleFormatFlags, ) -> IResult<&[u8], Vec<BranchEntry>> { // TODO: bug? https://github.com/Geal/nom/issues/302 assert!(flags.has_branch_stack() && flags.has_regs_user()); do_parse!( input, // TODO: bug? 
https://github.com/Geal/nom/issues/302 //bnr: cond!(flags.has_branch_stack(), le_u64) ~ //entries: cond!(flags.has_branch_stack() && flags.has_regs_user(), count!(parse_branch_entry, 3)), bnr: le_u64 >> entries: count!(parse_branch_entry, bnr as usize) >> (entries) ) } pub fn parse_sample_record<'a>( input: &'a [u8], attr: &'a EventAttr, ) -> IResult<&'a [u8], SampleRecord> { let flags = attr.sample_type; let regcnt_user = attr.sample_regs_user.count_ones() as usize; let regcnt_intr = attr.sample_regs_intr.count_ones() as usize; do_parse!( input, sample_id: cond!(flags.has_identifier(), le_u64) >> ip: cond!(flags.has_ip(), le_u64) >> ptid: cond!(flags.has_tid(), parse_thread_id) >> time: cond!(flags.has_time(), le_u64) >> addr: cond!(flags.has_addr(), le_u64) >> id: cond!(flags.has_sample_id(), le_u64) >> stream_id: cond!(flags.has_stream_id(), le_u64) >> cpu: cond!(flags.has_cpu(), parse_cpu) >> period: cond!(flags.has_period(), le_u64) >> v: cond!(flags.has_read(), call!(parse_read_format, attr.read_format)) >> ips: cond!(flags.has_callchain(), parse_vec_u64) >> raw: cond!(flags.has_raw(), parse_vec_u32_u8) >> lbr: cond!(flags.has_branch_stack(), call!(parse_branch_entries, flags)) >> abi_user: cond!(flags.has_stack_user(), le_u64) >> regs_user: cond!( flags.has_stack_user(), call!(parse_vec_u64_variable, regcnt_user) ) >> user_stack_len: cond!(flags.has_stack_user(), le_u64) >> user_stack: cond!( flags.has_stack_user(), call!(parse_vec_u8_variable, user_stack_len.unwrap() as usize) ) >> dyn_size: cond!( flags.has_stack_user() && user_stack_len.unwrap() != 0, le_u64 ) >> weight: cond!(flags.has_weight(), le_u64) >> data_src: cond!(flags.has_data_src(), le_u64) >> transaction: cond!(flags.has_transaction(), le_u64) >> abi: cond!(flags.has_regs_intr(), le_u64) >> regs_intr: cond!( flags.has_regs_intr(), call!(parse_vec_u64_variable, regcnt_intr) ) >> (SampleRecord { sample_id: sample_id, ip: ip, ptid: ptid, time: time, addr: addr, id: id, stream_id: stream_id, cpu: cpu, period: period, v: v, ips: ips, raw: raw, lbr: lbr, abi_user: abi_user, regs_user: regs_user, user_stack: user_stack, dyn_size: dyn_size, weight: weight, data_src: data_src, transaction: transaction, abi: abi, regs_intr: regs_intr }) ) } pub fn parse_comm_record(input: &[u8]) -> IResult<&[u8], CommRecord> { do_parse!( input, ptid: parse_thread_id >> comm: parse_c_string >> // TODO: sample_id: parse_sample_id, (CommRecord { ptid: ptid, comm: unsafe { String::from_utf8_unchecked(comm.to_vec()) } }) ) } /// Parse an event record. 
pub fn parse_event<'a>(input: &'a [u8], attrs: &'a Vec<EventAttr>) -> IResult<&'a [u8], Event> { do_parse!( input, header: parse_event_header >> event: alt!( cond_reduce!( header.event_type == EventType::Mmap, map!(parse_mmap_record, EventData::MMAP) ) | cond_reduce!( header.event_type == EventType::Mmap2, map!(parse_mmap2_record, EventData::MMAP2) ) | cond_reduce!( header.event_type == EventType::Comm, map!(parse_comm_record, EventData::Comm) ) | cond_reduce!( header.event_type == EventType::Exit, map!(parse_exit_record, EventData::Exit) ) | cond_reduce!( header.event_type == EventType::Sample, map!(call!(parse_sample_record, &attrs[0]), EventData::Sample) ) | cond_reduce!( header.event_type == EventType::Fork, map!(parse_fork_record, EventData::Fork) ) | cond_reduce!( header.event_type == EventType::Unthrottle, map!(parse_unthrottle_record, EventData::Unthrottle) ) | cond_reduce!( header.event_type == EventType::Throttle, map!(parse_throttle_record, EventData::Throttle) ) | cond_reduce!( header.event_type == EventType::BuildId, map!( call!(parse_build_id_record, header.size()), EventData::BuildId ) ) | cond_reduce!(header.event_type == EventType::FinishedRound, no_event) | cond_reduce!(header.event_type.is_unknown(), no_event) ) >> (Event { header: header, data: event }) ) } // Parse a perf file section. named!(pub parse_file_section<&[u8], PerfFileSection>, do_parse!( offset: le_u64 >> size: le_u64 >> (PerfFileSection { offset: offset, size: size }) ) ); // Parse a perf string. named!(pub parse_perf_string<&[u8], String>, do_parse!( length: le_u32 >> bytes: take!(length as usize) >> ({ bytes.split(|c| *c == 0x0).next().map(|slice| unsafe { String::from_utf8_unchecked(slice.to_vec()) } ).unwrap_or(String::new()) }) ) ); // Parse a perf string list. 
named!(pub parse_perf_string_list<&[u8], Vec<String> >, do_parse!( nr: le_u32 >> strings: count!(parse_perf_string, nr as usize) >> (strings) ) ); named!(pub parse_nrcpus<&[u8], NrCpus>, do_parse!( nr_online: le_u32 >> nr_available: le_u32 >> (NrCpus { online: nr_online, available: nr_available }) ) ); pub fn parse_event_desc(input: &[u8]) -> IResult<&[u8], Vec<EventDesc>> { do_parse!( input, nr: le_u32 >> attr_size: le_u32 >> descs: count!( do_parse!( attr: flat_map!(take!(attr_size as usize), parse_event_attr) >> nr_ids: le_u32 >> event_string: parse_perf_string >> ids: call!(parse_vec_u64_variable, nr_ids as usize) >> (EventDesc { attr: attr, event_string: event_string, ids: ids }) ), nr as usize ) >> (descs) ) } named!(pub parse_cpu_topology<&[u8], CpuTopology>, do_parse!( cores: parse_perf_string_list >> threads: parse_perf_string_list >> (CpuTopology { cores: cores, threads: threads }) ) ); named!(pub parse_numa_node<&[u8], NumaNode>, do_parse!( nr: le_u32 >> mem_total: le_u64 >> mem_free: le_u64 >> cpu: parse_perf_string >> (NumaNode { node_nr: nr, mem_free: mem_free, mem_total: mem_total, cpus: cpu }) ) ); named!(pub parse_numa_topology<&[u8], Vec<NumaNode> >, do_parse!( nr: le_u32 >> nodes: count!(parse_numa_node, nr as usize) >> (nodes) ) ); named!(pub parse_pmu_mapping<&[u8], PmuMapping>, do_parse!( pmu_type: le_u32 >> pmu_name: parse_perf_string >> (PmuMapping { pmu_name: pmu_name, pmu_type: pmu_type }) ) ); named!(pub parse_pmu_mappings<&[u8], Vec<PmuMapping> >, do_parse!( nr: le_u32 >> nodes: count!(parse_pmu_mapping, nr as usize) >> (nodes) ) ); named!(pub parse_group_description<&[u8], GroupDesc>, do_parse!( string: parse_perf_string >> leader_idx: le_u32 >> nr_members: le_u32 >> (GroupDesc { string: string, leader_idx: leader_idx, nr_members: nr_members }) ) ); named!(pub parse_group_descriptions<&[u8], Vec<GroupDesc> >, do_parse!( nr: le_u32 >> nodes: count!(parse_group_description, nr as usize) >> (nodes) ) ); pub fn parse_build_id_record<'a>( input: &'a [u8], record_size: usize, ) -> IResult<&'a [u8], BuildIdRecord> { do_parse!( input, pid: le_i32 >> build_id: take!(24) >> filename: take!(record_size - 4 - 24) >> // header.size - offsetof(struct build_id_event, filename) (BuildIdRecord { pid: pid, build_id: build_id.to_owned(), filename: unsafe { String::from_utf8_unchecked(filename.to_vec()) } }) ) } // Parse a perf header named!(pub parse_header<&[u8], PerfFileHeader>, do_parse!( tag!("PERFILE2") >> size: le_u64 >> attr_size: le_u64 >> attrs: parse_file_section >> data: parse_file_section >> event_types: parse_file_section >> flags: bits!(do_parse!( nrcpus: take_bits!(u8, 1) >> arch: take_bits!(u8, 1) >> version: take_bits!(u8, 1) >> osrelease: take_bits!(u8, 1) >> hostname: take_bits!(u8, 1) >> build_id: take_bits!(u8, 1) >> tracing_data: take_bits!(u8, 1) >> take_bits!(u8, 1) >> branch_stack: take_bits!(u8, 1) >> numa_topology: take_bits!(u8, 1) >> cpu_topology: take_bits!(u8, 1) >> event_desc: take_bits!(u8, 1) >> cmdline: take_bits!(u8, 1) >> total_mem: take_bits!(u8, 1) >> cpuid: take_bits!(u8, 1) >> cpudesc: take_bits!(u8, 1) >> take_bits!(u8, 6) >> // padding group_desc: take_bits!(u8, 1) >> pmu_mappings: take_bits!(u8, 1) >> ({ HeaderFlags { nrcpus: nrcpus == 1, arch: arch == 1, version: version == 1, osrelease: osrelease == 1, hostname: hostname == 1, build_id: build_id == 1, tracing_data: tracing_data == 1, branch_stack: branch_stack == 1, numa_topology: numa_topology == 1, cpu_topology: cpu_topology == 1, event_desc: event_desc == 1, cmdline: cmdline == 1, 
total_mem: total_mem == 1, cpuid: cpuid == 1, cpudesc: cpudesc == 1, group_desc: group_desc == 1, pmu_mappings: pmu_mappings == 1 } }) )) >> take!(29) >> // reserved (PerfFileHeader { size: size, attr_size: attr_size, attrs: attrs, data: data, event_types: event_types, flags: flags }) ) ); // Parse a perf header named!(pub parse_event_attr<&[u8], EventAttr>, do_parse!( attr_type: le_u32 >> size: le_u32 >> config: le_u64 >> sample_period_freq: le_u64 >> sample_type: le_u64 >> read_format: le_u64 >> settings: le_u64 >> wakeup_events_watermark: le_u32 >> bp_type: le_u32 >> config1_or_bp_addr: le_u64 >> config2_or_bp_len: le_u64 >> branch_sample_type: le_u64 >> sample_regs_user: le_u64 >> sample_stack_user: le_u32 >> clock_id: le_i32 >> sample_regs_intr: le_u64 >> aux_watermark: le_u32 >> le_u32 >> // reserved (EventAttr { attr_type: attr_type, size: size, config: config, sample_period_freq: sample_period_freq, sample_type: SampleFormatFlags::from_bits_truncate(sample_type), read_format: ReadFormatFlags::from_bits_truncate(read_format), settings: EventAttrFlags::from_bits_truncate(settings), wakeup_events_watermark: wakeup_events_watermark, bp_type: bp_type, config1_or_bp_addr: config1_or_bp_addr, config2_or_bp_len: config2_or_bp_len, branch_sample_type: branch_sample_type, sample_regs_user: sample_regs_user, sample_stack_user: sample_stack_user, clock_id: clock_id, sample_regs_intr: sample_regs_intr, aux_watermark: aux_watermark, reserved: 0 }) ));
Up to 15,000 people protested in Waterford over the possible downgrading of Waterford Regional Hospital, and other cuts to the health service. The non-political event was organised by Gillian Savage Corcoran and Andrea Galgey, who say they are "just concerned citizens". Under the banner of "South East Take a Stand - Save Waterford Regional Hospital", a joint Facebook campaign with over 20,000 members was organised within a week. Thousands of people gathered at Ballybricken at midday for the rally against a possible downgrading of the hospital and other issues affecting the city and surrounding region. Following an address, the crowds marched down The Glen and Bridge Street, along the Quay and onto John Roberts Square. At Roberts Square, they met with another initiative, Waterford Gives a Shirt. The group collected over 20,000 shirts from Waterford people in recent weeks, to symbolise giving the Government the shirts from people’s backs. Gardaí and public representatives estimated that between 12,000 and 15,000 people attended the protest.
def _extract_pipeline_of_pvalueish(pvalueish): if isinstance(pvalueish, tuple): pvalue = pvalueish[0] elif isinstance(pvalueish, dict): pvalue = next(iter(pvalueish.values())) else: pvalue = pvalueish if hasattr(pvalue, 'pipeline'): return pvalue.pipeline return None
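A quick illustration of the three branches above, assuming the function is importable; FakePValue is a hypothetical stand-in for a value type that carries a .pipeline attribute the way Beam PCollections do.

class FakePValue:
    def __init__(self, pipeline):
        self.pipeline = pipeline

p = object()  # stands in for a Pipeline
assert _extract_pipeline_of_pvalueish(FakePValue(p)) is p          # bare pvalue
assert _extract_pipeline_of_pvalueish((FakePValue(p), None)) is p  # tuple: first element
assert _extract_pipeline_of_pvalueish({'a': FakePValue(p)}) is p   # dict: first value
assert _extract_pipeline_of_pvalueish(object()) is None            # no .pipeline attribute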
import {ModuleWithProviders, NgModule} from '@angular/core'; import {HttpClientModule} from '@angular/common/http'; import {ResourceService} from './resource-services/resource.service'; import {CollectionResolverService} from './collection/collection-resolver.service'; import {ResourceObjectResolverService} from './item/resource-object-resolver.service'; import {ItemCacheService} from './item/cache/item-cache.service'; import {MODULE_CONFIG, ModuleConfiguration} from './config/module-configuration'; import {ResourceDescriptorProvider} from './descriptor/provider/resource-descriptor-provider'; import {DefaultDescriptorProvider} from './descriptor/provider/default-descriptor-provider'; import {ResourceDescriptorResolverService} from './descriptor/resolver/resource-descriptor-resolver.service'; import {ResourceSchemaService} from './resource-services/resource-schema.service'; import {ResourceObjectPropertyFactoryService} from './hal-resource/resource-object-property-factory.service'; @NgModule({ imports: [ HttpClientModule ], providers: [ ResourceService, CollectionResolverService, ResourceDescriptorResolverService, ResourceObjectResolverService, ItemCacheService, ResourceSchemaService, ResourceObjectPropertyFactoryService ] }) export class HalNavigatorModule { static forRoot(configuration: ModuleConfiguration, descriptorResolverFactory?: () => ResourceDescriptorProvider, descriptorResolverDeps?: any[]): ModuleWithProviders<HalNavigatorModule> { const factory = descriptorResolverFactory ? descriptorResolverFactory : (schemaService: ResourceSchemaService) => new DefaultDescriptorProvider(configuration, schemaService); const deps = descriptorResolverDeps ? descriptorResolverDeps : [ResourceSchemaService]; return { ngModule: HalNavigatorModule, providers: [{ provide: MODULE_CONFIG, useValue: configuration }, { provide: ResourceDescriptorProvider, useFactory: factory, deps }, ResourceObjectPropertyFactoryService] }; } }
#include<cstdio> #include<cstring> using namespace std; int main() { int n; int ans=0; scanf("%d",&n); if(n%3==0) ans=n/3*2; else ans=n/3*2+1; printf("%d\n",ans); }
/* This class is auto-generated; do not modify. */
package template

/* Divine shield spike configuration */
type ShieldTemplateVO struct {
	// id
	Id int `json:"id"`
	// next id
	NextId int32 `json:"next_id"`
	// tier number
	Number int32 `json:"number"`
	// name of the body-guarding immortal feather
	Name string `json:"name"`
	// upgrade success rate
	UpdateWfb int32 `json:"update_wfb"`
	// silver required to upgrade
	UseSilver int32 `json:"use_silver"`
	// gold required to upgrade
	UseGold int32 `json:"use_gold"`
	// bound gold required to upgrade
	UseBindGold int32 `json:"use_bindgold"`
	// item required to upgrade
	UseItem int32 `json:"use_item"`
	// number of items required to upgrade
	ItemCount int32 `json:"item_count"`
	// minimum number of attempts
	TimesMin int32 `json:"times_min"`
	// maximum number of attempts
	TimesMax int32 `json:"times_max"`
	// minimum progress added per training attempt
	AddMin int32 `json:"add_min"`
	// maximum progress added per training attempt
	AddMax int32 `json:"add_max"`
	// attribute bonus
	Attr int32 `json:"attr"`
	// progress value shown on the client
	NeedRate int32 `json:"need_rate"`
	// model ID
	ModelId int32 `json:"model_id"`
}
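For illustration, a hypothetical record matching the json tags above (all values invented), decoded here with Python for brevity:

import json

record = json.loads("""{
  "id": 1, "next_id": 2, "number": 1, "name": "Shield Spike I",
  "update_wfb": 8000, "use_silver": 10000, "use_gold": 0, "use_bindgold": 0,
  "use_item": 3001, "item_count": 5, "times_min": 1, "times_max": 3,
  "add_min": 10, "add_max": 30, "attr": 101, "need_rate": 100, "model_id": 9001
}""")
print(record["name"], record["update_wfb"])  # field names mirror the struct tags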
def fragment_size(self): if self._fragment_size is None: self._fragment_size = self.pyeclib_driver.get_segment_info( self.ec_segment_size, self.ec_segment_size)['fragment_size'] return self._fragment_size
// OnWorkerOffline implements MasterImpl.OnWorkerOffline func (m *MockMasterImpl) OnWorkerOffline(worker WorkerHandle, reason error) error { m.mu.Lock() defer m.mu.Unlock() m.onlineWorkerCount.Sub(1) args := m.Called(worker, reason) return args.Error(0) }
def addErrback(self, f): self._errbacks.append(f)
// ******************************************************************************
// File        : UUID.h
// Description : Generates the Universal unique ID integer
// Project     : iKan : Core
//
// Created by Ashish on 01/05/21.
// Copyright © 2021 Ashish. All rights reserved.
// ******************************************************************************

#pragma once

namespace iKan {

    // ******************************************************************************
    // "UUID" (universally unique identifier) or GUID is (usually) a 128-bit integer
    // used to "uniquely" identify information. In iKan Engine, even though we use
    // the term GUID and UUID, at the moment we're simply using a randomly
    // generated 64-bit integer, as the possibility of a clash is low enough for now.
    // This may change in the future.
    // ******************************************************************************
    class UUID
    {
    public:
        UUID();
        UUID(uint64_t uuid);
        UUID(const UUID& other);

        operator uint64_t () { return m_UUID; }
        operator const uint64_t () const { return m_UUID; }

    private:
        uint64_t m_UUID;
    };

}

namespace std {

    // ******************************************************************************
    // User-defined hash for UUID
    // ******************************************************************************
    template <>
    struct hash<iKan::UUID>
    {
        std::size_t operator()(const iKan::UUID& uuid) const
        {
            return hash<uint64_t>()((uint64_t)uuid);
        }
    };

}
A good way and a bad way

Monday, Apr 27, 2015

* Erickson…

During a hearing before lawmakers Wednesday, the new head of the state’s economic development agency offered up a recipe for how he’s going to lure more companies to Illinois. Jim Schultz, an Effingham entrepreneur tapped by Rauner to run the Illinois Department of Commerce and Economic Opportunity, said Illinois has many “hidden assets” that he’s going to use in his quest to replace jobs lost in recent years. […] He said he wants to go to drought-affected California and tell manufacturers what Illinois can offer. “Come to our state, I’ll give you our water. We have unlimited water,” Schultz said, pointing to the Mississippi, Illinois and Ohio rivers. […] “We have so many great hidden assets. We just haven’t optimized them,” Schultz said. “My focus is to go out and market this state.”

1) It’s nice to finally see a Rauner appointee not running down this state.

2) Water is, indeed, a major Illinois asset. Check out this list of the nine most drought-endangered states. Lots of Illinois competitors on there, including Texas.

* The lesson here is that the governor and his people don’t always have to harp on the union issue when it comes to economic development. As I told subscribers several days ago, the governor’s anti-union local resolution efforts are doing for unions what they haven’t been able to do for themselves: get organized locally. Here’s Doug Wilson in the Quincy Herald-Whig…

Adams County Board members voted April 15 to table their resolution supporting Rauner’s “Turnaround Agenda” after union members wanted to speak and the board’s one-week-early sign-up period for speakers was challenged. Board Chairman Les Post expects the vote will occur next month. So what will this vote do? It won’t really put any pressure on the Legislature. Speaker Michael Madigan and Senate President John Cullerton, a pair of Chicago Democrats, have control over whether a right-to-work bill comes up for a vote. They’re never going to allow a vote. What it will do is energize the unions, which will want to get more politically involved and get their people in office.

* Related…

* Cahill: How exactly is privatization better, Mr. Governor?

* Chicago Sun-Times Editorial: Exelon’s rate-hike proposal is a bad bill

* Schoenburg: Downstate representation gone from Illinois Commerce Commission

- Posted by Rich Miller
Ebola revisited: lessons in managing global epidemics. The latest statistics for the number of new cases of Ebola virus disease (EVD) in West Africa point to the near containment of the virus. While the current threat will not be deemed over until 42 days after the last case to be diagnosed has twice tested negative, there is now a shift in focus from an emphasis on containment to that of policy review and capacity building in light of lessons learned. This article primarily focuses on Sierra Leone. It revisits the issues surrounding the epidemic, seeking to summarise both the negative and positive aspects of the response at local and global levels, as well as highlights fresh perspectives from healthcare workers in the field for the management of similar epidemics.
/** * Parse an attribute value * @param value the value as a String * @param type the type of the parsed value * @param errors ActionErrors to which any parse errors are added * @return the parsed value */ public static Object parseValue(String value, Class<?> type, ActionMessages errors) { try { return ConstraintValueParser.parse(value, type); } catch (ParseValueException ex) { errors.add(ActionErrors.GLOBAL_MESSAGE, new ActionMessage("errors.message", ex.getMessage())); return null; } }
def make_a_leaf_cluster(self, texp):
    if texp > TestRoot.MAX_T:
        raise RuntimeError(
            "THIS WON'T WORK: texp is %d but may not exceed MAX_T=%d" % (
                texp, TestRoot.MAX_T))
    leaves = []
    count = 1 << texp
    mask = count - 1
    shift = 8 - texp
    if texp < 8:
        mask <<= shift
    key = self.rng.some_bytes(8)
    value = self.rng.some_bytes(16)
    for ndx in range(count):
        slot_nbr = ndx << shift
        mykey = bytearray(8)
        mykey[:] = key
        myval = bytearray(16)
        myval[:] = value
        mykey[0] &= ~mask
        mykey[0] |= slot_nbr
        mykey = bytes(mykey)
        myval[0] &= ~mask
        myval[0] |= slot_nbr
        myval = bytes(myval)
        leaf = Leaf(mykey, myval)
        leaves.append(leaf)
    return leaves
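The slot arithmetic packs a slot number into the top texp bits of the first key/value byte. A worked example for texp = 2, mirroring the code above (which assumes texp < 8 when widening the mask):

texp = 2
count = 1 << texp                   # 4 leaves
mask = (count - 1) << (8 - texp)    # 0b11000000: the top two bits of byte 0
slots = [ndx << (8 - texp) for ndx in range(count)]
print(slots)                        # [0, 64, 128, 192]
# Each leaf copies the shared random bytes, then overwrites the masked
# top bits of byte 0 with its own slot number:
first = 0xAB & ~mask | slots[3]
print(bin(first))                   # 0b11101011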
#!/usr/bin/env python from collections import deque import itertools as it import sys import math import re sys.setrecursionlimit(10000000) while True: n, m = map(int, raw_input().split()) if n + m == 0: break ls = [] for i in range(n): N, M = raw_input().split() N = N.replace("*", ".") ls.append((N, int(M))) ans = 0 for i in range(m): B = raw_input() for N, M in ls: if re.search(N, B): ans += M print ans
Control-oriented energy-profiling and modelling of urban electric vehicles Electric vehicles (EVs) are attracting more and more attention as a means to reach the desired reduction in transport-induced greenhouse emissions. To ensure an effective energetic management of these vehicles, especially in urban areas, dedicated control strategies are needed that correctly deal with the constraints related to the battery usage while preserving a good driving feeling. A preliminary step for the design of an effective controller is to get a reliable model of the vehicle dynamics of interest and thorough understanding of the energy flows in the vehicle itself. To this end, this paper proposes a systematic way to perform an energy-profiling of urban EVs and addresses the modelling and identification steps needed to achieve a control-oriented description of the vehicle longitudinal dynamics. Validation results are provided from data measured on a light electric two-wheeled vehicle.
/** * Construct a HpcNotificationTrigger object from string. * * @param notificationTriggerStr The notification trigger string. * @return HpcNotificationTrigger object */ private HpcNotificationTrigger fromNotificationTriggerString(String notificationTriggerStr) { HpcNotificationTrigger notificationTrigger = new HpcNotificationTrigger(); for (String trigger : notificationTriggerStr.split(",")) { HpcEventPayloadEntry payloadEntry = new HpcEventPayloadEntry(); payloadEntry.setAttribute(trigger.substring(0, trigger.indexOf('='))); payloadEntry.setValue(trigger.substring(trigger.indexOf('=') + 1)); notificationTrigger.getPayloadEntries().add(payloadEntry); } return notificationTrigger; }
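The trigger string format above is a comma-separated list of attribute=value pairs, split on the first '=' of each pair. A minimal Python sketch of the same split logic, for illustration only (the attribute names are invented, and the Java version builds HpcEventPayloadEntry objects instead of dicts):

def parse_trigger(trigger_str):
    entries = []
    for trigger in trigger_str.split(","):
        # partition() splits on the first '=', matching indexOf('=') in the Java code
        attribute, _, value = trigger.partition("=")
        entries.append({"attribute": attribute, "value": value})
    return entries

print(parse_trigger("COLLECTION_PATH=/a/b,USER_ID=bob"))
# [{'attribute': 'COLLECTION_PATH', 'value': '/a/b'}, {'attribute': 'USER_ID', 'value': 'bob'}]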
Side-by-side comparison of online videos where Dr. Muni Sheldon Polsky and Dr. Michael T. Trombley both claim to have written the same book on erectile dysfunction.

Physicians E.D. Center of Ohio in Sharonville is among a chain of erectile dysfunction clinics that is headed by a fraud convict, an Enquirer investigation has uncovered. (Photo: The Enquirer/Meg Vogel)

A national chain of erectile dysfunction clinics is headed by a man who spent time in federal prison for fraud and who has been called a con man and a scam artist by the Federal Trade Commission, an Enquirer investigation has uncovered.

A review of internal documents, emails and copies of training videos shows that management with Physicians E.D. – which has a clinic in Sharonville – has:

• Suggested firing physicians who voice concerns that treatment protocol could be dangerous for patients.

• Pushed employees to sell 12 to 18 months’ worth of medication, costing upwards of $6,000. Former employees say the company has focused more on pushing sales than on patients’ medical needs.

• Paid its staffers between 4 percent and 7 percent commission on medication sales.

• And trained its staff to tell patients the medication works “100 out of 100 times.”

Physicians E.D. materials feature prominently a man identified as “Rick N.” The Enquirer forwarded video stills of the man to two federal authorities, who confirmed that he is Richard C. Neiswonger, whose legal battles began in 1996 with federal agencies for deceptive business practices and get-rich-quick schemes.

Neiswonger has been sued several times in various federal courts, did federal prison time in the late 1990s on fraud charges and is currently awaiting sentencing after pleading guilty in 2012 to mail fraud and conspiracy to defraud the U.S. government.

A screenshot of a training video shows “Rick N.,” who The Enquirer has learned is Richard Neiswonger. Neiswonger has served time in federal prison on a fraud conviction and is currently awaiting sentencing for another conviction. (Photo: Provided)

Several former employees said that they didn’t know about Neiswonger’s criminal past until after they were fired or laid off. Some said they were uncomfortable with the company’s sales tactics but stayed because of the hefty paychecks.

“I had patients all the time who wanted to return the medication, and we’d say ‘you can’t,’” said Matt Jones, a medic hired to work in the Kansas City clinic. “That’s not why I got into this field. I’m honestly glad they got rid of me, because I can’t be a part of that.”

Clinic representatives say that their top priority is patient care. They’ve successfully treated thousands of men nationwide, they say, and the concerns raised to The Enquirer come from disgruntled past employees.

Neiswonger was permanently banned in 1997 from engaging in any type of deceptive telemarketing by the FTC. He’s since been sued for contempt twice by the agency – once for violating the permanent injunction, and again for violating a 2008 civil contempt order. He also was involved with another series of erectile dysfunction clinics that is being sued by the Massachusetts attorney general.

Solomon Wisenberg, Neiswonger’s attorney, told The Enquirer that his client is one of several people involved in operating Physicians E.D., and that it’s natural for a man with a criminal record not to want to disclose that past to consumers.
“There’s no disclosure requirement that I’m aware of when somebody pled guilty 15 or 20 years ago to fraud and has paid his debt to society,” Wisenberg said. “Rick has admitted that he’s made mistakes in the past. He’s trying to turn his life around, and as long as he’s not required to reveal, I don’t see why he would.”

Wisenberg acknowledged that Neiswonger hasn’t “paid his debt” for his most recent guilty pleas in Nevada, for which he’s yet to be sentenced. Neiswonger is cooperating with investigators in that case, his lawyer said, and is expected to provide helpful information to the government for an upcoming trial.

Convict’s name kept off clinic’s filings

The Enquirer began reporting on Physicians E.D.’s advertisements in March. Those stories uncovered nearly a dozen complaints from patients at the Sharonville location who said they were subjected to high-pressure sales tactics for treatments that they ultimately didn’t want.

Neiswonger’s name doesn’t appear on public filings connected with the Yale Clinic, which is the name of the parent company that operates Physicians E.D. in eight locations: Cincinnati; Pittsburgh; Kansas City; Fort Myers, Fla.; Cherry Hill, N.J.; Salt Lake City; Atlanta, and Hartford, Conn. It also operates the Huntington Medical Clinic in Metairie, La.

Former employees interviewed by The Enquirer said Neiswonger is one of the outfit’s bosses, and “Rick N.” is listed as director of marketing on company directories provided by one of the former employees. Neiswonger also appears in a training video, introducing himself by his full name.

The FTC declined to comment for this story. In past news releases, the agency has called Neiswonger a “business opportunity con artist” and a “scammer” with a history of deceiving consumers.

Neiswonger reached a settlement in 2011 with the FTC that required him to surrender his Las Vegas home, valued at more than $1 million. The agency has banned Neiswonger from misrepresenting or failing to disclose “material facts” when promoting any business program.

Wisenberg said Neiswonger isn’t violating the FTC’s permanent injunction. “He’s entitled to earn a living, he does it properly, and everybody deserves a chance to do that,” Wisenberg said.

Past employees interviewed by The Enquirer said the business is focused more on making money than patient care – an allegation the clinic’s medical director denies. The employees pointed to concerns some physicians and medics raised about the company’s guidelines about treating men with elevated blood pressure.

Treating hypertensive patients for erectile dysfunction isn’t on its own problematic, but the antidotes used to reverse erections that last too long are known to raise blood pressure. In June, Physicians E.D. disseminated guidelines that call for doctors to treat patients with blood pressures as high as 220/120 – far higher than the threshold for hypertension, which is generally considered to be 120/80.

Daniel Meade, 23, of West Chester, is trained as a paramedic and worked with Physicians E.D. until early June.
He said that physicians and medics who voiced concerns about treating hypertensive patients were threatened with firing. “If I, as a paramedic, come up to a scene and someone’s 219/119, we’re going really quickly to a hospital, because he’s at risk for a stroke,” he said. “At the clinic, he’s treated for ED.” In a written statement to The Enquirer, Physicians E.D. Medical Director Michael Trombley denied that hypertensive patients are put at risk by the clinic’s guidelines. “Patients with hypertensive emergency will ALWAYS be immediately directed to an ER and will NEVER be given ED treatments until that is resolved,” he said. Emails obtained by The Enquirer indicate that Neiswonger threatened to “weed out” and “terminate” physicians who refused to treat hypertensive patients. However, Trombley said that “no physician or medic has ever been fired due to voicing concerns over a treatment protocol that could be dangerous for patients. In fact, it is actually the opposite, a physician or medic would be potentially fired for NOT following safety treatment protocols that could possibly endanger patient care.”

‘It’s all a staged performance’

Employees of Physicians E.D. are shown training videos on dealing with patients. In one video, a Cincinnati staffer describes how he interacts with patients. “It’s all a staged performance meant to boost my sales,” Matt Kochersperger says in the video. Later, Kochersperger says he asks to see each patient’s driver’s license. “I do that so we have an ID, but I’m also doing that so I can see what credit cards he has in his wallet,” he says. Kochersperger declined to talk to The Enquirer, referring questions to Physicians E.D.’s corporate office. In an audio recording of another video provided to The Enquirer, Neiswonger gives step-by-step instructions to staffers on how to close a sale. The patient “is sitting there with an erection for perhaps the first time ever ... How hard of a sale could this really be?” Neiswonger says. “Not very hard. Our job is to get him to enroll in our treatment plan for a long period of time, 12 to 18 months, so he can properly restore function.” Neiswonger instructs staffers to tell patients that they should be having sex three to five times a week, to assure them that the penile injections are better than pills such as Viagra, and to say that the treatment is effective “100 out of 100 times.” Roy Guthrie is one of the former employees who spoke with The Enquirer. He ran the company’s Pittsburgh clinic until he was fired June 17. He said a series of Enquirer stories posted at Cincinnati.com caused him to start questioning the efficacy and safety of the medications being prescribed. “They were making these claims about the treatment, and at first, I thought, ‘OK, the doctors are on board, so this isn’t a problem.’ (The Enquirer’s) articles that came out made me raise an eyebrow,” he said. Part of his concern stemmed from staffers earning commission on sales, he said. The commission rate ranges from 4 percent to 7 percent, depending on the clinic’s overall sales for the week. That gives staffers incentive to push elderly patients into long-term treatment plans they might not want or need, and the company’s contracts make refunds difficult, Guthrie said. After The Enquirer’s stories ran, Guthrie said, management told employees to downplay the issues raised and even branded one clinic with a different name – Huntington Men’s Clinic – to ensure that the stories didn’t pop up when potential customers did online research.
“I started questioning things like that,” Guthrie said. “I said, ‘Why are we hiding? When a patient calls to complain and threatens to go to the (Better Business Bureau), why are we giving them their money back if we did nothing wrong?’”

Not Neiswonger’s first ED clinic

This isn’t Neiswonger’s first foray into the business of treating erectile dysfunction. He is mired in a legal battle related to a similar venture called Men’s Medical Clinic, another series of erectile dysfunction clinics that became the target of a class-action lawsuit filed in December. Massachusetts Attorney General Maura Healey earlier this month filed a lawsuit against Men’s Medical, accusing it of using deceptive marketing and high-pressure sales tactics. Healey’s complaint alleges that more than 4,000 consumers paid more than $5 million to one clinic in Framingham, Mass. Neiswonger isn’t publicly attached to Men’s Medical, but, in a civil suit filed in Orange County, Fla., his wife claims that she is an equal owner with medical director Kevin Hornsby. Shannon Neiswonger accuses Hornsby in the suit of “fraud and deceit” designed to steal her ownership in the “profitable and successful” business. In an email to coworkers in May, Neiswonger referenced a settlement between Hornsby and his wife. He wrote that the Yale Clinic was set to “acquire the following assets and markets,” and listed Washington, D.C., Detroit, Cleveland and Philadelphia. He closed the email with, “Our plate is full.” However, the Florida suit indicates that the settlement deal fell through. The next hearing on the matter is set for July 21. After The Enquirer’s stories about Physicians E.D. ran, the one-time face of that business – Muni Sheldon Polsky, a urologist in Florida – was replaced in June by Trombley, a family practitioner licensed in North Carolina. Polsky refused to comment on his departure to The Enquirer. “Why would I tell you personal things about myself?” he asked, before hanging up the phone. Polsky’s image, name and introductory videos were stripped from the Physicians E.D. websites and replaced by Trombley’s. One video features Trombley reciting the same script, word for word, that Polsky had – right down to claiming that he wrote a pamphlet called “Seven Secrets Doctors and Drug Companies Don’t Want You to Know About Erectile Dysfunction.” (Watch a video showing the similarity at Cincinnati.com.)

High-pressure sales, controversial treatments

Trombley serves as the physician face of the company, but each location also has one or two locally hired physicians on staff. Meade said that those physicians take a medical history of each incoming patient, but the penile injection is administered by an on-site paramedic or emergency medical technician. If the patient has an adverse reaction to the injection, staffers are instructed to call Yale headquarters, where David Vitelli serves as national medical operations director. Customers who aren’t satisfied with the medication or who try to back out of the purchase are largely rebuffed, according to past employees. In one email provided to The Enquirer, managers discussed a patient who complained that the medication he ordered arrived after its expiration date. He was told that a refund wasn’t possible, according to the email.
In another email, Vitelli threatened to fire a physician who had concerns about andropause cream – a controversial treatment that Trombley said is “helpful for erections and overall well-being.” The physician didn’t believe the cream was beneficial and wasn’t selling enough of it, Vitelli wrote in the email. “Unfortunately, if he can not push it, it will be a big problem. I may have to replace him,” Vitelli wrote. In response to a question about the email, a company spokesman released this statement: “David Vitelli adamantly denies ever stating that a physician would be terminated for not ‘pushing’ any particular medication or treatment.” Meade, the West Chester paramedic, formerly worked for Vitelli as national medical operations manager. He said the clinic buys the cream for $160 from Olympia Pharmacy in Orlando and sells it to patients for $900 – a 463 percent markup. “It was all about selling the product, about making money,” former employee James Johnson told The Enquirer. “That was their sole purpose.” Johnson worked at the company’s headquarters before it relocated last month to Pennsylvania. He sat in the call center, he said, and listened to employees pitching the treatment to customers nationwide. “Every week, once a morning, we’d have a staff phone call, and it was all sales, sales, sales. No talk of helping the patient,” said Johnson, who described his eventual departure as a “relief.” “I knew this was definitely not good for the old people. It was a sham. It was a time-share, high-pressure sale that a lot of these poor old guys were getting stuck with.” Trombley said that because of The Enquirer’s queries, the clinic is examining its marketing material, manuals and handouts to make sure all information is accurate. For example, he said, it is nixing wording that instructs staffers to tout the treatment as being effective “100 out of 100 times.” “We never want information to be construed as misleading,” he said.

Richard Neiswonger: A Timeline

After working as a semi-professional magician through the ‘60s and ‘70s under the stage names “Slick Rick” and “Ricky Penn,” Neiswonger later had a magical variety show with his wife called “Captain Space and Solar Sue.” He then switched his focus to businesses that ran afoul of the Federal Trade Commission and other federal authorities. Here’s a sampling of his run-ins with law enforcement, beginning in the 1990s:

• 1996: The Federal Trade Commission targets Neiswonger and several businesses he started for fraud and deceptive practices. The agency says Neiswonger marketed and sold business training courses throughout the U.S. The courses cost up to $12,900, and representatives promised clients that they would ultimately earn $150,000 a year. In reality, many clients didn’t recoup the cost of the courses, much less earn the “doctor’s income” that Neiswonger promised.

• 1997: The FTC issues a permanent injunction banning Neiswonger from engaging in deceptive telemarketing. The injunction prohibits Neiswonger from misrepresenting or failing to disclose “material facts” when promoting any business program.

• 1998: Neiswonger is sentenced to 18 months in federal prison after pleading guilty to money laundering and wire fraud charges.

• 2006: The FTC files to freeze Neiswonger’s assets, accusing him of violating the permanent injunction through his involvement with Asset Protection Group, Inc., which charged clients $10,000 to hide their financial assets from the IRS and other government agencies, creditors and courts.
Neiswonger’s cohort in that eight-year venture is William Reed, a former Colorado lawyer.

• 2007: The FTC announces that Neiswonger is banned for life from selling any type of business program in the future. In a news release, the agency calls him a “scam artist” who falsely claimed consumers could make six-figure incomes by buying his $10,000 business program.

• July 2011: Neiswonger, Reed and accountant Wendell Waite are federally indicted on multiple charges, including conspiracy to defraud, mail fraud, wire fraud and money laundering. They’re accused of making more than $60 million in the asset protection scheme. Reed pleads guilty in 2012 to conspiracy to defraud the United States, aggravated identity theft and evasion of tax payment. He is sentenced to nine years in prison.

• November 2011: Neiswonger and his second wife, Shannon, are indicted alongside Colorado attorney Robert McAllister on financial fraud charges. Neiswonger is accused of circumventing the FTC’s asset freeze by transferring money to McAllister from accounts over which Shannon Neiswonger had control. Separately, McAllister also is accused of embezzling $1 million of the money the Neiswongers transferred to him for safekeeping. (McAllister ultimately was disbarred and sentenced to more than six years in prison.)

• 2012: Neiswonger pleads guilty to fraud charges in both the Reed and McAllister cases. His sentencing date has been repeatedly adjourned. His next hearing in the case is set for December.

• 2009-2015: Neiswonger is involved in two separate nationwide chains of erectile dysfunction clinics that operate under multiple names, including Yale Clinic, Physicians E.D. and Huntington Men’s. Neiswonger also is involved in Men’s Medical Clinics, which court documents indicate was launched in 2009. Earlier this month, Massachusetts Attorney General Maura Healey sued that clinic and its director, Kevin Hornsby, for allegedly engaging in “a panoply of unlawful, unfair and deceptive acts and practices.” A lawsuit filed by Neiswonger’s wife Shannon is still pending in a Florida court. In that suit, Shannon (nee Shaffer) claims that she is an equal partner with Hornsby and that she “has always been the managing member of the business.” It also claims that Hornsby convinced Richard Neiswonger to forge his wife’s signature on an operating agreement in late 2013.
package org.sdmlib.models.classes.templates;

import org.sdmlib.codegen.Parser;
import org.sdmlib.models.classes.ClassModel;

import de.uniks.networkparser.list.SimpleList;

public class ExistTemplate extends TemplateTask {
   public SimpleList<TemplateTask> templates = new SimpleList<TemplateTask>();

   public ExistTemplate() {
   }

   public ExistTemplate(String template) {
      withTemplate(template);
   }

   public ExistTemplate withTemplates(Template... values) {
      if (values == null) {
         return this;
      }
      for (Template template : values) {
         templates.with(template);
      }
      return this;
   }

   @Override
   public TemplateResult execute(String searchString, Parser parser, ClassModel model, String... values) {
      TemplateResult text = new TemplateResult(template);
      boolean added = false;
      // Run every sub-template and append whatever it produces
      for (TemplateTask task : templates) {
         TemplateResult sub = task.execute(task.getTemplate(), parser, model, values);
         if (text.append(sub)) {
            added = true;
         }
      }
      // If no sub-template contributed anything, signal that there is nothing to generate
      if (!added) {
         return null;
      }
      return text;
   }
}
/** A split point in a method that can be split off into another method */
public static class SplitPoint {
  /**
   * The locals read in this split area, keyed by index. Value type is always int, float, long, double, or object.
   */
  public final SortedMap<Integer, Type> localsRead;

  /**
   * The locals written in this split area, keyed by index. Value type is always int, float, long, double, or object.
   */
  public final SortedMap<Integer, Type> localsWritten;

  /**
   * The values of the stack needed at the start of this split area. Type is always int, float, long, double, or
   * object.
   */
  public final List<Type> neededFromStackAtStart;

  /**
   * The values of the stack at the end of this split area that are needed to put back on the original. Type is always
   * int, float, long, double, or object.
   */
  public final List<Type> putOnStackAtEnd;

  /** The instruction index this split area begins at. */
  public final int start;

  /** The number of instructions this split area has. */
  public final int length;

  public SplitPoint(SortedMap<Integer, Type> localsRead, SortedMap<Integer, Type> localsWritten,
      List<Type> neededFromStackAtStart, List<Type> putOnStackAtEnd, int start, int length) {
    this.localsRead = localsRead;
    this.localsWritten = localsWritten;
    this.neededFromStackAtStart = neededFromStackAtStart;
    this.putOnStackAtEnd = putOnStackAtEnd;
    this.start = start;
    this.length = length;
  }
}
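// --- Added illustration (not from the original source) ---
// A hypothetical example of how a SplitPoint might be populated, assuming `Type`
// is org.objectweb.asm.Type: the region reads the int in local slot 1, writes an
// Object into slot 2, consumes nothing from the stack, and leaves one long on it.
import java.util.Collections;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

import org.objectweb.asm.Type;

class SplitPointExample {
  static SplitPoint example() {
    SortedMap<Integer, Type> localsRead = new TreeMap<>();
    localsRead.put(1, Type.INT_TYPE);
    SortedMap<Integer, Type> localsWritten = new TreeMap<>();
    localsWritten.put(2, Type.getObjectType("java/lang/Object"));
    List<Type> neededFromStackAtStart = Collections.emptyList();
    List<Type> putOnStackAtEnd = Collections.singletonList(Type.LONG_TYPE);
    // The split region covers 25 instructions starting at index 10
    return new SplitPoint(localsRead, localsWritten, neededFromStackAtStart, putOnStackAtEnd, 10, 25);
  }
}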
/**
 * Creates a {@link StereoDevice} for the given index and, on success, adds it to the internal device list.
 *
 * @param deviceIndex index of the stereo device to create
 * @param config optional custom configuration, matching the implementation, i.e. {@link StereoDeviceConfig.GenericStereoDeviceConfig}.
 * @param verbose if true, emit verbose logging during device creation
 * @return the created {@link StereoDevice}, or null if no device could be created
 */
public final StereoDevice createDevice(final int deviceIndex, final StereoDeviceConfig config, final boolean verbose) {
    final StereoDevice device = createDeviceImpl(deviceIndex, config, verbose);
    if( null != device ) {
        addDevice2List(device);
    }
    return device;
}
// Heuristic uses Euclidean (straight line) distance
class AstarEuclidHeuristic
{
public:
  template <class GraphType>
  static float calculate(const GraphType* pGraph, const int pNode1index, const int pNode2index)
  {
    return glm::distance(pGraph->getNode(pNode1index).getPosition(),
                         pGraph->getNode(pNode2index).getPosition());
  }
};  // note: a class definition needs a trailing semicolon
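// --- Added illustration (not from the original source) ---
// Minimal sketch of a graph type that satisfies the implicit interface required
// by AstarEuclidHeuristic::calculate; DemoNode/DemoGraph are hypothetical names.
#include <vector>
#include <glm/glm.hpp>

struct DemoNode {
  glm::vec2 pos;
  const glm::vec2& getPosition() const { return pos; }
};

struct DemoGraph {
  std::vector<DemoNode> nodes;
  const DemoNode& getNode(int i) const { return nodes[i]; }
};

// Usage: float h = AstarEuclidHeuristic::calculate(&graph, currentIndex, goalIndex);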
from jira_requests import jira_requests
import json
import pprint


class asset_tracker_restapi:
    def __init__(self, username=None, password=None):
        self.my_request = jira_requests(username=username, password=password)

    def add_asset(self, asset_dict):
        # This is only for testing; it was used to generate a large number of items
        json_dict = {'objectTypeId': asset_dict['object_type_id'],
                     # 'categoryName': item_dict['category'],
                     'attributes': [
                         {'objectTypeAttributeId': 2, 'objectAttributeValues': [{'value': asset_dict['asset_name']}]},
                         # {'objectTypeAttributeId': item_dict[''], 'value': [item_dict['asset_name']]},
                         {'objectTypeAttributeId': 5, 'objectAttributeValues': [{'value': asset_dict['tracking_hist']}]},  # Tracking Hist
                         {'objectTypeAttributeId': 6, 'objectAttributeValues': [{'value': asset_dict['rad_info']}]},  # Rad Info
                         # {'typeName': 'system.description', 'values': [item_dict['asset_desc']]},
                         {'objectTypeAttributeId': 7, 'objectAttributeValues': [{'value': True}]}
                         # {'typeName': 'nexo.asset.shipping.origin', 'values': [item_dict['origin']]},
                         # {'typeName': 'nexo.asset.image', 'values': [item_dict['asset_image']]}
                     ]}
        # print(json_dict)
        inserted_asset = self.my_request.post_data("object/create", json_dict)
        print(inserted_asset.json())
        return inserted_asset

    def upload_file_to_object(self, asset_id, file_object, comment):
        # Upload a file to an asset/object with a comment
        operator = "attachments/object/" + str(asset_id)
        file_added = self.my_request.post_file_upload(operator, file_object, comment)
        return asset_id

    def get_file_from_object(self, asset_id, file_url):
        # Request a single file from an asset
        response = self.my_request.get_data_file_url(file_url)
        return response

    def get_asset_attachments(self, asset_id):
        # Get a list of all file attachments the asset has
        asset_attachments = self.my_request.get_data("attachments/object/%s" % str(asset_id))
        return asset_attachments.json()

    def build_json_assets_dict(self, asset_dict):
        # Build up the JSON for creating or updating an object/asset from a dict of attributes
        json_dict = {}
        attribute_list = []
        json_dict.update({'objectTypeId': asset_dict['object_type_id']})
        for asset_attribute in asset_dict:
            # Iterate through all the attributes so we can bunch them together into a dict of value lists
            if asset_attribute != "object_id" and asset_attribute != "object_type_id":
                attribute_id = self.get_attribute_id_from_name(asset_dict['object_type_id'], asset_attribute)
                attribute_list.append({'objectTypeAttributeId': attribute_id,
                                       'objectAttributeValues': [{'value': asset_dict[asset_attribute]}]})
        json_dict.update({'attributes': attribute_list})
        return json_dict

    def add_asset_from_dict(self, asset_dict):
        # Add an asset; this is processed via a form dictionary with the appropriate information for the object schema
        json_dict = self.build_json_assets_dict(asset_dict)
        # Inserts are run through a POST request
        inserted_asset = self.my_request.post_data("object/create", json_dict).json()
        return inserted_asset['id']

    def update_asset_from_dict(self, asset_dict):
        # Update an asset via a dict that is passed in from a form
        json_dict = self.build_json_assets_dict(asset_dict)
        # Updates are required to go through a PUT request
        updated_asset = self.my_request.put_data("object/%s" % str(asset_dict['object_id']), json_dict).json()
        return updated_asset['id']

    def get_attribute_id_from_name(self, object_type_id, attribute_name):
        # If we pass an attribute name then we can get its corresponding ID
        attribute_dict = self.get_object_type_attributes(object_type_id)
        for attribute in attribute_dict:
            if attribute['name'] == attribute_name:
                return attribute['id']
        print("Could not find attribute id for attribute name : %s" % attribute_name)
        return 0

    def get_object_types_for_schema(self, schema_id):
        # Get all available object types / categories for assets that are under a specific schema
        object_types = self.my_request.get_data("objectschema/%s/objecttypes/flat" % str(schema_id))
        return self.parse_object_types_list(object_types)

    def parse_object_types_list(self, response):
        object_types_list = []
        output = response.json()
        for my_object_type in output:
            if 'description' not in my_object_type:
                my_object_type.update({'description': ""})
            object_type_dict = {'id': my_object_type['id'],
                                'name': my_object_type['name'],
                                'description': my_object_type['description']}
            object_types_list.append(object_type_dict)
        return object_types_list

    def get_object_type_attributes(self, object_type_id):
        object_type_attributes = self.my_request.get_data("objecttype/%s/attributes" % str(object_type_id))
        # pprint.pprint(object_type_attributes.json())
        if object_type_attributes == 0:
            return 0
        return self.parse_object_type_attributes(object_type_attributes)

    def parse_object_type_attributes(self, response):
        object_type_attributes_list = []
        output = response.json()
        for my_object_attrib in output:
            object_type_attributes_dict = {'id': my_object_attrib['id'],
                                           'name': my_object_attrib['name'],
                                           'hidden': my_object_attrib['hidden'],
                                           'editable': my_object_attrib['editable'],
                                           'field_type': my_object_attrib['defaultType']['name'],
                                           'options': my_object_attrib['options'].split(',')}
            if 'description' in my_object_attrib:
                object_type_attributes_dict.update({'description': my_object_attrib['description']})
            object_type_attributes_list.append(object_type_attributes_dict)
        return object_type_attributes_list

    def search_assets(self, field, search_text):
        '''Search for an asset using a field and the value for that field as input'''
        asset_query = field + '=' + search_text
        params_dict = {'query': asset_query}
        assets = self.my_request.get_data("search", params_dict)
        return assets

    def get_asset_field_from_json_by_name(self, entry_name, asset_data, entry_value='value'):
        my_entry_value = ""
        for entry in asset_data:
            # asset data is a list of dicts, so we need to iterate over it to find the Name entry
            if entry['name'] == entry_name:
                my_entry_value = entry[entry_value]
                break
        return my_entry_value

    def get_asset_object_type_by_id(self, asset_id):
        operator = "object" + "/" + str(asset_id)
        response = self.my_request.get_data(operator)
        if response == 0:
            return 0
        output = response.json()
        return output['objectType']['id']

    def get_asset_by_id(self, asset_id):
        operator = "object" + "/" + str(asset_id)
        asset = self.my_request.get_data(operator)
        if asset != 0:
            return self.parse_asset(asset)
        else:
            return 0

    def parse_asset(self, response):
        '''Take a response for a requested asset (from JIRA) and parse the data into a single easy-to-read list of dictionaries'''
        output = response.json()
        asset_list = []
        for attribute in output['attributes']:
            asset_dict = {'id': attribute['objectTypeAttribute']['id'],
                          'name': attribute['objectTypeAttribute']['name'],
                          'editable': attribute['objectTypeAttribute']['editable'],
                          'field_type': attribute['objectTypeAttribute']['defaultType']['name'],
                          'value': attribute['objectAttributeValues'][0]['value']
                          }
            if 'description' in attribute['objectTypeAttribute']:
                asset_dict.update({'description': attribute['objectTypeAttribute']['description']})
            asset_list.append(asset_dict)
        return asset_list

    def get_object_attribute_history(self, asset_id, object_attribute):
        asset_history_list = []
        operator = "object" + "/" + str(asset_id) + "/history"
        full_asset_hist = self.my_request.get_data(operator).json()
        for my_asset_hist in full_asset_hist:
            asset_history_dict = {}
            if 'affectedAttribute' in my_asset_hist:
                if my_asset_hist['affectedAttribute'] == object_attribute:
                    asset_history_dict.update({'person_fullname': my_asset_hist['actor']['displayName']})
                    asset_history_dict.update({'person_username': my_asset_hist['actor']['name']})
                    asset_history_dict.update({'change_date': my_asset_hist['created']})
                    asset_history_dict.update({'old_value': my_asset_hist['oldValue']})
                    asset_history_dict.update({'new_value': my_asset_hist['newValue']})
                    asset_history_list.append(asset_history_dict)
        return asset_history_list

    def get_asset_connected_tickets(self, asset_id):
        operator = "objectconnectedtickets" + "/" + str(asset_id) + "/tickets"
        asset_tickets = self.my_request.get_data(operator).json()
        return asset_tickets['tickets']

    def action_id_to_action(self, action_id):
        action = ""
        if action_id == 1:
            action = "Received"
        elif action_id == 2:
            action = "Shipped"
        elif action_id == 3:
            action = "Cleaned"
        elif action_id == 4:
            action = "Out for a beer"
        return action

    def test(self):
        print("Hello!, username is : %s, password is : %s" % (self.my_request.username, self.my_request.password))
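# --- Added usage sketch (not from the original source) ---
# Hypothetical example of driving the class above; the credentials, object type
# id and attribute names are placeholders that must match a real Insight schema.
if __name__ == "__main__":
    tracker = asset_tracker_restapi(username="svc_jira", password="changeme")

    new_asset = {
        "object_type_id": 3,          # schema-specific object type (placeholder)
        "Name": "Geiger counter 07",  # attribute names are resolved to ids by name
        "Location": "Lab B",
    }
    asset_id = tracker.add_asset_from_dict(new_asset)

    # Updates go through PUT and require the object_id of the existing asset
    new_asset["object_id"] = asset_id
    new_asset["Location"] = "Lab C"
    tracker.update_asset_from_dict(new_asset)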
# -*- coding: future_fstrings -*- import threading from flask_mail import Message from flask import render_template, current_app from app.extensions import mail def send_reset_password(user, pin, callback_url): send(f'{pin} is your Typer account recovery code', sender = current_app.config['MAIL_USERNAME'], recipients = [user.email], html = render_template( 'email.html', user=user, pin=pin, callback_url=callback_url) ) def send_email(app, msg): with app.app_context(): mail.send(msg) def send(subject, sender, recipients, html): message = Message(subject, sender=sender, recipients=recipients) message.html = html threading.Thread( target=send_email, args=(current_app._get_current_object(), message,) ).start()
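# --- Added usage sketch (not from the original source) ---
# Hypothetical route showing how send_reset_password might be called; `User`
# and `generate_pin` are placeholders, not part of the module above. Because
# the mail goes out on a background thread, the request returns immediately.
from flask import Blueprint, request, jsonify

bp = Blueprint('auth', __name__)

@bp.route('/recover', methods=['POST'])
def recover():
    user = User.query.filter_by(email=request.json['email']).first_or_404()
    pin = generate_pin()  # e.g. a random 6-digit code persisted server-side
    send_reset_password(user, pin, callback_url='https://example.com/reset')
    return jsonify(status='sent')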
<filename>src/util.h #pragma once #include "impacto.h" #include <glm/glm.hpp> #include <glm/gtc/quaternion.hpp> #include <algorithm> #include <SDL_stdinc.h> #include <string> // TODO own _malloca for gcc #if defined(_malloca) #define ImpStackAlloc _malloca #define ImpStackFree _freea #else #define ImpStackAlloc malloc #define ImpStackFree free #endif namespace Impacto { glm::mat2 Rotate2D(float angle); struct Rect; struct RectF { float X; float Y; float Width; float Height; RectF(); RectF(float x, float y, float width, float height); RectF(Rect const& rect); glm::vec2 Center() const; // Rect is rotated around center bool ContainsPoint(glm::vec2 point, float angle = 0.0f) const; }; struct Rect { int X; int Y; int Width; int Height; Rect(); Rect(int x, int y, int width, int height); Rect(RectF const& rect); glm::ivec2 Center() const; // Rect is rotated around center bool ContainsPoint(glm::ivec2 point, float angle = 0.0f) const; }; glm::vec2 DesignToNDC(glm::vec2 xy); RectF DesignToNDC(RectF const& rect); glm::vec4 RgbIntToFloat(uint32_t rgb); char* DumpMat4(glm::mat4* matrix, const char* columnSeparator = "\t", const char* rowSeparator = "\n"); // Thanks https://graphics.stanford.edu/~seander/bithacks.html#IntegerLog inline int Uint32Log2(uint32_t v) { unsigned int const b[] = {0x2, 0xC, 0xF0, 0xFF00, 0xFFFF0000}; unsigned int const S[] = {1, 2, 4, 8, 16}; int i; unsigned int r = 0; // result of log2(v) will go here for (i = 4; i >= 0; i--) // unroll for speed... { if (v & b[i]) { v >>= S[i]; r |= S[i]; } } return r; } // lots of guessing, again... inline glm::vec3 LookAtEulerZYX(glm::vec3 from, glm::vec3 to, glm::vec3 up = glm::vec3(0.0f, 1.0f, 0.0f)) { glm::vec3 result(0.0f); glm::vec3 forward = glm::normalize(from - to); result.x = atan2f(forward.y, sqrtf(forward.x * forward.x + forward.z * forward.z)); result.y = atan2f(forward.x, forward.z) - M_PI; return result; } // https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles#Euler_Angles_to_Quaternion_Conversion // + guessing at the order :) inline void eulerZYXToQuat(glm::vec3 const* zyx, glm::quat* quat) { float cosy = cos(zyx->z * 0.5f); float siny = sin(zyx->z * 0.5f); float cosr = cos(zyx->x * 0.5f); float sinr = sin(zyx->x * 0.5f); float cosp = cos(zyx->y * 0.5f); float sinp = sin(zyx->y * 0.5f); // This is currently slower than the scalar variant // TODO use sse swizzle instead of set #if defined(__SSE2__) && 0 __m128 a = _mm_set_ps(cosy, siny, cosy, cosy); __m128 b = _mm_set_ps(cosr, cosr, cosr, sinr); a = _mm_mul_ps(a, b); b = _mm_set_ps(cosp, cosp, sinp, cosp); a = _mm_mul_ps(a, b); b = _mm_set_ps(siny, cosy, siny, siny); __m128 c = _mm_set_ps(sinr, sinr, sinr, cosr); b = _mm_mul_ps(b, c); c = _mm_set_ps(sinp, sinp, cosp, sinp); b = _mm_mul_ps(b, c); const __m128 d = _mm_set_ps(1.0f, -1.0f, 1.0f, -1.0f); b = _mm_mul_ps(b, d); *(__m128*)quat = _mm_add_ps(a, b); #else quat->x = cosy * sinr * cosp - siny * cosr * sinp; quat->y = cosy * cosr * sinp + siny * sinr * cosp; quat->z = siny * cosr * cosp - cosy * sinr * sinp; quat->w = cosy * cosr * cosp + siny * sinr * sinp; #endif } inline float DegToRad(float deg) { return deg * (float)M_PI / 180.0f; } inline float RadToDeg(float rad) { return rad * 180.0f / (float)M_PI; } inline float NormalizeDeg(float deg) { deg = fmodf(deg + 180, 360); if (deg < 0) deg += 360; return deg - 180; } inline float NormalizeRad(float rad) { rad = fmodf(rad + (float)M_PI, 2.0f * (float)M_PI); if (rad < 0) rad += 2.0 * M_PI; return rad - M_PI; } inline bool StringEndsWith(std::string 
const& str, std::string const& ending) { if (str.length() < ending.length()) return false; return std::equal(ending.rbegin(), ending.rend(), str.rbegin()); } inline bool StringEndsWithCi(std::string const& str, std::string const& ending) { if (str.length() < ending.length()) return false; return 0 == SDL_strcasecmp(str.c_str() + str.length() - ending.length(), ending.c_str()); } } // namespace Impacto
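// --- Added sanity check (not from the original source) ---
// Uint32Log2 above computes floor(log2(v)) branchlessly; a few spot checks,
// assuming util.h is included:
#include <cassert>

static void TestUint32Log2() {
  assert(Impacto::Uint32Log2(1) == 0);
  assert(Impacto::Uint32Log2(2) == 1);
  assert(Impacto::Uint32Log2(3) == 1);        // floor, not rounding
  assert(Impacto::Uint32Log2(32) == 5);
  assert(Impacto::Uint32Log2(1000000) == 19); // 2^19 = 524288 <= 1e6 < 2^20
}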
import { Injectable } from '@angular/core';
import { ITurno } from '../models/turno.model';
import { AngularFirestoreCollection, AngularFirestore, AngularFirestoreDocument } from '@angular/fire/firestore';
import { environment } from 'src/environments/environment';
import { IUsuarioId } from '../models/usuarioid.model';
import { ITurnoId } from '../models/turnoid.model';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { Turno } from '../clases/turno';
import { IUsuario } from '../models/usuario.model';
import { Helpers } from '../clases/helpers';
import { IRangoHorario } from '../models/rangohorario.model';
import { Consultorios } from '../enums/consultorios.enum';

@Injectable({
  providedIn: 'root'
})
export class TurnosService {
  private collection: AngularFirestoreCollection<ITurno>;

  constructor(private afs: AngularFirestore) {
    this.collection = this.afs.collection(environment.db.turnos);
  }

  get Collection(): AngularFirestoreCollection<ITurno> {
    return this.collection;
  }

  static estaReservado(iturnoid: ITurnoId): boolean {
    return iturnoid.turno.reservado;
  }

  static estaEncuestado(iturnoid: ITurnoId): boolean {
    return iturnoid.turno.encuestado;
  }

  static aunNoAsiste(iturnoid: ITurnoId): boolean {
    return typeof iturnoid.turno.asistio === 'undefined';
  }

  static seAsistio(iturnoid: ITurnoId): boolean {
    return iturnoid.turno.asistio === true;
  }

  static seFalto(iturnoid: ITurnoId): boolean {
    return iturnoid.turno.asistio === false;
  }

  static reservadoPorUsuario(iturnoid: ITurnoId, clienteUID: string): boolean {
    return (iturnoid.turno.clienteUID === clienteUID);
  }

  static esCancelablePorUsuario(iturnoid: ITurnoId, clienteUID: string) {
    return (TurnosService.aunNoAsiste(iturnoid) && TurnosService.reservadoPorUsuario(iturnoid, clienteUID));
  }

  static completoEncuesta(iturnoid: ITurnoId, clienteUID: string) {
    return (iturnoid.turno.encuestado && TurnosService.reservadoPorUsuario(iturnoid, clienteUID));
  }

  static esEncuestablePorUsuario(iturnoid: ITurnoId, clienteUID: string) {
    return (
      iturnoid.turno.encuestado === false &&
      TurnosService.seAsistio(iturnoid) &&
      TurnosService.reservadoPorUsuario(iturnoid, clienteUID)
    );
  }

  static generarId(nTime: Date, consultorio: Consultorios): string {
    const consultorioStr = Consultorios[consultorio].toString();
    return consultorioStr + nTime.getTime();
  }

  static DataDAO(iturno: ITurno): Turno {
    const fecha = iturno.time.toDate();
    return new Turno(fecha);
  }

  static generarTurnosDisponiblesTodoElDia(dia: Date, consultorio: Consultorios, especialista: IUsuarioId): Array<ITurnoId> {
    const aux = new Date(dia);
    const rangoHorario: IRangoHorario = Helpers.traerRangoHorario(aux);
    const time: Date = new Date(rangoHorario.inicio);
    const iturnosid: Array<ITurnoId> = new Array<ITurnoId>();
    const TiempoMinimoConsulta = environment.clinica.tiempoMinimoConsulta;
    do {
      const nTime = new Date(time);
      const consultorioStr = Consultorios[consultorio].toString();
      const id = TurnosService.generarId(nTime, consultorio);
      const iturnoid = {
        id,
        turno: {
          time: nTime,
          consultorio: consultorioStr,
          especialistaUID: especialista.id,
          especialistaNombre: especialista.usuario.Nombre,
          reservado: false,
          encuestado: false,
          clienteUID: null,
          clienteNombre: null
        } as ITurno
      } as ITurnoId;
      iturnosid.push(iturnoid);
      time.setMinutes(time.getMinutes() + TiempoMinimoConsulta, 0, 0);
    } while (time <= rangoHorario.fin);
    return iturnosid;
  }

  // Throws if the slot has no client assigned. The original check compared
  // `typeof x === null`, which is always false because typeof yields a string;
  // `== null` catches both null and undefined.
  public static tieneCliente(iturnoid: ITurnoId) {
    if (iturnoid.turno.clienteUID == null || iturnoid.turno.clienteNombre == null) {
      throw Error('El turno no tiene el cliente a quien se lo debe reservar ');
    }
  }

  public static tieneResena(iturnoid: ITurnoId) {
    if (typeof iturnoid.turno.resena === 'undefined') {
      throw Error('no tiene reseña');
    }
  }

  public CargarResena(iturnoid: ITurnoId): Promise<void> {
    TurnosService.tieneResena(iturnoid);
    // update the slot under the specialist
    return this.actualizarTurno(iturnoid).then(res => {
      // update the booking under the client
      return this.actualizarReserva(iturnoid);
    });
  }

  public MarcarFalta(iturnoid: ITurnoId): Promise<void> {
    iturnoid.turno.asistio = false;
    // update the slot under the specialist
    return this.actualizarTurno(iturnoid).then(res => {
      // update the booking under the client
      return this.actualizarReserva(iturnoid);
    });
  }

  public MarcarEncuestado(iturnoid: ITurnoId): Promise<void> {
    iturnoid.turno.encuestado = true;
    // update the slot under the specialist
    return this.actualizarTurno(iturnoid).then(res => {
      // update the booking under the client
      return this.actualizarReserva(iturnoid);
    });
  }

  public Reservar(iturnoid: ITurnoId): Promise<void> {
    TurnosService.tieneCliente(iturnoid);
    iturnoid.turno.reservado = true;
    // update the slot under the specialist
    return this.actualizarTurno(iturnoid).then(res => {
      // create the booking under the client
      return this.actualizarReserva(iturnoid);
    });
  }

  public Cancelar(iturnoid: ITurnoId): Promise<void> {
    // delete the booking under the client, then free the slot
    return this.eliminarReserva(iturnoid).then(res => {
      return this.cancelarTurno(iturnoid);
    });
  }

  private cancelarTurno(iturnoid: ITurnoId): Promise<void> {
    return this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(iturnoid.turno.especialistaUID)
      .collection<ITurno>(environment.collections.usuarios.turnos)
      .doc(iturnoid.id)
      .update({
        reservado: false,
        clienteUID: null,
        clienteNombre: null,
      });
  }

  private eliminarReserva(iturnoid: ITurnoId): Promise<void> {
    TurnosService.tieneCliente(iturnoid);
    return this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(iturnoid.turno.clienteUID)
      .collection<ITurno>(environment.collections.usuarios.reservas)
      .doc(iturnoid.id).delete();
  }

  private actualizarTurno(iturnoid: ITurnoId): Promise<void> {
    return this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(iturnoid.turno.especialistaUID)
      .collection<ITurno>(environment.collections.usuarios.turnos)
      .doc(iturnoid.id)
      .set(iturnoid.turno);
  }

  private actualizarReserva(iturnoid: ITurnoId): Promise<void> {
    TurnosService.tieneCliente(iturnoid);
    return this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(iturnoid.turno.clienteUID)
      .collection<ITurno>(environment.collections.usuarios.reservas)
      .doc(iturnoid.id)
      .set(iturnoid.turno);
  }

  traerAtencionesPorDiaEspecialista(diaConsultado: Date, especialistaUID: string): Observable<ITurnoId[]> {
    const rango: IRangoHorario = Helpers.traerRangoDeUnDia(diaConsultado);
    const colection: AngularFirestoreCollection<ITurno> = this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(especialistaUID)
      .collection<ITurno>(environment.collections.usuarios.turnos, ref => ref
        .where('time', '>', rango.inicio)
        .where('time', '<', rango.fin)
        .where('reservado', '==', true)
        .orderBy('time', 'desc')
      );
    return this.makeObservable(colection);
  }

  traerPorDiaEspecialista(diaConsultado: Date, especialistaUID: string): Observable<ITurnoId[]> {
    const rango: IRangoHorario = Helpers.traerRangoDeUnDia(diaConsultado);
    const colection: AngularFirestoreCollection<ITurno> = this.afs.collection(environment.db.usuarios)
      .doc<IUsuario>(especialistaUID)
.collection<ITurno>(environment.collections.usuarios.turnos, ref => ref .where('time', '>', rango.inicio) .where('time', '<', rango.fin) .orderBy('time') ); return this.makeObservable(colection); } traerReservasPorUsuario(usuarioUID: string): Observable<ITurnoId[]> { const colection: AngularFirestoreCollection<ITurno> = this.afs.collection(environment.db.usuarios) .doc<IUsuario>(usuarioUID) .collection<ITurno>(environment.collections.usuarios.reservas, ref => ref.orderBy('time') ); return this.makeObservable(colection); } private makeObservable(collection: AngularFirestoreCollection<ITurno>) { return collection.snapshotChanges().pipe( map(actions => { return actions.map(a => { const iturno = a.payload.doc.data() as ITurno; const id = a.payload.doc.id; return { id, turno: iturno } as ITurnoId; }); })); } }
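// --- Added usage sketch (not from the original source) ---
// Hypothetical component showing the reserve flow; in the real app the client
// UID and name would come from the authentication layer.
export class ReservarTurnoSketch {
  constructor(private turnos: TurnosService) { }

  reservar(iturnoid: ITurnoId, clienteUID: string, clienteNombre: string): Promise<void> {
    iturnoid.turno.clienteUID = clienteUID;
    iturnoid.turno.clienteNombre = clienteNombre;
    // Reservar() throws via tieneCliente() if the client fields are missing
    return this.turnos.Reservar(iturnoid);
  }
}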
//----------------------------------------------------------------------------- // LookupFunctionLatestVersion finds the latest cached version of an existing CordbFunction // in the given module. If the function doesn't exist, it returns NULL. // // Arguments: // funcMetaDataToken - methoddef token for function to lookup // // // Notes: // If no CordbFunction instance was cached, then this returns NULL. // use code:CordbModule::LookupOrCreateFunctionLatestVersion to do a lookup that will // populate the cache if needed. CordbFunction* CordbModule::LookupFunctionLatestVersion(mdMethodDef funcMetaDataToken) { INTERNAL_API_ENTRY(this); return m_functions.GetBase(funcMetaDataToken); }
import numpy as np
import cv2
from pycocotools import mask as COCOmask


def showMask(img_obj):
    img = cv2.imread(img_obj['fpath'])
    img_ori = img.copy()
    gtmasks = img_obj['gtmasks']
    n = len(gtmasks)
    print(img.shape)
    for i, mobj in enumerate(gtmasks):
        if not (type(mobj['mask']) is list):
            print("Skipping RLE mask (only polygon masks are drawn here)")
            continue
        else:
            pts = np.round(np.asarray(mobj['mask'][0]))
            pts = pts.reshape(pts.shape[0] // 2, 2)
            pts = np.int32(pts)
            color = np.uint8(np.random.rand(3) * 255).tolist()
            cv2.fillPoly(img, [pts], color)
    cv2.addWeighted(img, 0.5, img_ori, 0.5, 0, img)
    cv2.imshow("Mask", img)
    cv2.waitKey(0)


def get_seg(height, width, seg_ann):
    label = np.zeros((height, width, 1))
    if type(seg_ann) == list or type(seg_ann) == np.ndarray:
        for s in seg_ann:
            # np.int was removed from modern NumPy; the builtin int works the same here
            poly = np.array(s, int).reshape(len(s) // 2, 2)
            cv2.fillPoly(label, [poly], 1)
    else:
        if type(seg_ann['counts']) == list:
            rle = COCOmask.frPyObjects([seg_ann], label.shape[0], label.shape[1])
        else:
            rle = [seg_ann]
        # decode the RLE into a binary mask and merge it into the label
        m = COCOmask.decode(rle) * 1
        label[label == 0] = m[label == 0]
    return label[:, :, 0]
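# --- Added quick check (not from the original source) ---
# Exercises the polygon branch of get_seg with a right triangle on a 100x100
# canvas; the RLE branch would need pycocotools-encoded input instead.
if __name__ == "__main__":
    triangle = [[10, 10, 90, 10, 10, 90]]  # flat x,y pairs, COCO polygon style
    mask = get_seg(100, 100, triangle)
    assert mask.shape == (100, 100)
    assert mask.sum() > 0  # some pixels were filled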
Christopher Meek knew on 9/11 that he would one day give back to the people who risk their lives on behalf of other Americans. Almost 16 years later, Meek gives back over and over again, helping many courageous citizens walk or stand through his group that provides exoskeleton devices to paralyzed veterans. “I was in the shadows of the Twin Towers on 9/11,” says Meek, 46, a finance officer who worked inside the World Trade Center. “One of my biggest memories of this day is the hundreds of people rushing into the buildings when others were running out.” The sight of the gallant rescuers left a profound impression. “I knew I had to give back to first responders,” Meek tells PEOPLE. Today, Meek and his organization, SoldierStrong, have donated or funded more than $2 million worth of medical devices, allowing 25,000 spinal cord-injured veterans access to equipment that helps them use their arms or legs. The appliances collectively are known as the Soldier Suit. They include high-tech robotic arms, a foot and an exoskeleton. The project is an outgrowth of an earlier effort to help soldiers in the field. A friend showed Meek a letter in 2009 from a deployed Marine whose unit lived in austere surroundings. Meek rallied to help, forming a group that shipped more than 50,000 pounds of baby wipes, bandages, socks and other items to overseas forces. In 2012, with less need to support deployed troops, Meek looked for other ways to help. “I came across an article about a company that makes exoskeleton devices,” Meek says. The mission was on. Meek and SoldierStrong focused on buying and donating the appliances. Meek and his group supply the devices at no cost to veterans’ hospitals, where patients use them during therapy under medical supervision. The suits function like wearable robots.

[Photo: Dan Rose. Steve Campbell Photography]

“It’s basically like leg braces that go into a backpack, with electric motors that guide my foot,” says former Army Reservist Dan Rose, who was paralyzed in 2011 after being hit by an explosion in Afghanistan. “It has sensors on the feet that tell it where my center of gravity is,” says Rose, who uses his suit at home as part of a special study. “Once I get where my center of gravity needs to be, I take the next step.”

[Photo: Dan Rose. Steve Campbell Photography]

Exoskeleton devices overall are a boon to veterans who live with impaired mobility, Paralyzed Veterans of America Executive Director Sherman Gillums, Jr. tells PEOPLE. The machines are part of evolving technology that offers “new solutions to helping veterans and the entire disability community overcome barriers to living on their own terms,” says Gillums, Jr. Each device presents a cool, sci-fi vibe, but offers good old fashioned elation. “The first time I stood up, it honestly felt like I was standing on top of a mountain,” Rose says. “I went from being seated all the time to being able to stand. I imagine that is the feeling people get when they get to the top of Mount Everest.” The first stand-up is always an emotional event, both for the veterans and their benefactor. “I’ve seen people walk for the first time,” Meek says. “It’s a special emotional moment each time you see it. Most of them cry the first time they stand up.” How would Rose describe the journey to his personal Everest? “It’s life changing,” Rose says. “It’s changed my life for the better. It’s been amazing.” To Meek, that represents a significant reward.
“One of my life’s regrets is that I never served, and I’m trying to make up for it today,” Meek says. “For me personally, the big takeaway is giving back.”
pub mod video;

pub const BASE_URL: &str = "https://www.youtube.com";
//= Register the graph with the system for utility GraphEdit (not really required). // binds variable reg void jhcBwVSrc::graph_reg () { IRunningObjectTable *rtab; IMoniker *id; WCHAR spec[128]; if (FAILED(GetRunningObjectTable(0, &rtab))) return; wsprintfW(spec, L"FilterGraph %08x pid %08x", (DWORD_PTR) manager, GetCurrentProcessId()); if (SUCCEEDED(CreateItemMoniker(L"!", spec, &id))) { rtab->Register(0, manager, id, &reg); id->Release(); } rtab->Release(); }
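//= Added companion sketch (not in the original source): revoking the ROT entry
//= on teardown so GraphEdit stops listing a dead graph. Assumes "reg" holds the
//= cookie stored by Register above; the method name is hypothetical.

void jhcBwVSrc::graph_unreg ()
{
  IRunningObjectTable *rtab;

  if (reg == 0)
    return;
  if (FAILED(GetRunningObjectTable(0, &rtab)))
    return;
  rtab->Revoke(reg);
  rtab->Release();
  reg = 0;
}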
Adhesive blood properties were studied in 100 MS patients using a new method developed on the basis of the leucocyte adherence inhibition (LAI) test, based on calculating the ratio of adhesive cells to non-adhesive ones. The value obtained was called the Index of Spontaneous Adhesion (ISA), while the corresponding indicator reflecting the strengthening of adhesion under the influence of autoserum, when expressed by 30% or more, was named the Effect of Adhesion Strengthening (ES-a). Blood samples from 54 donors and 31 patients with other neurological diseases were used as controls. A statistically significant increase in the ISA values of MS patients was detected compared with the donor group. The highest ISA and ES-a indices were found in the primary progressive course and at the stage of exacerbation in the remitting course of MS. Correlations between levels of adhesion and clinical features, as well as parameters of cellular and humoral immunity, are described. The role of membrane-bound and soluble forms of adhesion molecules in the initiation and progression of the immunologic process in MS is discussed.
// ScoreByBearish scores candles by if candle is bearish func (q *Quota) ScoreByBearish(score float64) { for _, candle := range *q { if candle.IsBearish() { candle.Score += score } } }
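// Added usage sketch (not from the original source): assumes Quota is a slice
// of *Candle where Candle exposes IsBearish() and a Score field, as implied above.
func scoreExample(q Quota) {
	q.ScoreByBearish(1.5) // every bearish candle gains 1.5 points
}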
def ecef_pointing(self, t, ant): point = self.pointing(t) if len(point.shape) == 3: k0 = np.zeros(point.shape, dtype=point.dtype) for ind in range(point.shape[2]): k0[:,:,ind] = self._transform_ecef(point[:,:,ind], ant) else: k0 = self._transform_ecef(point, ant) return k0
def age_group(self, age_group): self._age_group = age_group
Behavioral and Cognitive Interventions With Digital Devices in Subjects With Intellectual Disability: A Systematic Review

In recent years, digital devices have been progressively introduced in rehabilitation programs and have affected skills training methods used with children and adolescents with intellectual disabilities (ID). The objective of this review is to assess the effects of the use of digital devices on the cognitive functions and behavioral skills in this population, and to acknowledge their potential as a therapeutic tool. Electronic databases were analyzed until February 2020 using search formulas with free terms related to ID and the use of digital systems with children or adolescents. The risk of bias in randomized controlled trials was assessed by means of the modified Cochrane Collaboration tool, and the quality level of the non-randomized studies was assessed using the Newcastle-Ottawa Scale. Forty-four studies were analyzed, most of which were categorized as low quality. Of the executive function studies analyzed, 60% reported significant improvements, most commonly related to working memory. Within the cognitive skills, 47% of the studies analyzed reported significant improvements, 30% of them in language. Significant improvements in the social (50%) and behavioral domains (30%) were also reported. These results suggest that digital interventions are effective in improving working memory and academic skills, and positively affect both the social and behavioral domains. Little information has been published regarding the duration of the effects, which could be limited in time. Further research is necessary to assess long-term effectiveness, the influence of comorbidities, and the effects on subjects with severe ID. The inclusion of smartphones and special education centers is also necessary.

INTRODUCTION

The Diagnostic and Statistical Manual of Mental Disorders of the American Psychiatric Association, Fifth Edition (DSM-5) (1), defines the concept of "intellectual disability" (ID) as a "disorder that begins during the developmental period and it includes limitations in intellectual functioning and also adaptive behavior in the conceptual, social and practical domains." The meta-analysis of McKenzie et al. (2) reported a prevalence of intellectual disability of somewhat less than 1%, but more recent studies have reported a rate of 1.2% in American children aged 3-17 years (3, 4). Although the prevalence of ID is not the highest among the neurodevelopmental disorders, ID is a chronic disorder that imposes a heavy burden on the family and is among the top 20 most costly disorders (5, 6). The comorbidity or co-occurrence of mental disorders and neurological illness is common in children and adolescents with ID and affects both their clinical progression and the outcomes of interventions (5, 7-9). The most common co-occurring mental problems in children are autistic spectrum disorders (ASD), attention-deficit/hyperactivity disorder (ADHD) and behavioral and emotional problems, which are significantly related to the development of different domains of adaptive behavior (5, 9-13). Independently of comorbid disorders, it has been estimated that there are several overlapping cognitive difficulties in ID related to attention (14-16), learning (15-18), memory (15, 18, 19), perceptive and visuospatial skills (17, 20, 21), executive functions (15, 18, 22), processing speed (22), and communication (15, 23-25).
In the field of disability management, functional and psychosocial interventions are used most frequently, but cognitive interventions have also yielded positive results (26-28). Cognitive training refers specifically to repeated practice in a specific domain to obtain both cognitive and behavioral improvement (29). Although there are few evidence-based strategies available, professionals tend to adapt materials to meet the needs of subjects with ID to overcome difficulties in their day-to-day lives (15, 30). Recently, the number of studies describing and evaluating skills training programs (31-33) has been on the rise, coinciding with the exponential development of information and communication technologies (ICT). Browning et al. (34) were pioneers in using computers to assess the effectiveness of community skills training in children with ID. Digital technologies have easy, clear objectives and instructions, and their virtual environments, striking colors, and entertaining music and sounds can make them attractive and useful tools for interventions with subjects with ID. Although the use of these technologies has increased in recent years, with benefits reported in aspects like adaptive behaviors and learning, such as communication and socialization in small children with ID, research focusing on skills generalization and technology use is necessary (35). More specifically, virtual reality has been recommended as a means by which to practice or teach cognitive and emotional skills, robots have been suggested as a way to stimulate and engage children with ID, and handheld or multimedia devices have been recommended as learning supports. Digital media using interactive computer software (31, 32, 36-38) and web-based applications expressly designed to train and practice skills through smartphones or tablets (33, 39, 40) have both been used in subjects with ID. These programs have a fixed number of sessions of specific lengths, facilitating the process of recording performance measurements as well as longitudinal follow-up. In addition, these programs allow for both the provision of reward feedback and the adjustment of the difficulty of the task. For years, subjects with ID have been using technology to overcome their motor, communication and visual impairments (41), and these devices have contributed to facilitating their performance of day-to-day activities (42, 43). However, in order to fully take advantage of digital interventions (44-46), people with ID may need longer training periods and easier tasks to obtain the most benefit (47). Due to the number and diversity of skills training programs available through digital devices for people with ID, it is important to describe which digital interventions and media have been developed, as well as which are the most effective. Programs and devices have been used to support language learning and communication (48, 49), daily living skills, time perception and imagination (42), executive function (50, 51) and emotional skills (52), and to reduce behavioral problems (33). Due to the lack of systematic reviews conducted to assess the efficacy of digital interventions in children and adolescents with ID, our review focuses on this specific age group and encompasses all digital technology currently in common use.
The aims of this study are (1) to assess the use of digital devices in children and adolescents with ID and the effects derived from their use on cognitive functions (e.g., attention, memory, executive functions and language), academic and behavioral skills, daily routines, and social skills, and (2) to determine whether this methodology can be considered a therapeutic tool for subjects with ID. This systematic review will contribute to bringing to light the hard work done with this specific population and will constitute a step forward for the inclusion of people with ID in society and for an improved quality of life for children and adolescents with ID by offering them modern, effective interventions.

MATERIALS AND METHODS

Prior to the literature search, we registered with the PROSPERO database (registration number CRD42019121219) and created a detailed protocol in accordance with the Preferred Reporting Items for Systematic Review and Meta-Analysis Protocols (PRISMA-P) (53).

Literature Search Strategy and Information Sources

A systematic literature search of SCOPUS, the Web of Science and PsycINFO was carried out that ended in February 2020. The following search formula was created with the following free terms: ("intellectual disability" OR "mental retardation" OR "neurodevel * retardation" OR "cognitive disability") AND ("self-help devices" OR "video games" OR "virtual reality" OR "APPS" OR "tablets" OR "Ipad" OR "computer * ") AND ("child" OR "adolescent").

Study Selection Process and Eligibility Criteria

The systematic review and the selected studies were organized according to the participants, interventions, comparators, outcome measures, and study design (PICOS). Participants were children and adolescents with ID (mild to profound) or with syndromes associated with ID. All studies included therapeutic interventions using digital devices such as virtual reality, computers (including laptops), touch screens, input devices and handheld devices. Outcome measures obtained using non-standardized or standardized tests were included if the dependent variables were related to either the cognitive, social, emotional or behavioral domains. The study designs included in our analysis were experimental studies, both randomized and quasi-experimental (non-randomized or without a control group). We included articles published in peer-reviewed journals in English or Spanish. We excluded studies whose participants were parents or professionals, or had mental illness, traumatic brain injury or sensorial impairment. In addition, we excluded studies that did not meet the previously defined PICOS characteristics or which contained poor empirical data. Case studies, reviews, abstracts and communications from scientific meetings, and qualitative studies were not considered. The inclusion of the studies was independently reviewed by two authors. A form with inclusion criteria was designed and reviewed by all authors. In the first round, the titles and abstracts of articles were selected in accordance with the form. In the second round, we assessed the full-text articles for selection based on the inclusion criteria. In some cases, we requested the full text from the authors. Duplicated articles were removed. When necessary, any disagreement was discussed with a third author. We created a PRISMA flow chart to track the studies we included and discarded.
Risk of Bias Assessment

Three reviewers independently assessed the risk of bias for each study using the modified Cochrane Collaboration tool (55) for randomized controlled trials. Bias was assessed as a judgment (high, low or unclear) for individual elements from five domains (selection, performance, attrition, reporting, and other). We converted this score to a quality assessment, such that a high risk of bias equals low quality, a low risk of bias equals high quality, and an unclear risk of bias equals moderate quality. For non-randomized studies, we assessed the quality level using the Newcastle-Ottawa Quality Assessment Scale (56). Bias was assessed as a judgment (good, low) for individual elements from three domains (selection, comparability and ascertainment), resulting in a total score. The categorization of the quality assessments is reflected in the summary tables.

Study Characteristics

The flow chart included as Figure 1 illustrates the process of selection for the articles included in this systematic review. First, studies were identified in databases (n = 535) and manual searches (n = 4), and then duplicates were removed (n = 33). The titles and abstracts of the remaining 506 publications were screened to select articles that met the inclusion criteria, and 306 articles were excluded. The remaining 200 full texts were carefully examined. Then the articles that did not report an intervention or that were beyond the scope of this systematic review were excluded (n = 156). In the end, a total of 44 articles were included in this review. The general characteristics are summarized in Table 1, while detailed information for each study is summarized in Tables 2-4.

Executive Functions

As detailed in Table 2, 10 studies assessed the effect of interventions on executive functions; some of them also evaluated reasoning. Following the theoretical models of different authors and the methodology of some of the studies (57, 58), reasoning has been included in the executive function analyses. Across all the studies, executive functions were assessed in 462 children and adolescents with ID. The majority of these studies were published within the last decade, and only one was published between 2000 and 2009 (59). Sample sizes were small in all of the studies we analyzed (range n = 10 to n = 95). Participant age ranged from 4 to 21. Most of the studies used randomized designs, except two: a quasi-experimental non-randomized study (60) and a quasi-experimental study without a control group (61). Most of the interventions used personal computers (n = 9) (45-47, 59-64). There was great variability in the tasks used: repeated sequences (45-47, 60, 61), matrices (62), identification and discrimination (50, 60, 64), classification and ordering (45, 47, 59), and repeated patterns, matching and arithmetic operations (45, 61, 63). Intervention sessions generally lasted between 20 and 30 min or until the completion of a set number of tasks or sessions (47, 62). The duration of the interventions ranged from 4 to 23 weeks. Posttest evaluations were generally performed only at the end of the program (45, 60, 61, 64) or at 1 or 2 months (50, 63). Almost half of the studies did not refer to any follow-up monitoring, and for the rest the interval ranged from 2 months to 1 year (45, 46, 50, 61, 64). The functions most commonly evaluated were working memory (45-47, 50, 60, 61, 63, 64) and reasoning (45, 47, 62, 64).
Outcome measures were obtained by means of behavior rating instruments that assess executive functions, such as the Behavior Rating Inventory of Executive Function (BRIEF) and the Automated Working Memory Assessment (AWMA) (45-47, 50), and neuropsychological tests applied to children, such as the working memory subtests of the Wechsler Intelligence Scale (45, 47, 60, 61) and the Stroop Task (63, 64). Nine studies used standardized tests and six used non-standardized quantitative measures. Of the studies analyzed, six reported significant improvements and the remainder reported some improvements or non-significant changes (45, 50, 62-64). Finally, the quality assessments of the studies are reflected in the summary tables.

Academic Skills

As detailed in Table 4, seven studies assessed the effect of interventions on academic skills, with a total of 264 subjects evaluated. Most of them (six studies) were published between 2010 and 2020, and one study was published before 1999. Sample sizes were small in all of the studies analyzed (range n = 3 to n = 95). Participant age ranged between 3 and 23 years. Regarding study design, most of the studies (n = 5) were randomized, and two were quasi-experimental without a control group (80, 81). Regarding the technology used in the interventions, most of the studies (n = 5) used personal computers (63, 64, 73, 80, 82), while some used other devices (78, 81). Concerning the functions evaluated, mathematics was the most common (n = 6) (50, 63, 64, 80-82). There was great heterogeneity in the tasks used; the most common were related to arithmetic operations (63, 80), matching or response questions (81, 82), and identification and classification (50, 64, 73). In general, sessions lasted between 10 and 30 min, and most of the studies (n = 6) had a duration of between 1 and 4 months. Four studies specified post-test assessments, conducted from immediately after the end of the intervention to 2 months later. Only three studies conducted long-term assessments, ranging between 10 weeks and 3 months (50, 64, 80). With reference to assessment tools, the majority of the studies analyzed used non-standardized quantitative measures (n = 5), and the rest (n = 2) used standardized tests. Regarding the outcomes reported, two studies showed significant improvements (64, 82) and the rest (n = 5) reported some improvements or non-significant changes. Six studies were categorized as low quality and one as moderate quality.

Behavioral and Social Skills

The studies that assessed the effect of interventions on behavioral and social skills are described in Table 5. Across all the studies, 759 children and adolescents with ID were assessed. Again, most of these studies were published in the last decade (n = 17). Sample sizes were mostly small (range n = 3 to n = 87) and ages ranged between 4 and 31 years. In terms of the design of the studies analyzed, 12 […]

Summary of Main Findings

The main objective of this review was to assess the effects of digital interventions on trained skills in children and adolescents with ID. In general, the available evidence suggests that interventions undertaken with digital devices are potentially beneficial for executive functions (e.g., working memory, reasoning and planning), basic cognition (such as language and attention) and academic training (concretely, mathematics), as well as in the social and behavioral domains. The increasing number of studies assessing the effectiveness of digital devices in recent years is noteworthy.
Some studies assessed several functions at the same time; in these cases we included the same study in each of our function categories: executive functions, basic cognition skills, and behavioral and social skills (50, 63, 64). Kirk et al. (50) concluded that attention training did not improve other skills, such as receptive vocabulary, phonological abilities, or cardinality. This observation suggests a possible line of research focused on exploring more deeply the connection between training in one skill and benefits in others. More than half of the studies we analyzed used randomized designs, albeit most of them evaluated a small number of participants. The studies were very heterogeneous in terms of the age ranges, tasks and devices used. Computers were the most frequently used devices, followed by handheld devices. However, we noted a lack of studies comparing the same task administered on different devices. In fact, the choice of device seemed to be adapted to the subjects' requirements. Interestingly, it has been suggested that subjects with ID perform better using a mouse than a touch screen (73), which could be attributable to motor difficulties. Numerous tasks (mazes, puzzles, matching, discrimination, and sequences) were developed to train different functions. Although most studies used games, some used videos depicting different scenes and situations to train social skills (83, 86, 91). Several authors indicated that, in order to obtain beneficial effects, the tasks must include positive reinforcement, immediate feedback, and frequent repetition (45, 46, 61, 96). Because the lengths of the sessions and the duration of the interventions varied from study to study, we did not have enough data to draw a conclusion in this regard. Two studies described positive outcomes in the social and behavioral domains after a short intervention of 2 weeks (34, 86), but these studies did not include a follow-up assessment, and there were not sufficient data to evaluate long-term effectiveness. We found a wide range of durations, from 4 to 27 weeks, over which the effectiveness of the intervention could be demonstrated. Ottersen and Grill (47) replicated the research conducted by Söderqvist et al. (45), but extended the length of the interventions and then compared the results of the two studies. Comparing short vs. long interventions, they concluded that progress in working memory and nonverbal reasoning was more constant and stable in long interventions. Several studies did not specify the time at which the post-test evaluation was conducted; some performed it immediately at the end of the program, while others assessed outcomes between 1 week and 1 year after the training program ended. In general, all of the studies that evaluated subjects within 1 week of training reported improvements (60, 61, 64, 66, 71, 75, 78, 86, 92, 94). Only a few studies included a follow-up step to assess long-term effects. Improved or maintained skills were reported at 1 and 4 months (46, 60, 64, 74, 75, 78), at 5 months (75), and at 6 months (66). A 1-year follow-up assessment did not find any effects (45), suggesting that the effects could be limited in time and that repeated intervention would be necessary to maintain the improvements achieved. Several studies comparing digital training with traditional (or typical) interventions suggest that digital methods may be more effective than traditional ones (45-47, 50, 51, 60, 62-65, 72, 76, 86, 89, 91, 94).
ID severity is an important factor to take into account, as intelligence level may limit outcomes, and subjects with moderate and severe ID will require greater support to meet the requirements of the tasks (82). However, many authors did not specify the severity of their subjects' ID, while others combined mild and moderate ID without comparing between ID severities. Only Passig (59) observed that subjects with mild ID performed better than those with moderate ID. On the other hand, a great number of studies with subjects with mild ID reported benefits (34, 46, 62-64, 67, 72, 75, 78, 81, 85, 87, 89, 95), but few studies reported positive results in the moderate and severe ID population (65, 69, 70, 76). More studies with larger samples comparing task achievement between ID severities are needed. The majority of the studies did not specify the presence or absence of medical or psychiatric comorbidities in children with ID, but when these were described, the most common were genetic disorders such as Down's, Fragile X (FXS) or Williams syndromes. This is an important issue because comorbidities may add task-specific challenges. For example, some difficulties were observed when training spatial knowledge in individuals with Down's syndrome using a virtual environment (96). In some cases, cerebral palsy (CP), ASD, ADHD, and motor and sensory impairments were also present. These comorbidities may interfere with task performance and the outcome of the intervention, and should be noted in the results of the studies. It is important to emphasize that new technologies do not replace the work of professionals (80), but they can help in combined interventions. Furthermore, future research is needed to assess the usefulness of other handheld devices, such as smartphones, for educational purposes. New technologies are powerful tools that can also impact the everyday lives of people with ID. The extended use of handheld devices may have beneficial effects on social and relationship skills through the construction of social networks (84) or, on the other hand, may have detrimental effects by increasing the risk of cyberbullying aimed at adolescents with ID (97). However, neither handheld devices nor social skills have received much attention in subjects with ID. In agreement with a previous systematic review conducted by den Brok and Sterkenburg (42), which included the ID population of all ages, we conclude that there is evidence to support the effectiveness of digital interventions in some daily living, cognitive, academic and social skills domains. However, we did not find evidence of beneficial effects lasting more than a few months after the cessation of the training sessions. Moreover, to our knowledge, no research has been published that evaluates the effects of repeated interventions on long-term outcomes. In this regard, more follow-up studies are needed to examine the beneficial effects of long-term and repeated interventions.

Limitations and Future Directions for Research

The main limitation of this review is related to the low quality of the studies analyzed, in part due to their small sample sizes. Our conclusions must be taken with caution, because monitoring of the use of digital devices at home was not described, and the complex characteristics of the samples could have influenced the study designs.
The use of non-standardized tests and of assessments rated by parents or teachers (e.g., BRIEF, ABC) was very common, and these carry a potential risk of bias due to their subjective component (46). For future lines of research, it would be interesting to conduct studies in special education centers, where training variables may be easier to control than at home. It is also necessary to observe effects in subjects with severe and profound ID. It is important to consider longer follow-up assessments and longer interventions (e.g., 6 and 12 months), given the lack of studies that make use of these longer formats and because the improvements reported are probably not permanent (50, 78). The influence of the type of disorder giving rise to the intellectual disability, as well as the most effective digital devices for use in these types of interventions, should also be explored. Despite the methodological limitations described, the data analyzed suggest that digital interventions have potential as a therapeutic tool to benefit working memory, academic skills, and the social and behavioral domains in children and adolescents with ID.

DATA AVAILABILITY STATEMENT

The original contributions presented in the study are included in the article/supplementary material; further inquiries can be directed to the corresponding author/s.

AUTHOR CONTRIBUTIONS

The design of the form, data collection, and the inclusion analysis of the studies were independently carried out by MC and MT. When necessary, disagreements were discussed with JC. All authors contributed to the study conception and design, contributed to the quality analysis, commented on previous versions of the manuscript, and read and approved the final manuscript.

FUNDING

This work was supported by an Industrial Doctorate grant from the Generalitat de Catalunya (reference number 2019 DI 72).
package com.iprzd.zshop.http.response;

import com.iprzd.zshop.entity.commodity.CommodityInfo;
import lombok.Getter;
import lombok.Setter;

/** Response wrapper carrying a single commodity's detail record. */
@Getter
@Setter
public class CommodityInfoResponse extends BaseResponse {
    private CommodityInfo commodityInfo;
}
Many doctors working in India's "profit driven" private hospitals are under pressure to carry out unnecessary and "risky" tests and procedures to meet revenue targets, a report published in The BMJ journal has claimed.

"Doctors who face pressure from hospital management to over-prescribe surgeries or investigations fear for their livelihood," Dr Gautam Mistry, a cardiologist in Kolkata, told The BMJ. "Also they need to practise for a certain number of years, and by complaining they would be jeopardising their career," he added.

SATHI (Support for Advocacy and Training to Health Initiatives), a non-governmental organisation based in Pune, has for the first time documented the problem. Its recent report, Voices of Conscience from the Medical Profession, comprised interviews by the gynaecologist Arun Gadre with 78 doctors throughout India.

Gadre told The BMJ that India has seen a rise in multi-speciality hospitals. "Their main aim is to generate revenue and profits for their investors," he said. "In the race to earn higher profits, conscience takes a back seat, and doctors are encouraged to indulge in unethical practice," Gadre added.

"Significant numbers of patients must be advised to be admitted for surgery or medical procedures that bring in real financial profit for the hospital," said Kunal Saha, adjunct professor and HIV/AIDS consultant in Ohio, US, and president of People for Better Treatment, a non-governmental organisation that promotes corruption-free healthcare in India. "Needless surgery, even simple procedures like tonsillectomy or appendectomy, may cause unexpected hazards for the patient," he said.

However, some doctors disagree about the ubiquity of financial targets for doctors, including Dr Devi Shetty, chairman of the Narayana Health Group, which runs 32 for-profit hospitals in 20 locations in India and abroad. He argues that setting financial goals for a doctor is not common practice in India and told The BMJ that Narayana's hospitals do not set financial targets for doctors but do set performance targets to raise efficiency.

The Medical Council of India (MCI) is responsible for institutional regulation of medical services, explained Bangalore-based journalist Meera Kay, who wrote the BMJ report. "But the MCI's reputation is in tatters; its inability to collect data on alleged medical negligence and general failure to bring prosecutions instill no confidence," she wrote.

"A radical change in the structure and functioning of the MCI is the need of the hour," Gadre told The BMJ. "The elected members are all doctors, which could result in a biased outlook," he said.

The BMJ said it contacted the MCI and the Association of Healthcare Providers, the trade body for private hospitals, but had not received responses by the time the report was published.
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
import mxnet as mx
import numpy as np

# Import converters
from .layers import CONVERTERS

# Import PyTorch model template
from .pytorch_model_template import pytorch_model_template


def eval_model(pytorch_source, pytorch_dict, module_name):
    # Tricky: the exec'd module source expects torch / nn / F to be in scope,
    # which is why they are imported at the top of this file.
    exec(pytorch_source)
    globals()[module_name] = locals()[module_name]
    pytorch_model = locals()[module_name]()
    pytorch_model.load_state_dict(pytorch_dict)
    return pytorch_model


def render_module(inits, calls, inputs, outputs, dst_dir, pytorch_dict, pytorch_module_name):
    """
    Render the model source and, if dst_dir is given, write the source and the
    weights to disk.
    """
    inits = [i for i in inits if len(i) > 0]

    output = pytorch_model_template.format(**{
        'module_name': pytorch_module_name,
        'module_name_lower': pytorch_module_name.lower(),
        'inits': '\n'.join(inits),
        'inputs': inputs,
        'calls': '\n'.join(calls),
        'outputs': outputs,
    })

    if dst_dir is not None:
        import os
        import errno

        try:
            os.makedirs(dst_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        with open(os.path.join(dst_dir, pytorch_module_name.lower() + '.py'), 'w+') as f:
            f.write(output)

        torch.save(pytorch_dict, os.path.join(dst_dir, pytorch_module_name.lower() + '.pt'))

    return output


def gluon2pytorch(net, args, dst_dir, pytorch_module_name, debug=True, keep_names=False):
    """
    Convert a Gluon model to an equivalent PyTorch model.
    """
    # Run a forward pass with dummy data so the network's parameters are materialized
    x = [mx.nd.array(np.ones(i)) for i in args]
    x = net(*x)

    # Get network params
    params = net.collect_params()

    # Create symbols to trace the net
    x = [mx.sym.var('__input__' + str(i)) for i in range(len(args))]
    sym = net(*x)

    if len(sym) > 1:
        group = mx.sym.Group(sym)
    else:
        group = sym

    # Get the JSON definition of the model
    json_model = json.loads(group.tojson())['nodes']

    # Create empty accumulators
    nodes = []
    is_skipped = []
    pytorch_dict = {}
    inits = []
    calls = []
    inputs = []
    outputs = [i[0] for i in json.loads(group.tojson())['heads']]

    names_dict = {} if keep_names else None

    # Trace the model
    for i, node in enumerate(json_model):
        if keep_names:
            names_dict[i] = node['name']

        # A node with a 'null' op is not a real op but a parameter (or an input)
        # TODO: convert constants
        if node['op'] == 'null':
            if node['name'].find('__input__') == 0:
                inputs.append(int(node['name'][9:]))
            is_skipped.append(1)
            continue

        # It's a real op
        is_skipped.append(0)

        # Collect the node parameters we need
        op = {
            'name': node['name'][:-4],
            'type': node['op'],
        }

        if len(node['inputs']) > 0:
            original_inputs = [i for i in np.array(node['inputs'])[:, 0] if i in inputs]
            op['inputs'] = [i for i in np.array(node['inputs'])[:, 0]
                            if is_skipped[i] != 1 or i in original_inputs]
        else:
            op['inputs'] = []

        try:
            # Not all nodes have 'attrs'
            op['attrs'] = node['attrs']
        except KeyError:
            op['attrs'] = {}

        # Debug output
        if debug:
            print(op, node)
            print('__')

        # Append the new node to the list
        nodes.append(op)

        # If the operation has a registered converter, convert it
        if op['type'] in CONVERTERS:
            init_str, call_str = CONVERTERS[op['type']](
                i, op, nodes, params, pytorch_dict, names_dict, debug)
            inits.append(init_str)
            calls.append(call_str)
        else:
            raise AttributeError('Layer isn\'t supported')

    if names_dict is not None:
        inputs = ', '.join([names_dict[i] for i in inputs])
        outputs = ', '.join([names_dict[i] for i in outputs])
    else:
        inputs = ', '.join(['x' + str(i) for i in inputs])
        outputs = ', '.join(['x' + str(i) for i in outputs])

    pytorch_source = render_module(
        inits, calls, inputs, outputs, dst_dir, pytorch_dict, pytorch_module_name)

    return eval_model(pytorch_source, pytorch_dict, pytorch_module_name)
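For context, a minimal usage sketch of the converter above. The toy two-layer Gluon model, the shapes, and the module name are all hypothetical, and the sketch assumes the Dense layer's underlying FullyConnected op has a registered converter in CONVERTERS:

import mxnet as mx

# Hypothetical toy model; any HybridBlock whose ops have converters should work.
demo_net = mx.gluon.nn.HybridSequential()
demo_net.add(mx.gluon.nn.Dense(64, activation='relu'))
demo_net.add(mx.gluon.nn.Dense(10))
demo_net.initialize()
demo_net.hybridize()

# args is a list of input shapes; dst_dir=None skips writing files to disk.
model = gluon2pytorch(demo_net, args=[(1, 128)], dst_dir=None,
                      pytorch_module_name='TwoLayerNet', debug=False)
print(model)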
/*
    WriteBgmContainer
        Writes a bgm container from a music sequence and a preset bank.
        - filepath     : The filename + path to output to. Extension included.
        - presbnk      : A preset bank to be exported.
        - mus          : A music sequence to be exported.
        - nbpadcontent : The nb of bytes of padding to put between the SIR0 header and
                         the header of the first sub-container. Usually 16 to 64.
        - alignon      : Extra padding will be added between sections and at the end of
                         the file so that sections start on offsets divisible by that.

        The SWDL is written first, then the SMDL, and the pointers in the subheader
        are in that order too!
*/
void WriteBgmContainer(const std::string   & filepath,
                       const PresetBank    & presbnk,
                       const MusicSequence & mus,
                       size_t                nbpadcontent,
                       size_t                alignon)
{
}
import os


def changedirectory():
    cwd = os.getcwd()
    print("The current directory is:", cwd)
    loop = True
    while loop:
        changeboolian = input("Would you like to change the working directory? (Y/N)")
        if changeboolian == "Y" or changeboolian == "N":
            loop = False
        else:
            print("Please type either 'N' or 'Y' to proceed")
    if changeboolian == "N":
        return cwd
    else:
        wd = input("What would you like to change the directory to? (Must be an absolute path)")
        os.chdir(wd)
        print("Changed directory!")
        return os.getcwd()  # return the new directory so both branches return a path


def getdirectories():
    listtoreturn = []
    directories = os.listdir(os.getcwd())
    for directory in directories:
        listtoreturn.append(directory)
    return listtoreturn


def getfiles(directories):
    filelist = []
    originaldirectory = os.getcwd()
    for directory in directories:
        newWD = originaldirectory + "/" + directory
        if os.path.isdir(newWD):
            ls = os.listdir(newWD)
            for file in ls:
                filelist.append(file)
    ls = os.listdir(originaldirectory)
    for file in ls:
        filepath = originaldirectory + "/" + file
        if os.path.isfile(filepath):
            filelist.append(file)
    return filelist


def hasextension(file):
    for character in file:
        if character == ".":
            return True
    return False


def getextension(file):
    # Return everything after the last period in the file name.
    charlist = []
    periodLocations = []
    for character in file:
        charlist.append(character)
    for i in range(len(charlist)):
        if charlist[i] == ".":
            periodLocations.append(i)
    index = periodLocations[-1]
    return file[-(len(file) - index - 1):]


def getExtensionList(filelist):
    extensionlist = []
    for file in filelist:
        if hasextension(file):
            extensionlist.append(getextension(file))
        else:
            extensionlist.append("NONE")
    return extensionlist


def countExtensions(extensionlist):
    extDict = {}
    extSet = set()
    for extension in extensionlist:
        if extension not in extSet:
            extSet.add(extension)
            extDict[extension] = 1
        else:
            extDict[extension] += 1
    return extDict


def writetofile(extensioncount, path):
    output = open(path + "/" + "output.csv", 'w')
    output.write("File Type,Count\n")
    for key in extensioncount:
        toprint = str(key) + "," + str(extensioncount[key]) + "\n"
        output.write(toprint)
    output.close()  # close the handle so the CSV is flushed to disk


def filecount():
    loop = True
    while loop:  # include-subdirectories query
        changeboolian = input("Would you like to include subdirectories? (Y/N)")
        if changeboolian == "Y":
            directories = getdirectories()
            loop = False
        elif changeboolian == "N":
            directories = [os.getcwd()]
            loop = False
        else:
            print("Please type either 'N' or 'Y' to proceed")
    filelist = getfiles(directories)  # get list of all file names
    extensionlist = getExtensionList(filelist)
    extensioncount = countExtensions(extensionlist)
    loop = True
    while loop:  # output-location query
        changeboolian = input("Would you like to save the output csv to a different folder? (Y/N)")
        if changeboolian == "Y":
            path = input("What is the path to the directory? (must be an absolute path)")
            loop = False
        elif changeboolian == "N":
            path = os.getcwd()
            loop = False
        else:
            print("Please type either 'N' or 'Y' to proceed")
    writetofile(extensioncount, path)
    return


def main():
    while True:
        print("Welcome to the Filetype Lister 1.0")
        option = input("Type 'R' to run the tool, 'D' to change directory, or 'exit' to exit the tool: ")
        if option == "R":
            confirm = input("Press enter to continue or type anything else to return to the menu")
            if len(confirm) == 0:
                filecount()
        elif option == "D":
            changedirectory()
        elif option == "exit":
            print("Now exiting the tool, Goodbye.")
            exit()
        else:
            print("Error: Input not recognized, returning to the menu.")


main()
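As a quick illustration of how the extension helpers above behave (the file names here are made up), note that getextension keeps only the text after the last period and files without a period are bucketed as "NONE":

files = ["notes.txt", "archive.tar.gz", "README"]
print(getExtensionList(files))                   # ['txt', 'gz', 'NONE']
print(countExtensions(getExtensionList(files)))  # {'txt': 1, 'gz': 1, 'NONE': 1}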
import datetime


def create_recurring_meetings(instance, committee):
    # Look up the committee record, then schedule its recurring meetings starting
    # from now on the configured day/interval. Committee, create_recurring_events
    # and create_committee_event are project-level helpers assumed to be in scope.
    committee_object = Committee.objects.get(committee=committee)
    create_recurring_events(
        datetime.datetime.now(),
        instance['meeting_day'],
        instance['meeting_interval'],
        lambda date, semester: create_committee_event(
            date, semester, instance['meeting_time'], committee_object))
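For orientation, a hypothetical call might look like the sketch below. The dictionary keys follow what the function reads from instance; the 'finance' committee name and the concrete values are invented for illustration, and the exact types expected for the interval and time depend on the project's create_recurring_events helper:

instance = {
    'meeting_day': 'Tuesday',             # weekday the committee meets
    'meeting_interval': 1,                # assumed: weeks between meetings
    'meeting_time': datetime.time(18, 0), # assumed: a datetime.time
}
create_recurring_meetings(instance, committee='finance')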
package com.example.administrator.taoyuan.fragment;

import android.animation.ObjectAnimator;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewPager;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RadioButton;
import android.widget.Toast;

import com.example.administrator.taoyuan.R;
import com.example.administrator.taoyuan.activity_life.fabu;
import com.example.administrator.taoyuan.pojo.ListInfo;
import com.example.administrator.taoyuan.pojo.ListLifeInfo;
import com.example.administrator.taoyuan.utils.MyFragmentPagerAdapter;

import java.util.ArrayList;
import java.util.List;

public class Life extends Fragment implements ViewPager.OnPageChangeListener, View.OnClickListener {

    private ImageView line_tab;
    private ListView lv_lifeinfo;
    BaseAdapter adapter;
    final ArrayList<ListLifeInfo.LifeInfo> lifelist = new ArrayList<ListLifeInfo.LifeInfo>();
    private ListView lv_dongtai1;
    ListInfo listinfo;
    public static final Integer REQUSETCODE = 1;
    private Button tv_fabu;
    private boolean isScrolling = false;     // whether a finger is currently dragging
    private boolean isBackScrolling = false; // pager is settling after the finger lifts
    View view;
    List<Fragment> fragmentList = new ArrayList<Fragment>();
    private ViewPager viewPager;
    private RadioButton tab1;
//    private RadioButton tab2;
    private RadioButton tab3;
    private int moveOne = 0;
    private long startTime = 0;
    private long currentTime = 0;

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        view = inflater.inflate(R.layout.activity_activity_life, null);
        initView();
        Fragment fragment1 = new lifeFriends();
        Fragment fragment2 = new LifeAll();
//        Fragment fragment3 = new lifeHot();
        fragmentList.add(fragment1);
        fragmentList.add(fragment2);
//        fragmentList.add(fragment3);
        initEvent();
        onPageSelected(0);
        initLineImage();
        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
    }

    private void initView() {
        viewPager = ((ViewPager) view.findViewById(R.id.viewPager));
        tab1 = ((RadioButton) view.findViewById(R.id.rb_peng));
//        tab2 = ((RadioButton) view.findViewById(R.id.rb_huati));
        tab3 = ((RadioButton) view.findViewById(R.id.rb_suoyou));
        line_tab = ((ImageView) view.findViewById(R.id.line_tab_life));
        tv_fabu = ((Button) view.findViewById(R.id.tv_fabu));
    }

    public void initEvent() {
        viewPager.setAdapter(new MyFragmentPagerAdapter(getChildFragmentManager(), fragmentList));
        viewPager.setCurrentItem(0);
        tab1.setOnClickListener(this);
//        tab2.setOnClickListener(this);
        tab3.setOnClickListener(this);
        viewPager.setOnPageChangeListener(this);
        tv_fabu.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getActivity(), fabu.class);
                intent.putExtra("orderDeatils", tv_fabu.getClass());
                startActivityForResult(intent, 1);
            }
        });
    }

    private void initLineImage() {
        // Get the screen width
        DisplayMetrics dm = new DisplayMetrics();
        getActivity().getWindowManager().getDefaultDisplay().getMetrics(dm);
        int screen = dm.widthPixels;
        // Resize the underline to half the screen width
        ViewGroup.LayoutParams lp = line_tab.getLayoutParams();
        lp.width = screen / 2;
        line_tab.setLayoutParams(lp);
        // Distance the underline travels when moving one full page
        moveOne = lp.width;
    }

    /**
     * Move the underline so it follows the finger as it drags.
     *
     * @param toPosition
     * @param positionOffsetPixels
     */
    private void movePositionX(int toPosition, float positionOffsetPixels) {
        float curTranslationX = line_tab.getTranslationX();
        float toPositionX = moveOne * toPosition + positionOffsetPixels;
        ObjectAnimator animator = ObjectAnimator.ofFloat(line_tab, "translationX", curTranslationX, toPositionX);
        animator.setDuration(1);
        animator.start();
    }

    // Slide the underline to the newly selected tab
    private void movePositionX(int toPosition) {
        movePositionX(toPosition, 0);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.rb_peng:
                viewPager.setCurrentItem(0);
                Toast.makeText(getActivity().getApplicationContext(), "Tapped the first tab", Toast.LENGTH_LONG).show();
                break;
            case R.id.rb_suoyou:
                viewPager.setCurrentItem(1);
                break;
//            case R.id.rb_huati:
//                viewPager.setCurrentItem(2);
//                break;
        }
    }

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        currentTime = System.currentTimeMillis();
        if (isScrolling && (currentTime - startTime > 200)) {
            movePositionX(position, moveOne * positionOffset);
            startTime = currentTime;
        }
        if (isBackScrolling) {
            movePositionX(position);
        }
    }

    @Override
    public void onPageSelected(int position) {
        switch (position) {
            case 0:
                tab1.setTextColor(Color.BLACK);
//                tab2.setTextColor(Color.GRAY);
                tab3.setTextColor(Color.GRAY);
                break;
            case 1:
                tab1.setTextColor(Color.GRAY);
//                tab2.setTextColor(Color.BLACK);
                tab3.setTextColor(Color.BLACK);
                break;
//            case 2:
//                tab1.setTextColor(Color.GRAY);
//                tab2.setTextColor(Color.BLACK);
//                tab3.setTextColor(Color.GRAY);
//                break;
        }
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        switch (state) {
            case 1:  // dragging
                isScrolling = true;
                isBackScrolling = false;
                break;
            case 2:  // settling
                isScrolling = false;
                isBackScrolling = true;
                break;
            default: // idle
                isScrolling = false;
                isBackScrolling = false;
                break;
        }
    }
}