content
stringlengths
10
4.9M
/**
 * Converts this instance to a Netty {@link FullHttpResponse}.
 *
 * <p>The response is built for HTTP/1.1 from this instance's status, body
 * content and headers.
 *
 * @return the Netty {@link FullHttpResponse}
 */
default FullHttpResponse toNettyFullHttpResponse() {
    FullHttpResponse response =
            new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status(), bodyContent());
    response.headers().setAll(headers());
    return response;
}
#from extras.plugins import PluginMenuButton, PluginMenuItem
from nautobot.extras.plugins import PluginMenuButton, PluginMenuItem
#from utilities.choices import ButtonColorChoices
from nautobot.utilities.choices import ButtonColorChoices

# Navigation entries contributed by the ciscodnacnautobot plugin: a single
# "Status" menu item with buttons for triggering a full sync and opening the
# plugin settings.
menu_items = (
    PluginMenuItem(
        link="plugins:ciscodnacnautobot:status",
        link_text="Status",
        permissions=["admin"],
        buttons=(
            PluginMenuButton(
                link="plugins:ciscodnacnautobot:sync_full",
                # Fix: this button links to sync_full but its title was a
                # copy-paste duplicate of the "Settings" button below.
                title="Sync",
                icon_class="mdi mdi-all-inclusive",
                color=ButtonColorChoices.BLUE,
                permissions=["admin"],
            ),
            PluginMenuButton(
                link="plugins:ciscodnacnautobot:settings",
                title="Settings",
                icon_class="mdi mdi-cog",
                color=ButtonColorChoices.BLUE,
                permissions=["admin"],
            ),
        ),
    ),
)
/**
 ****************************************************************************************
 * @brief Function to start the SysTick timer
 *
 * @param[in] usec      the duration of the countdown
 * @param[in] exception set to TRUE to generate an exception when the timer counts down
 *                      to 0, FALSE not to
 *
 * @return void
 ****************************************************************************************
 */
void systick_start(uint32_t usec, uint8_t exception)
{
    SetBits32(&SysTick->CTRL, SysTick_CTRL_ENABLE_Msk, 0);          /* disable the timer while (re)configuring it */
    SetBits32(&SysTick->LOAD, SysTick_LOAD_RELOAD_Msk, usec-1);     /* reload value; the counter runs LOAD+1 ticks, hence usec-1 */
    SetBits32(&SysTick->VAL, SysTick_VAL_CURRENT_Msk, 0);           /* clear current value so the countdown starts from LOAD */
    SetBits32(&SysTick->CTRL, SysTick_CTRL_TICKINT_Msk, exception); /* optionally raise the SysTick exception on reaching 0 */
    SetBits32(&SysTick->CTRL, SysTick_CTRL_CLKSOURCE_Msk, 0);       /* 0 = external reference clock; NOTE(review): LOAD being in
                                                                       usec presumably assumes a 1 MHz tick — confirm on target */
    SetBits32(&SysTick->CTRL, SysTick_CTRL_ENABLE_Msk, 1);          /* start counting */
}
Vice Chair of the Judiciary Committee Kathleen Dumias (D) talks about amendments as the Maryland House of Delegates prepares to vote on Senate Bill 281 on Wednesday in Annapolis. (Jonathan Newton/The Washington Post) The Maryland House of Delegates passed what would be among the nation’s most restrictive gun-control measures Wednesday, voting to ratchet up the state’s already tough rules by requiring fingerprinting of gun buyers, new limits on firearm purchases by the mentally ill, and bans on assault weapons and on magazines that hold more than 10 bullets. The 78 to 61 vote handed Gov. Martin O’Malley (D) a major policy victory as Maryland joins the ranks of Democratic-leaning states passing broad gun-control restrictions in response to the December school shootings in Connecticut — a state where lawmakers Wednesday also passed tough new gun legislation. The bill now returns to the state Senate, which passed a substantially similar version of the legislation last month. Key senators and staffers said they expect the chamber to sign off on changes made by the House and send the measure to O’Malley, who proposed the legislation. Amid the wave of legislative efforts nationally, Maryland’s is the only package whose new requirements would force gun buyers to provide fingerprints and undergo classroom training, target practice and background checks to obtain a license to buy a firearm. No state had sought to impose a licensing requirement in nearly 20 years, a period when the National Rifle Association grew increasingly powerful in American politics. The NRA criticized the Maryland House vote, continuing months of complaints that licensing and fingerprinting amount to a fundamental infringement of a constitutional right. 
The vote on one of O’Malley’s top priorities of the legislative session, which ends Monday, came as federal gun-control legislation is stalled in Congress: An assault-weapons ban is no longer part of a bill, and universal background checks have bogged down. Maryland would join five states — Connecticut, Hawaii, Massachusetts, New York and New Jersey — in requiring fingerprinting of gun buyers. It also would join seven states and the District in banning a wide array of assault weapons. Under the Maryland bill, any resident wanting to buy a gun would have to pass the new training and testing requirements before receiving an ID card issued by the Maryland State Police. State officials said the ID card would be similar to a driver’s license, probably with a photo. It would have to be renewed every 10 years. Marylanders would not need to get a license to buy hunting rifles and shotguns. With scores of gun rights advocates and gun-control activists in the balconies of the marbled House chamber, Republicans and conservative Democrats on Tuesday made a last stand on the House floor. They forced dozens of votes on amendments designed to weaken or halt the bill, including a move to strip out the licensing requirement. Among other arguments, opponents of the bill said the state lacks enough firing ranges, firearms instructors and state police to process the licenses in a timely manner. A surge in firearm sales in Maryland since December has turned the state’s mandatory seven-day waiting period to buy a gun into a 55-day wait, state police said during the debate. During a speech in Denver to amp support for gun-control legislation, President Obama renewed pressure on Congress to enact stricter background checks. (Nicki Demarco/The Washington Post) Del. Michael A. McDermott (R-Worcester) said the licensing requirement would add even more delay, amounting to a “defacto ban” on gun sales in Maryland. As he left the State House, House Minority Leader Anthony J. 
O’Donnell (R-Calvert), called the measure a “Mickey Mouse” gun ban designed only to help O’Malley “punch his ticket” for a run at higher office. “It was passed to further the national political ambitions of this governor, and I think it will be challenged in court,” O’Donnell said. Shannon Alford, who led the NRA’s efforts against the bill, said the vote that would matter more would come next year, when every seat in the legislature is up for reelection. “They’re going to have to listen to us in 2014,” she said. “That’s the only poll that counts.” But O’Malley hailed the vote as a historic victory that would make Maryland safer. “Our state shouldn’t settle for being in the top 10 most violent states in America, there are a lot of lives that can be saved,” he said in a brief interview. “The tragedy in Newtown [Conn.] gave us the inflection point, the ability to forge a consensus that prior to that awful tragedy might not have been possible. Hopefully, we can wrest some good out of that.” Del. Kathleen M. Dumais (D-Montgomery), who led Democrats’ 11-hour defense of the bill, called the fingerprinting and licensing requirement one of the most important parts of the legislation. “This is not just about responding to tragedies,” Dumais said. “This bill is to address, specifically, what Maryland needs, and the problem Maryland has is with handguns.” Police and prosecutors lauded the licensing provision, which they predicted would be even more important in curbing crime than the assault-weapons ban. Although assault-type weapons were used in recent mass shootings such as occurred in Newtown and Aurora, Colo., they have been used in less than 1 percent of Maryland homicides since 2004, when a federal assault-weapons ban lapsed. The licensing requirements would help reduce everyday gun crime by slowing the flow of firearms through “straw purchases,” advocates said. In a straw purchase, someone buys a gun for a person not allowed to make the purchase. 
Prosecutors say the practice is widespread in Prince George’s County and in Baltimore. “What Governor O’Malley is doing here is going to change the national dialogue,” said Vincent DeMarco, a longtime gun-control advocate. “Every state will be looking at what Maryland did and asking if that can be done here.” Under the bill, Maryland would impose blanket restrictions on people involuntarily committed for mental health treatment. The change is similar to one Virginia made after the 2007 mass shooting at Virginia Tech. Residents committed against their will for any length of time would be banned permanently from buying weapons. They would need to petition the state to have their gun rights reinstated. Currently, a patient must be institutionalized for at least 30 consecutive days to lose gun rights — a threshold not met by more than 50,000 Marylanders who have been committed to state facilities but who are allowed to own firearms. The House pulled back from imposing such prohibitions on patients who voluntarily seek admission for psychiatric treatment. The Senate voted to ban some patients who are institutionalized after visiting emergency rooms for mental health reasons. The final House vote also followed a protest by Republicans after Democrats engaged in last-minute maneuvers to strip out changes that had been agreed to last week by majorities of two House committees — changes opposed by O’Malley’s office. One would have exempted hundreds of volunteers in the Maryland Defense Force from many provisions of the bill. The head of the state’s National Guard objected to that measure this week, saying most volunteers functioned as support staff with no firearm training or need for special status. Another aborted change would have reduced the minimum age for handgun ownership, now 21, for military veterans and service members. 
The vote on the bill broke down most starkly along geographical lines, with every lawmaker present from Montgomery, Prince George’s and Baltimore voting for it; almost every delegate from the western, eastern and southern parts of Maryland opposed it.
// RegisterBuilder registers a builder function for a language func RegisterBuilder(l lang.Language, b BuilderFunc) { builderLock.Lock() defer builderLock.Unlock() builders[l] = b }
/* Changes the estimated image projection in such a way that the cost function of the SBA is
 * changed from squared norm to pseudo-huber: the estimate is pulled toward the measurement
 * by a weight derived from the pseudo-huber cost of their distance.
 *
 * double *ImgProjEsti  Input & Output -> Pointer to the estimated image projection
 * double *ImgProjMeas  Input          -> Pointer to the measured image projection
 * int mnp              Input          -> Number of coordinates per image projection
 * double thresh        Input          -> Pseudo-huber threshold
 *
 * Return value: none
 */
void convertCostToPseudoHuber(double *ImgProjEsti, double *ImgProjMeas, int mnp, double thresh)
{
    // Squared Euclidean distance between measurement and estimate.
    double sqDist = 0;
    for (int k = 0; k < mnp; ++k)
    {
        sqDist += sqr(ImgProjMeas[k] - ImgProjEsti[k]);
    }

    // Weight in [0, 1] derived from the pseudo-huber cost of the distance.
    const double weight = costPseudoHuber(std::sqrt(sqDist), thresh);

    // Blend the estimate toward the measurement in place.
    for (int k = 0; k < mnp; ++k)
    {
        ImgProjEsti[k] = (1 - weight) * ImgProjMeas[k] + weight * ImgProjEsti[k];
    }
}
As I sit down to work on our Android Phone Guide and eat some tasty General Tso’s Chicken, I notice something curious pop up on my HTC Incredible – a big “System Update” dialogue box: Pressing the “More Info” button didn’t yield a very good explanation of what was included: So we know it’s 3.5MB but that’s about it… and the link they offered to http://www.verizonwireless.com/droidincrediblesupport was currently a broken link. Time to install the update and do some sleuthing for myself! Here is the phone info from before and after: So the Build Number changed from ending in 493 to ending in 494 and the Software number changed from ending in .0 to .2 – so this definitely wasn’t a phantom update even though the Firmware version still reads 2.1-update1. What exactly did this update accomplish? I’m not sure… but I’ve got word into HTC and Verizon and hopefully we can collectively figure out what this Incredible OTA is all about. I’m guessing it could be something to do with the glitchy signal bar display but that’s just a guess.
import sys
import getopt


def getArgs(args):
    """Collect every -p/--params option value from an argv-style list.

    The first element of ``args`` (the program name) is skipped, mirroring
    how ``sys.argv`` is normally consumed.
    """
    parsed, _remaining = getopt.getopt(args[1:], '-p:', ['params='])
    return [value for name, value in parsed if name in ('-p', '--params')]
import Player from "@/model/types/Player";

/**
 * Default {@link Player} implementation: immutable identity fields supplied
 * at construction time, plus a mutable running score that starts at 0.
 */
export default class DefaultPlayer implements Player {
  public score: number = 0;

  constructor(
    public readonly id: number,
    public readonly primary?: boolean,
    public readonly name?: string,
    public readonly userId?: string
  ) {}
}

/** Identifier reserved for the primary player. */
export const PRIMARY_PLAYER_ID = 1;
/*
 * Overrides the standard implementation of throttle mapping as different rules apply to
 * brake based regen.
 */
int16_t CanBrake::mapPedalPosition(int16_t pedalPosition) {
    CanBrakeConfiguration *config = (CanBrakeConfiguration *) getConfiguration();
    int16_t brakeLevel, range;
    // A released pedal contributes no regen at all.
    if (pedalPosition == 0)
        return 0;
    // Width of the configured regen window.
    range = config->maximumRegen - config->minimumRegen;
    // Scale pedal position into the regen window; negative because regen is
    // expressed as a negative throttle level. NOTE(review): the /1000 and *10
    // factors suggest pedalPosition is 0..1000 and regen config is in whole
    // percent mapped to tenths — confirm against the callers/config docs.
    brakeLevel = -10 * range * pedalPosition / 1000;
    // Any brake application starts from at least the minimum regen level.
    brakeLevel -= 10 * config->minimumRegen;
    return brakeLevel;
}
<filename>yaaz/src/chess_env.py ##################################################################################### # MIT License # # # # Copyright (C) 2019 <NAME> # # # # This file is part of Yet-Another-AlphaZero. # # # # Permission is hereby granted, free of charge, to any person obtaining a copy # # of this software and associated documentation files (the "Software"), to deal # # in the Software without restriction, including without limitation the rights # # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # # copies of the Software, and to permit persons to whom the Software is # # furnished to do so, subject to the following conditions: # # # # The above copyright notice and this permission notice shall be included in all # # copies or substantial portions of the Software. # # # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # # SOFTWARE. 
# #####################################################################################
from error_handling.console_logger import ConsoleLogger

import chess
import numpy as np


class ChessEnv(object):
    """Thin wrapper around ``chess.Board`` plus AlphaZero-style state and
    move-probability encoding helpers."""

    # Plane index for each piece symbol (lowercase = black, uppercase = white)
    # within one 12-plane occupancy group.
    _piece_values = {
        'p': 0, 'b': 1, 'n': 2, 'r': 3, 'q': 4, 'k': 5,
        'P': 6, 'B': 7, 'N': 8, 'R': 9, 'Q': 10, 'K': 11
    }

    def __init__(self):
        self._board = chess.Board()

    def step(self, action):
        # ``action`` is a UCI move string, e.g. 'e2e4'.
        self._board.push_uci(action)

    def get_legal_actions(self):
        return list(self._board.legal_moves)

    def is_terminal(self):
        return self._board.is_game_over()

    def get_result(self):
        # Unfinished games ('*') are reported as draws.
        result = self._board.result()
        if result == '*':
            result = '1/2-1/2'
        return result

    def get_turn(self):
        return self._board.turn

    def is_almost_over(self):
        return self._board.fullmove_number > 80

    def get_action_stack(self):
        return self._board.move_stack

    def restore_and_get_actions(self):
        # Rewind the board to the initial position, returning the popped
        # moves in game order.
        moves = []
        while len(self._board.move_stack) > 0:
            moves = [self._board.pop()] + moves
        return moves

    def get_last_action(self):
        return self._board.peek()

    def get_turn_number(self):
        return self._board.fullmove_number

    def backward(self):
        self._board.pop()

    def copy(self):
        new_env = ChessEnv()
        new_env._board = self._board.copy()
        return new_env

    def get_ending_reason(self):
        if self._board.is_checkmate():
            return "checkmate"
        elif self._board.is_stalemate():
            return "stalemate"
        elif self._board.is_insufficient_material():
            return "insufficient material"
        elif self._board.is_fivefold_repetition():
            return "fivefold repetition"
        else:
            return "unknown"

    def filter_illegal_probabilities(self, probabilies, is_training, q):
        """Map each legal move to its (plane z, rank y, file x) policy index.

        In training mode, writes ``q`` values into ``probabilies`` at the
        indices of the legal moves and returns the tensor; otherwise gathers
        the probabilities of the legal moves and returns their softmax.

        Source: https://github.com/cbjornst/AlphaZero-Chess/blob/master/moveLogic/MCTS.py#L43
        """
        probability = list()
        legal_moves = self.get_legal_actions()
        for move in legal_moves:
            fr = move.from_square
            to = move.to_square
            y1 = to // 8
            y2 = fr // 8
            x1 = to % 8
            x2 = fr % 8
            if self._board.san(move)[0].lower() == "n":
                # Knight moves: one plane per (dy, dx) combination.
                if y1 - y2 == 2:
                    if x1 - x2 == 1:
                        z = 56
                    else:
                        z = 57
                elif y1 - y2 == 1:
                    if x1 - x2 == 2:
                        z = 58
                    else:
                        z = 59
                elif y1 - y2 == -1:
                    if x1 - x2 == 2:
                        z = 60
                    else:
                        z = 61
                else:
                    if x1 - x2 == 1:
                        z = 62
                    else:
                        z = 63
            elif self._board.san(move)[-1].lower() in ["n", "b", "r"]:
                # Underpromotions: plane = promotion piece base + direction.
                promo = self._board.san(move)[-1].lower()
                if promo == "n":
                    z = 0
                elif promo == "b":
                    z = 3
                else:
                    z = 6
                if x1 > x2:
                    z += 64
                elif x1 < x2:
                    z += 65
                else:
                    z += 66
            else:
                # Queen-style sliding moves: plane = direction base + distance.
                dist = chess.square_distance(fr, to)
                if y1 > y2:
                    if x1 > x2:
                        z = 7
                    elif x1 < x2:
                        z = 49
                    else:
                        z = 0
                elif y1 < y2:
                    if x1 > x2:
                        z = 21
                    elif x1 < x2:
                        z = 35
                    else:
                        z = 28
                else:
                    if x1 > x2:
                        z = 14
                    else:
                        z = 42
                z += dist
            if is_training:
                try:
                    probabilies[z][y2][x2] = q[legal_moves.index(move)]
                except Exception:
                    # Fix: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit.
                    ConsoleLogger.error('len(q): {} probabilies.shape: {} probabilies[z][y2][x2].shape: {} z: {} y2: {} x2: {}' \
                        ' legal_moves.index(move): {}'.format( \
                        len(q), probabilies.shape, probabilies[z][y2][x2].shape, z, y2, x2, legal_moves.index(move) \
                    ))
            else:
                probability += [probabilies[z][y2][x2]]
        if is_training:
            return probabilies
        else:
            softmax = np.exp(probability)
            softmax = softmax / np.sum(softmax)
            return softmax

    def build_state(self, T=2):
        """Build the (1, 119, 8, 8) network input: T history steps of piece
        occupancy planes plus scalar planes (turn, move number, castling
        rights, fifty-move claim)."""
        state = np.zeros((119, 8, 8))
        board2 = self._board.copy()
        for i in range(T):
            if len(board2.move_stack) > 0:
                move = board2.peek()
            else:
                move = None
            ChessEnv._build_state_plane(board2, i, state, move)
            if len(board2.move_stack) > 0:
                board2.pop()
            else:
                break
        # Plane 96: side to move (1 = white, 2 = black).
        if self._board.turn:
            state[96] += 1
        else:
            state[96] += 2
        state[97] += self._board.fullmove_number
        if self._board.has_kingside_castling_rights(0):
            state[98] += 1
        if self._board.has_kingside_castling_rights(1):
            state[99] += 1
        if self._board.has_queenside_castling_rights(0):
            state[100] += 1
        if self._board.has_queenside_castling_rights(1):
            state[101] += 1
        if self._board.can_claim_fifty_moves():
            state[102] += 1
        return state.reshape(1, 119, 8, 8)

    @staticmethod
    def _build_state_plane(board, T, state, move):
        # Write the 12 piece-occupancy planes for history step T.
        new_board = np.chararray([8, 8], unicode=True)
        pm = board.piece_map()
        # Fix: iterate the cached piece map instead of calling piece_map()
        # a second time.
        for i in pm:
            new_board[i // 8][i % 8] = pm[i].symbol()
        for i in range(8):
            for j in range(8):
                # Fix: was `is not ''` — an identity test against a literal
                # (SyntaxWarning since Python 3.8, and not a value comparison);
                # use != so empty squares are actually skipped.
                if new_board[i][j] != '':
                    layer = ChessEnv._piece_values[new_board[i][j]] + (12 * T)
                    state[layer][i][j] = 1.0
33 SHARES Facebook Twitter As highly dubious rumors are flying around at the moment suggesting that Daniel Craig is being offered $150 million to return for two more James Bond films (if they are true, I can only imagine would be what he might earn on backend if the films make a particular amount of money), Christoph Waltz, who played classic villain Ernst Blofeld in “Spectre,” is reminding everyone that much about the future of the franchise remains a question mark. “Right now, nobody even knows which studio will produce the next and if Daniel will return. All of that is filed under ‘carry on,’ ” he told German magazine Zeit-Magazin Mann (via CommanderBond). READ MORE: Review: Sam Mendes’ ‘Spectre’ Starring Daniel Craig, Christoph Walz, Lea Seydoux & Ralph Fiennes However, if Craig agrees to play 007 again and if Waltz gets another crack at the villain, he’d welcome the opportunity, because he doesn’t think he got it right the first time out. “I cannot claim that I’ve really nailed Blofeld. Overall, it held water and was okay. But it wasn’t what I’ve been looking for. I was searching for more inspiration,” the actor said, in what sounds like a slight dig at Sam Mendes. But again, don’t count on Waltz’s Blofeld returning, because right now everything is up in the air. “I don’t know about that. Nobody knows. It wasn’t talked about, except in the press,” Waltz said about reprising his part. In summary, despite all the rumors swirling at the moment, the status of the next James Bond movie is currently: ¯\_(ツ)_/¯
/**
 * Main method: parses the command-line arguments, optionally purges the
 * cache directory, then runs the series tracker.
 *
 * @param args Arguments
 * @throws Exception If something goes wrong
 * @throws IllegalArgumentException if an unknown flag is given, or if
 *         {@code -purgeCache} is given without {@code -cacheDir}
 */
public static void main(String[] args) throws Exception {
    Path seriesList = null;
    Path watchedList = null;
    Path output = null;
    Path cacheDir = null;
    String outputFormat = null;
    boolean offline = false;
    boolean purgeCache = false;
    for (int i = 0; i < args.length; i++) {
        // Flags are matched case-insensitively; fetch(...) reads the flag's value
        // and advances i past it.
        String arg = args[i].toLowerCase();
        if (arg.equals("-serieslist")) {
            seriesList = Paths.get(fetch("-seriesList", ++i, args));
        } else if (arg.equals("-watchedlist")) {
            watchedList = Paths.get(fetch("-watchedList", ++i, args));
        } else if (arg.equals("-output")) {
            output = Paths.get(fetch("-output", ++i, args));
        } else if (arg.equals("-cachedir")) {
            cacheDir = Paths.get(fetch("-cacheDir", ++i, args));
        } else if (arg.equals("-outputformat")) {
            outputFormat = fetch("-outputFormat", ++i, args);
        } else if (arg.equals("-offline")) {
            offline = true;
        } else if (arg.equals("-purgecache")) {
            purgeCache = true;
        } else {
            throw new IllegalArgumentException("Unknown argument [" + args[i]
                    + "], all arguments: [" + Arrays.toString(args) + "]");
        }
    }
    if (purgeCache) {
        // Fix: previously a null cacheDir caused a NullPointerException and a
        // missing directory a NoSuchFileException inside walkFileTree.
        if (cacheDir == null) {
            throw new IllegalArgumentException("-purgeCache requires -cacheDir to be specified");
        }
        if (Files.exists(cacheDir)) {
            System.out.println("Purging cache directory: " + cacheDir);
            // Depth-first walk: delete files first, then each directory on the way up.
            Files.walkFileTree(cacheDir, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }
    SeriesTracker tracker = new SeriesTracker(seriesList, watchedList, output, cacheDir, outputFormat, offline);
    tracker.process();
}
/**
 * This header is generated by class-dump-z 0.2b.
 *
 * Source: /System/Library/PrivateFrameworks/StoreServices.framework/StoreServices
 */
// NOTE: auto-generated, reverse-engineered header — the hex offsets in the
// trailing comments are binary addresses/ivar offsets from the dumped
// framework; regenerate with class-dump-z rather than editing by hand.

#import <StoreServices/NSMutableCopying.h>
#import <StoreServices/StoreServices-Structs.h>
#import <StoreServices/SSCoding.h>
#import <StoreServices/NSCopying.h>
#import <StoreServices/XXUnknownSuperclass.h>

@class NSArray, NSString;

// Context object describing a software-update request: the requesting
// client's identifier header, whether the update is forced, and the software
// types of interest. Copyable, mutable-copyable and SSCoding-serializable.
@interface SSSoftwareUpdatesContext : XXUnknownSuperclass <SSCoding, NSCopying, NSMutableCopying> {
@private
	NSString *_clientIdentifierHeader;	// 4 = 0x4
	BOOL _forced;	// 8 = 0x8
	NSArray *_softwareTypes;	// 12 = 0xc
}
@property(readonly, assign, nonatomic) NSString *clientIdentifierHeader;	// G=0x1eeb1; @synthesize=_clientIdentifierHeader
@property(readonly, assign, nonatomic) NSArray *softwareTypes;	// G=0x1eb69; @synthesize=_softwareTypes
@property(readonly, assign, nonatomic, getter=isForced) BOOL forced;	// G=0x1eec1; @synthesize=_forced
// declared property getter: - (BOOL)isForced;	// 0x1eec1
// declared property getter: - (id)clientIdentifierHeader;	// 0x1eeb1
- (id)mutableCopyWithZone:(NSZone *)zone;	// 0x1ee11
- (id)copyWithZone:(NSZone *)zone;	// 0x1ed71
- (id)initWithXPCEncoding:(void *)xpcencoding;	// 0x1ed35
- (id)initWithPropertyListEncoding:(id)propertyListEncoding;	// 0x1ec9d
- (void *)copyXPCEncoding;	// 0x1ec69
- (id)copyPropertyListEncoding;	// 0x1ebd5
// declared property getter: - (id)softwareTypes;	// 0x1eb69
- (void)dealloc;	// 0x1eb09
@end
<reponame>mstgnz/eCommerce<filename>src/main/java/com/mstgnz/ecommerce/dto/concrete/CompanyDto.java package com.mstgnz.ecommerce.dto.concrete; import com.fasterxml.jackson.annotation.JsonInclude; import com.mstgnz.ecommerce.dto.notional.IEntityDto; import com.mstgnz.ecommerce.entities.concrete.Company; import lombok.Data; @Data @JsonInclude(JsonInclude.Include.NON_NULL) public class CompanyDto implements IEntityDto { private long id; private String name; public CompanyDto(){ } public CompanyDto(Company company){ this.id = company.getId(); this.name = company.getName(); } }
/** Implementation of the {@link SpanExporter}. */
public final class SpanExporterImpl extends SpanExporter {
  private static final Logger logger = Logger.getLogger(ExportComponent.class.getName());

  // The batching worker and the daemon thread that runs it; the thread is
  // started from the constructor and runs until interrupted by shutdown().
  private final Worker worker;
  private final Thread workerThread;

  /**
   * Constructs a {@code SpanExporterImpl} that exports the {@link SpanData} asynchronously.
   *
   * <p>Starts a separate thread that wakes up every {@code scheduleDelay} and exports any available
   * spans data. If the number of buffered SpanData objects is greater than {@code bufferSize} then
   * the thread wakes up sooner.
   *
   * @param bufferSize the size of the buffered span data.
   * @param scheduleDelay the maximum delay.
   */
  static SpanExporterImpl create(int bufferSize, Duration scheduleDelay) {
    // TODO(bdrutu): Consider to add a shutdown hook to not avoid dropping data.
    Worker worker = new Worker(bufferSize, scheduleDelay);
    return new SpanExporterImpl(worker);
  }

  /**
   * Adds a Span to the exporting service.
   *
   * @param span the {@code Span} to be added.
   */
  public void addSpan(SpanImpl span) {
    worker.addSpan(span);
  }

  @Override
  public void registerHandler(String name, Handler handler) {
    worker.registerHandler(name, handler);
  }

  @Override
  public void unregisterHandler(String name) {
    worker.unregisterHandler(name);
  }

  // Synchronously drains whatever is currently buffered to all registered handlers.
  void flush() {
    worker.flush();
  }

  // Flushes the buffered spans, then interrupts the worker thread so its
  // run() loop exits on the next wait().
  void shutdown() {
    flush();
    workerThread.interrupt();
  }

  private SpanExporterImpl(Worker worker) {
    // NOTE(review): the thread is started before `this.worker` is assigned.
    // The thread only touches the Worker instance passed to it, so this is
    // benign here, but starting a thread from a constructor is fragile.
    this.workerThread =
        new DaemonThreadFactory("ExportComponent.ServiceExporterThread").newThread(worker);
    this.workerThread.start();
    this.worker = worker;
  }

  @VisibleForTesting
  Thread getServiceExporterThread() {
    return workerThread;
  }

  // Worker in a thread that batches multiple span data and calls the registered services to export
  // that data.
  //
  // The map of registered handlers is implemented using ConcurrentHashMap ensuring full
  // concurrency of retrievals and adjustable expected concurrency for updates. Retrievals
  // reflect the results of the most recently completed update operations held upon their onset.
  //
  // The list of batched data is protected by an explicit monitor object which ensures full
  // concurrency.
  private static final class Worker implements Runnable {
    private final Object monitor = new Object();

    @GuardedBy("monitor")
    private final List<SpanImpl> spans;

    private final Map<String, Handler> serviceHandlers = new ConcurrentHashMap<String, Handler>();
    private final int bufferSize;
    private final long scheduleDelayMillis;

    // See SpanExporterImpl#addSpan.
    private void addSpan(SpanImpl span) {
      synchronized (monitor) {
        this.spans.add(span);
        // Wake the exporter early once the buffer threshold is exceeded.
        if (spans.size() > bufferSize) {
          monitor.notifyAll();
        }
      }
    }

    // See SpanExporter#registerHandler.
    private void registerHandler(String name, Handler serviceHandler) {
      serviceHandlers.put(name, serviceHandler);
    }

    // See SpanExporter#unregisterHandler.
    private void unregisterHandler(String name) {
      serviceHandlers.remove(name);
    }

    // Exports the list of SpanData to all the ServiceHandlers.
    private void onBatchExport(List<SpanData> spanDataList) {
      // From the java documentation of the ConcurrentHashMap#entrySet():
      // The view's iterator is a "weakly consistent" iterator that will never throw
      // ConcurrentModificationException, and guarantees to traverse elements as they existed
      // upon construction of the iterator, and may (but is not guaranteed to) reflect any
      // modifications subsequent to construction.
      for (Map.Entry<String, Handler> it : serviceHandlers.entrySet()) {
        // In case of any exception thrown by the service handlers continue to run.
        try {
          it.getValue().export(spanDataList);
        } catch (Throwable e) {
          logger.log(Level.WARNING, "Exception thrown by the service export " + it.getKey(), e);
        }
      }
    }

    private Worker(int bufferSize, Duration scheduleDelay) {
      spans = new ArrayList<SpanImpl>(bufferSize);
      this.bufferSize = bufferSize;
      this.scheduleDelayMillis = scheduleDelay.toMillis();
    }

    // Returns an unmodifiable list of all buffered spans data to ensure that any registered
    // service handler cannot modify the list.
    private static List<SpanData> fromSpanImplToSpanData(List<SpanImpl> spans) {
      List<SpanData> spanDatas = new ArrayList<SpanData>(spans.size());
      for (SpanImpl span : spans) {
        spanDatas.add(span.toSpanData());
      }
      return Collections.unmodifiableList(spanDatas);
    }

    @Override
    public void run() {
      while (true) {
        // Copy all the batched spans in a separate list to release the monitor lock asap to
        // avoid blocking the producer thread.
        List<SpanImpl> spansCopy;
        synchronized (monitor) {
          // Only wait when below the buffer threshold; otherwise export immediately.
          if (spans.size() < bufferSize) {
            do {
              // In the case of a spurious wakeup we export only if we have at least one span in
              // the batch. It is acceptable because batching is a best effort mechanism here.
              try {
                monitor.wait(scheduleDelayMillis);
              } catch (InterruptedException ie) {
                // Preserve the interruption status as per guidance and stop doing any work.
                Thread.currentThread().interrupt();
                return;
              }
            } while (spans.isEmpty());
          }
          spansCopy = new ArrayList<SpanImpl>(spans);
          spans.clear();
        }
        // Execute the batch export outside the synchronized to not block all producers.
        final List<SpanData> spanDataList = fromSpanImplToSpanData(spansCopy);
        if (!spanDataList.isEmpty()) {
          onBatchExport(spanDataList);
        }
      }
    }

    // Drains and exports the current buffer on the caller's thread.
    void flush() {
      List<SpanImpl> spansCopy;
      synchronized (monitor) {
        spansCopy = new ArrayList<SpanImpl>(spans);
        spans.clear();
      }
      final List<SpanData> spanDataList = fromSpanImplToSpanData(spansCopy);
      if (!spanDataList.isEmpty()) {
        onBatchExport(spanDataList);
      }
    }
  }
}
// constructs straight-line trajectory with triangular velocity profile, // respective limits of velocity and accel void TrajBuilder::build_triangular_travel_traj(geometry_msgs::PoseStamped start_pose, geometry_msgs::PoseStamped end_pose, std::vector<nav_msgs::Odometry> &vec_of_states) { double x_start = start_pose.pose.position.x; double y_start = start_pose.pose.position.y; double x_end = end_pose.pose.position.x; double y_end = end_pose.pose.position.y; double dx = x_end - x_start; double dy = y_end - y_start; double psi_des = atan2(dy, dx); nav_msgs::Odometry des_state; des_state.header = start_pose.header; des_state.pose.pose = start_pose.pose; des_state.twist.twist = halt_twist_; double trip_len = sqrt(dx * dx + dy * dy); double t_ramp = sqrt(trip_len / accel_max_); int npts_ramp = round(t_ramp / dt_); double v_peak = accel_max_*t_ramp; double d_vel = alpha_max_*dt_; double x_des = x_start; double y_des = y_start; double speed_des = 0.0; des_state.twist.twist.angular.z = 0.0; des_state.pose.pose.orientation = convertPlanarPsi2Quaternion(psi_des); double t = 0.0; for (int i = 0; i < npts_ramp; i++) { t += dt_; speed_des = accel_max_*t; des_state.twist.twist.linear.x = speed_des; x_des = x_start + 0.5 * accel_max_ * t * t * cos(psi_des); y_des = y_start + 0.5 * accel_max_ * t * t * sin(psi_des); des_state.pose.pose.position.x = x_des; des_state.pose.pose.position.y = y_des; vec_of_states.push_back(des_state); } for (int i = 0; i < npts_ramp; i++) { speed_des -= accel_max_*dt_; des_state.twist.twist.linear.x = speed_des; x_des += speed_des * dt_ * cos(psi_des); y_des += speed_des * dt_ * sin(psi_des); des_state.pose.pose.position.x = x_des; des_state.pose.pose.position.y = y_des; vec_of_states.push_back(des_state); } des_state.pose.pose = end_pose.pose; des_state.pose.pose.orientation = convertPlanarPsi2Quaternion(psi_des); des_state.twist.twist = halt_twist_; vec_of_states.push_back(des_state); }
<gh_stars>1-10
import math

import torch
from torch import nn
from torch.nn import Parameter
import torch.nn.functional as F

from .self_attention_func import self_attn_func
from onmt.constants import double_precision


class SelfMultiheadAttn(nn.Module):
    """Multi-headed self-attention.

    See "Attention Is All You Need" for more details. Query, key and value are
    all derived from the same input tensor via a single fused input projection.

    When apex is available, a fused CUDA attention kernel is used
    (``self.optimized == 1``); otherwise the pure-PyTorch ``self_attn_func``
    fallback is used (``self.optimized == 2``). Note: the fast apex path does
    not support incremental decoding (see the try/except in ``__init__``).
    """

    def __init__(self, embed_dim, num_heads, dropout=0.):
        super().__init__()
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.dropout = dropout
        self.head_dim = embed_dim // num_heads
        assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
        self.bias = True
        # Standard 1/sqrt(d_k) attention scaling factor.
        self.scaling = self.head_dim ** -0.5

        # Fused Q/K/V projection: a single (3*embed_dim, embed_dim) weight.
        self.in_proj_weight = Parameter(torch.Tensor(3 * embed_dim, embed_dim))
        self.out_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))

        self.in_proj_bias = Parameter(torch.Tensor(3 * embed_dim))
        self.out_proj_bias = Parameter(torch.Tensor(embed_dim))
        self.reset_parameters()

        self.attn_func = self_attn_func

        self.optimized = 2
        try:
            # the fast one requires apex and does not work with incremental so careful
            from apex.contrib.multihead_attn.fast_self_multihead_attn_func import fast_self_attn_func
            self.attn_func_fast = fast_self_attn_func
            self.optimized = 1
        except ModuleNotFoundError as e:
            self.optimized = 2
            self.attn_func_fast = None

    def reset_parameters(self):
        # nn.init.xavier_uniform_(self.in_proj_weight, gain=math.sqrt(2))
        # nn.init.xavier_uniform_(self.out_proj_weight)
        # Normal init with Xavier-style std, computed for equal fan-in/fan-out.
        std_ = math.sqrt(2.0 / (self.embed_dim + self.embed_dim))
        nn.init.normal_(self.in_proj_weight, 0.0, std_)
        nn.init.normal_(self.out_proj_weight, 0.0, std_)

        nn.init.constant_(self.in_proj_bias, 0.)
        nn.init.constant_(self.out_proj_bias, 0.)

    def forward(self, query, pos, key_padding_mask=None, attn_mask=None, incremental=False,
                incremental_cache=None, **kwargs):
        """Compute self-attention over ``query``.

        Input shape: Time x Batch x Channel.

        Self-attention is implemented by using ``query`` as key and value as
        well. Future timesteps can be masked via ``attn_mask``. Padding
        elements can be excluded from the key by passing a binary ByteTensor
        (``key_padding_mask``) with shape: batch x src_len, where padding
        elements are indicated by 1s.

        ``pos`` is accepted for interface compatibility but is not used by
        this module. ``attn_mask`` and ``key_padding_mask`` are mutually
        exclusive. Returns ``(outputs, coverage)`` from ``self.attn_func``.
        """
        is_training = self.training

        # Self-attention: key and value are the query itself.
        key = query
        value = query
        len_key = key.size(0)

        input_weights = self.in_proj_weight
        input_bias = self.in_proj_bias

        if key_padding_mask is not None:
            assert (attn_mask is None), "ERROR attn_mask and key_padding_mask should not be both defined!"
            mask = key_padding_mask
            if len(mask.shape) == 3:
                mask = mask.squeeze(1)
        elif attn_mask is not None:
            mask = attn_mask
            if len(mask.shape) == 3:
                mask = mask.squeeze(0)
        else:
            mask = None

        # NOTE(review): this always calls the fallback attn_func, never
        # attn_func_fast, even when apex is available — confirm intentional.
        outputs, coverage = self.attn_func(attn_mask is not None, is_training, self.num_heads, query,
                                           input_weights, self.out_proj_weight,
                                           input_bias, self.out_proj_bias,
                                           mask, self.dropout,
                                           incremental, incremental_cache)

        return outputs, coverage
<gh_stars>1-10 // Copyright 2016 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. package models import ( "fmt" "testing" "code.gitea.io/gitea/modules/git" ) func BenchmarkGetCommitGraph(b *testing.B) { currentRepo, err := git.OpenRepository(".") if err != nil { b.Error("Could not open repository") } for i := 0; i < b.N; i++ { graph, err := GetCommitGraph(currentRepo) if err != nil { b.Error("Could get commit graph") } if len(graph) < 100 { b.Error("Should get 100 log lines.") } } } func BenchmarkParseCommitString(b *testing.B) { testString := "* DATA:||4e61bacab44e9b4730e44a6615d04098dd3a8eaf|2016-12-20 21:10:41 +0100|<NAME>|<EMAIL>|4e61bac|Add route for graph" for i := 0; i < b.N; i++ { graphItem, err := graphItemFromString(testString, nil) if err != nil { b.Error("could not parse teststring") } if graphItem.Author != "<NAME>" { b.Error("Did not get expected data") } } } func TestCommitStringParsing(t *testing.T) { dataFirstPart := "* DATA:||4e61bacab44e9b4730e44a6615d04098dd3a8eaf|2016-12-20 21:10:41 +0100|Author|<EMAIL>|4e61bac|" tests := []struct { shouldPass bool testName string commitMessage string }{ {true, "normal", "not a fancy message"}, {true, "extra pipe", "An extra pipe: |"}, {true, "extra 'Data:'", "DATA: might be trouble"}, } for _, test := range tests { t.Run(test.testName, func(t *testing.T) { testString := fmt.Sprintf("%s%s", dataFirstPart, test.commitMessage) graphItem, err := graphItemFromString(testString, nil) if err != nil && test.shouldPass { t.Errorf("Could not parse %s", testString) return } if test.commitMessage != graphItem.Subject { t.Errorf("%s does not match %s", test.commitMessage, graphItem.Subject) } }) } }
/** * Abstract base class for OrientDB factories. Each concrete implementation is responsible for a * given OrientDB type, i.e. document, object or graph database. Concrete implementations also * encapsulate a pool for the given database type. * <p> * All properties are optional and have default values, except the {@code url} property. * * @author Harald Wellmann * */ public abstract class AbstractOrientDatabaseFactory { /** Default database username. */ public static final String DEFAULT_USERNAME = "admin"; /** Default database password. */ public static final String DEFAULT_PASSWORD = "admin"; /** Default minimum pool size. */ public static final int DEFAULT_MIN_POOL_SIZE = 1; /** Default maximum pool size. */ public static final int DEFAULT_MAX_POOL_SIZE = 20; private String username = DEFAULT_USERNAME; private String password = DEFAULT_PASSWORD; private int minPoolSize = DEFAULT_MIN_POOL_SIZE; private int maxPoolSize = DEFAULT_MAX_POOL_SIZE; private String url; @PostConstruct public void init() { if (url == null) { throw new IllegalArgumentException("url property must not be null"); } ODatabaseComplex<?> db = newDatabase(); createDatabase(db); createPool(); } /** * Creates a database pool. */ protected abstract void createPool(); /** * Gets a new database object from the pool. The returned database is open. * * @return database object */ public abstract ODatabaseComplex<?> openDatabase(); /** * Creates a new transactional database object for the URL set on this factory. * * @return */ protected abstract ODatabaseComplex<?> newDatabase(); /** * Returns the current database object for the current thread. * * @return current database object */ public ODatabaseComplex<?> db() { return ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner(); } /** * Physically creates a database in the underlying storage. The returned database is closed, * except when the database is of type {@code memory}, since a closed memory database cannot be * reopened. 
* * @param db database object */ protected void createDatabase(ODatabaseComplex<?> db) { if (! getUrl().startsWith("remote:")) { if (!db.exists()) { db.create(); db.close(); } } } /** * @return Database URL */ public String getUrl() { return url; } /** * Sets the database URL for the database objects produced by this factory. The URL * <em>must</em> be set before invoking any non-accessor method of this factory. * * @param url database URL */ public void setUrl(String url) { this.url = url; } /** * Gets the database username. * * @return the username */ public String getUsername() { return username; } /** * Sets the database username. * * @param username the username to set */ public void setUsername(String username) { this.username = username; } /** * Gets the database password. * * @return the password */ public String getPassword() { return password; } /** * Sets the database password. * * @param password the password to set */ public void setPassword(String password) { this.password = password; } /** * Gets the minimum pool size. * * @return the minPoolSize */ public int getMinPoolSize() { return minPoolSize; } /** * Sets the minimum pool size. * * @param minPoolSize the minPoolSize to set */ public void setMinPoolSize(int minPoolSize) { this.minPoolSize = minPoolSize; } /** * Gets the maximum pool size. * * @return the maxPoolSize */ public int getMaxPoolSize() { return maxPoolSize; } /** * Sets the maximum pool size. * * @param maxPoolSize the maxPoolSize to set */ public void setMaxPoolSize(int maxPoolSize) { this.maxPoolSize = maxPoolSize; } }
/** * Test that using a start generation of zero causes an IllegalArgumentException * * @throws Exception */ @Test(expected = IllegalArgumentException.class) public void testGenerationZero() throws Exception { advancedDatabase.createWithHistory(REV3.revision, 0, toHistory (toRevisionCollection(EnumSet.of(REV3)))); }
import { ErrorIcon } from '@bigcommerce/big-design-icons';
import React, { Children, createContext, Fragment, HTMLAttributes, isValidElement, useMemo, useState } from 'react';

import { warning } from '../../../utils';
import { Checkbox } from '../../Checkbox';
import { Radio } from '../../Radio';
import { FormControlError } from '../Error';

import { StyledError, StyledGroup, StyledInlineGroup } from './styled';

export interface GroupProps extends HTMLAttributes<HTMLDivElement> {
  // Error(s) to render for the whole group; takes precedence over per-input errors.
  errors?: React.ReactNode | React.ReactNode[];
}

// Per-input error registry: maps an input's key to its error node(s).
type Errors = {
  [inputKey: string]: React.ReactNode | React.ReactNode[];
};

interface Context {
  errors?: Errors;
  setErrors?: React.Dispatch<React.SetStateAction<Errors>>;
}

// Context through which child inputs register/report their errors to the group.
export const FormGroupContext = createContext<Context>({});

/**
 * Groups form controls and renders their validation errors below the group.
 * If the `errors` prop is set, per-input errors collected via context are ignored.
 */
export const FormGroup: React.FC<GroupProps> = (props) => {
  const [inputErrors, setInputErrors] = useState<Errors>({});
  const { children, errors: groupErrors } = props;
  const childrenCount = Children.count(children);
  // NOTE(review): `inline` is true when at least one child is NOT a Checkbox/Radio —
  // confirm the name matches the intended layout (inline vs stacked).
  const inline = !Children.toArray(children).every((child) => {
    return isValidElement(child) && (child.type === Checkbox || child.type === Radio);
  });

  // Memoize so consumers don't re-render on every FormGroup render.
  const contextValue = useMemo(
    () => ({
      errors: inputErrors,
      setErrors: setInputErrors,
    }),
    [inputErrors],
  );

  const renderErrors = () => {
    // If Form.Group has errors prop, don't generate errors from children
    if (groupErrors) {
      return generateErrors(groupErrors, true);
    }

    return inputErrors && generateErrors(Object.values(inputErrors));
  };

  return (
    <FormGroupContext.Provider value={contextValue}>
      {inline ? (
        <StyledInlineGroup childrenCount={childrenCount}>
          {children}
          {renderErrors()}
        </StyledInlineGroup>
      ) : (
        <StyledGroup>
          {children}
          {renderErrors()}
        </StyledGroup>
      )}
    </FormGroupContext.Provider>
  );
};

/**
 * Recursively renders errors. Accepts a string, a FormControlError element, or
 * an array of either. `fromGroup` enables a dev warning for unsupported shapes
 * passed through the group-level `errors` prop. Returns undefined for
 * unsupported non-empty values.
 */
const generateErrors = (errors: GroupProps['errors'], fromGroup = false, key?: number): React.ReactNode => {
  // Plain string: wrap it in the standard error presentation.
  if (typeof errors === 'string') {
    return (
      <Fragment key={key}>
        <StyledError alignItems="center">
          <ErrorIcon color="danger" />
          <FormControlError>{errors}</FormControlError>
        </StyledError>
      </Fragment>
    );
  }

  // Already a FormControlError element: just add the icon wrapper.
  if (isValidElement(errors) && errors.type === FormControlError) {
    return (
      <Fragment key={key}>
        <StyledError alignItems="center">
          <ErrorIcon color="danger" />
          {errors}
        </StyledError>
      </Fragment>
    );
  }

  // Array: render each entry, skipping falsy values.
  if (Array.isArray(errors)) {
    return errors.map((error, index) => error && generateErrors(error, fromGroup, index));
  }

  if (!errors) {
    return null;
  }

  // Unsupported shape from the group-level prop: warn in development.
  if (fromGroup) {
    warning('errors must be either a string, FormControlError, or an array of strings or FormControlError components.');
  }
};
<filename>common/src/main/java/sg/bigo/common/customcapture/CameraController.java<gh_stars>1-10
package sg.bigo.common.customcapture;

import android.app.Activity;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.util.Log;
import android.view.TextureView;

import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.CountDownLatch;

import sg.bigo.common.LiveApplication;
import sg.bigo.common.customcapture.ve_gl.EglBase;
import sg.bigo.common.customcapture.ve_gl.GlRectDrawer;

/**
 * Singleton that drives a custom camera-capture pipeline: frames from the legacy
 * {@link Camera} API land on an OES SurfaceTexture, are rendered through an FBO
 * into a 2D texture, and that texture is pushed to the AV engine. All GL work
 * runs on a dedicated HandlerThread that owns {@code previewEglBase}.
 */
public class CameraController implements SurfaceTexture.OnFrameAvailableListener{
    private final String TAG = "CameraController";
    private int mDisplayOrientation; // rotation angle applied to the preview
    private int mRotation;
    private CameraPosion mPosion; // front or back camera
    private int mPreViewWidth; // preview width
    private int mPreViewHeight; // preview height
    private int mPreViewFps; // preview frame rate
    private Camera.Size mPreviewSize;
    private Camera mCamera;
    private Camera.CameraInfo mCameraInfo;
    private GLSurfaceView mGlsurfaceView;
    private SurfaceTexture mSurfaceTexture;
    // OES texture id backing mSurfaceTexture (receives raw camera frames).
    private int mSurfaceTextureId;
    // Set under `this` lock when a new camera frame is pending updateTexImage().
    private boolean updateSurface;
    private final float[] mTexMtx = GlUtil.createIdentityMtx();
    private RenderScreen mRenderScreen = null;
    //frameBufferId
    private RgbaRenderFilter mRenderFilter = null;
    private int mFrameBufferId = 0;
    // 2D texture the FBO renders into; this is what gets sent to the AV engine.
    private int mPreviewTextureId = 0;
    private EglBase previewEglBase;
    private static CameraController instance;
    private Bitmap mBitmap;
    private TextureView mTextureView;
    private HandlerThread mThread;
    private Handler mHandler; // posts all GL work onto mThread

    public enum CameraPosion {
        FRONT, BACK
    }

    private CameraController() {
        mDisplayOrientation = 0;
        mRotation = 0;
        mPosion = CameraPosion.FRONT;
        mPreViewWidth = 720;
        mPreViewHeight = 1280;
        mPreViewFps = 30;
        // Dedicated thread for all EGL/GL operations.
        mThread = new HandlerThread("CameraController" + hashCode());
        mThread.start();
        mHandler = new Handler(mThread.getLooper());
    }

    public static synchronized CameraController getInstance() {
        if (instance == null) {
            instance = new CameraController();
        }
        return instance;
    }

    /**
     * Opens the camera matching {@code mPosion} (front/back) and stores it in
     * {@code mCamera}. Returns the facing constant that was searched for.
     */
    private int openCommonCamera() {
        int cameraId = mPosion == CameraPosion.FRONT ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        int numberOfCameras = Camera.getNumberOfCameras();
        mCameraInfo = new Camera.CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, mCameraInfo);
            if (mCameraInfo.facing == cameraId) {
                mCamera = Camera.open(i);
            }
        }
        return cameraId;
    }

    private Activity mActivity;

    /**
     * Entry point: wires up the preview TextureView, loads the static test
     * bitmap and kicks off GL/camera initialization on the GL thread.
     * Always returns true.
     */
    public boolean openCamera(Activity activity, GLSurfaceView glSurfaceView, TextureView aux_view) {
        mActivity = activity;
        mBitmap = createBitmapFromAsset();
        boolean b = true;
        mGlsurfaceView = glSurfaceView;
        mTextureView = aux_view;
        mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                Log.e(TAG, "onSurfaceTextureAvailable: " + Thread.currentThread().getName() + "" + Thread.currentThread().getId());
            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
                Log.e(TAG, "onSurfaceTextureSizeChanged: " + Thread.currentThread().getName() + "" + Thread.currentThread().getId());
            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                Log.e(TAG, "onSurfaceTextureDestroyed: " + Thread.currentThread().getName() + "" + Thread.currentThread().getId());
                // View surface is going away: tear down camera and GL resources.
                closeCamera();
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
                Log.e(TAG, "onSurfaceTextureUpdated: ===== " + Thread.currentThread().getName() + "" + Thread.currentThread().getId());
            }
        });
        initSurfaceTexture();
        return b;
    }

    /**
     * Stops and releases the camera, then synchronously (via CountDownLatch)
     * destroys all GL resources on the GL thread and releases the EGL context.
     */
    public void closeCamera() {
        try {
            if (mRenderFilter != null) {
                mRenderFilter.destroy();
                mRenderFilter = null;
            }
            if (mCamera != null) {
                mCamera.stopPreview();
                mCamera.release();
                mCamera = null;
            }
        } catch (Exception e) {
            Log.e(TAG, "closeCamera: " + e);
        } finally {
            // Block the caller until the GL thread has finished cleanup.
            final CountDownLatch barrier = new CountDownLatch(1);
            mHandler.post(() -> {
                previewEglBase.makeCurrent();
                if(mFrameBufferId != 0) {
                    int[] fbos = new int[] { mFrameBufferId };
                    GLES20.glDeleteFramebuffers(1,fbos,0);
                    mFrameBufferId = 0;
                }
                if(mPreviewTextureId != 0) {
                    int[] textureIds = new int[] { mPreviewTextureId };
                    GLES20.glDeleteTextures(1,textureIds, 0);
                    mPreviewTextureId = 0;
                }
                if(mSurfaceTextureId != 0) {
                    int[] textureIds = new int[] { mSurfaceTextureId };
                    GLES20.glDeleteTextures(1,textureIds, 0);
                    mSurfaceTextureId = 0;
                }
                mRenderScreen = null;
                previewEglBase.detachCurrent();
                if(previewEglBase != null) {
                    previewEglBase.release();
                    previewEglBase = null;
                }
                barrier.countDown();
            });
            try {
                barrier.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    public Camera.Size getmPreviewSize() {
        return mPreviewSize;
    }

    /**
     * Decodes "output.png" from the app assets; used as a static test image by
     * sendStaticImage(). Returns null on failure.
     */
    private Bitmap createBitmapFromAsset() {
        Bitmap bitmap = null;
        try {
            AssetManager assetManager = LiveApplication.Companion.getAppContext().getAssets();
            InputStream is = assetManager.open("output.png");
            bitmap = BitmapFactory.decodeStream(is);
            if (bitmap != null) {
                System.out.println("测试一:width=" + bitmap.getWidth() + " ,height=" + bitmap.getHeight());
            } else {
                System.out.println("bitmap == null");
            }
        } catch (Exception e) {
            System.out.println("异常信息:" + e.toString());
        }
        return bitmap;
    }

    // Identity 4x4 matrix used when drawing the static bitmap.
    float[] transformationMatrix = new float[]{1.0f, 0.0f, 0.0f, 0.0f,
            0.0f, 1.0f, 0.0f, 0.0f,
            0.0f, 0.0f, 1.0f, 0.0f,
            0.0f, 0.0f, 0.0f, 1.0f};

    /**
     * Whether the display is landscape. Currently always false.
     *
     * @return false
     */
    public boolean isLandscape() {
        return false;
    }

    /**
     * Computes the display orientation from the camera sensor orientation and
     * device rotation, picks the best-matching preview size and fps range, and
     * applies them to the open camera.
     */
    private void setPameras(int cameraId) {
        // Front camera mirrors, so orientation compensation differs by facing.
        mDisplayOrientation = cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT ?
                (mCameraInfo.orientation + mRotation) % 360 :
                (mCameraInfo.orientation - mRotation + 360) % 360;
        boolean portrait = false;
        boolean upsideDown = false;
        if (mDisplayOrientation == 0) {
            portrait = true;
            upsideDown = false;
        } else if (mDisplayOrientation == 90) {
            portrait = false;
            upsideDown = false;
        } else if (mDisplayOrientation == 180) {
            portrait = true;
            upsideDown = true;
        } else if (mDisplayOrientation == 270) {
            portrait = false;
            upsideDown = true;
        }
        Camera.Size preViewSize = CameraUtils.getOptimalPreviewSize(mCamera, mPreViewWidth, mPreViewHeight, portrait);
        Log.i(TAG, "setPameras: preViewSize width " + preViewSize.width + " height " + preViewSize.height + " ,mDisplayOrientation " + mDisplayOrientation);
        Camera.Parameters parameters = mCamera.getParameters();
        //parameters.setRotation(mRotation);
        parameters.setPreviewFormat(ImageFormat.NV21);
        parameters.set("orientation", "portrait");
        parameters.setPreviewSize(preViewSize.width, preViewSize.height);
        if (upsideDown) {
            mCamera.setDisplayOrientation(180);
        }
        int[] range = CameraUtils.adaptPreviewFps(mPreViewFps, parameters.getSupportedPreviewFpsRange());
        parameters.setPreviewFpsRange(range[0], range[1]);
        mCamera.setParameters(parameters);
        mPreviewSize = preViewSize;
    }

    private GlRectDrawer previewDrawer;

    /**
     * On the GL thread: creates the EGL context/surface bound to the
     * TextureView, creates the OES camera texture + SurfaceTexture, then
     * (re)starts the camera. The first half is awaited synchronously so the
     * EGL surface exists before continuing.
     */
    private void initSurfaceTexture() {
        final CountDownLatch barrier = new CountDownLatch(1);
        mHandler.post(() -> {
            previewEglBase = EglBase.create(null, EglBase.CONFIG_RGBA);
            if (!mTextureView.isAvailable()) {
                // NOTE(review): returning here never counts down the latch, so the
                // caller blocks in barrier.await() — confirm intended.
                return;
            }
            try {
                // Create the EGLSurface used for preview.
                previewEglBase.createSurface(mTextureView.getSurfaceTexture());
            } catch (RuntimeException e) {
                previewEglBase.releaseSurface();
            }
            previewDrawer = new GlRectDrawer();
            barrier.countDown();
        });
        try {
            barrier.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        mHandler.post(() -> {
            previewEglBase.makeCurrent();
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            // Camera (OES) texture.
            mSurfaceTextureId = textures[0];
            mSurfaceTexture = new SurfaceTexture(mSurfaceTextureId);
            mSurfaceTexture.setOnFrameAvailableListener(CameraController.this);
            GLES20.glDisable(GLES20.GL_DEPTH_TEST);
            GLES20.glDisable(GLES20.GL_CULL_FACE);
            GLES20.glDisable(GLES20.GL_BLEND);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mSurfaceTextureId);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            stopCamera();
            startCamera();
            previewEglBase.detachCurrent();
        });
    }

    /**
     * Opens the camera, attaches it to the SurfaceTexture, applies parameters
     * and starts the preview stream.
     */
    public void startCamera() {
        int cameraId = openCommonCamera();
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            e.printStackTrace();
        }
        setPameras(cameraId);
        Log.i(TAG, "onSurfaceChanged");
        try {
            // NOTE(review): empty try block — leftover scaffolding?
        } catch (Exception e) {
            Log.e(TAG, "onSurfaceChanged: " + e);
        }
        mCamera.startPreview();
    }

    /** Stops the preview and releases the camera, if open. */
    public void stopCamera() {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    private int mBitmapTextureId = 0;

    /**
     * Camera frame callback (arrives on an arbitrary thread). Marks the frame
     * pending, then on the GL thread latches it into the OES texture, renders
     * it through the FBO into {@code mPreviewTextureId}, and hands that texture
     * to the AV engine.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        Log.i(TAG, "onFrameAvailable");
        synchronized (this) {
            updateSurface = true;
        }
        mHandler.post(() -> {
            if (previewEglBase == null) return;
            previewEglBase.makeCurrent();
            synchronized (CameraController.this) {
                if (updateSurface) {
                    // Latch the new frame into mSurfaceTextureId.
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mTexMtx);
                    updateSurface = false;
                }
            }
            if (mRenderScreen == null) {
                mRenderScreen = new RenderScreen(mSurfaceTextureId, true);
                mRenderScreen.setSreenSize(mPreViewWidth, mPreViewHeight);
            }
            if (mPreviewTextureId == 0) {
                // First frame: allocate destination texture + FBO lazily.
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                mPreviewTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mPreViewWidth, mPreViewHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
                mFrameBufferId = GlUtil.generateFrameBuffer(mPreviewTextureId);
            } else {
                GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBufferId);
            }
            // Compensate for camera sensor orientation before drawing.
            GlUtil.rotateTextureMatrix(mTexMtx, mDisplayOrientation);
            mRenderScreen.draw(mTexMtx);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            LiveApplication.Companion.avEngine().sendCustomVideoCaptureTextureData(mPreviewTextureId,
                    mPreViewWidth,
                    mPreViewHeight,
                    LiveApplication.Companion.getNeedCustomUpsideDown(),
                    LiveApplication.Companion.getNeedCustomMirror(),
                    SystemClock.elapsedRealtime());
//            sendStaticImage();
            previewEglBase.detachCurrent();
        });
    }

    /**
     * Debug path: renders the static asset bitmap through the FBO and sends it
     * to the AV engine instead of a live camera frame. Must run on the GL thread
     * with the EGL context current.
     */
    private void sendStaticImage() {
        if (mBitmapTextureId == 0) {
            GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
            mBitmapTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0);
        }
        if (mPreviewTextureId == 0) {
            GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
            mPreviewTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mPreviewSize.width, mPreviewSize.height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
            mFrameBufferId = GlUtil.generateFrameBuffer(mPreviewTextureId);
        } else {
            // Bind frame buffer (fbo).
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBufferId);
        }
        if (previewDrawer == null) {
            previewDrawer = new GlRectDrawer();
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        previewDrawer.drawRgb(mBitmapTextureId, transformationMatrix, 720, 1280, 0, 0, 720, 1280);
        LiveApplication.Companion.avEngine().sendCustomVideoCaptureTextureData(mPreviewTextureId,
                720,
                1280,
                0,
                true,
                SystemClock.elapsedRealtime());
    }
}
Legendary painter Bob Ross raked in cash with his art empire, but his son claims he was left out to dry after the painter’s merchandise continued making money following his death, and now he’s filed a lawsuit. Robert Ross says after his father died in 1995, he and his uncle were granted all the rights to Ross’ name, likeness, and right of publicity. His uncle recently transferred his interest to Robert, who now owns 100% of Ross’ image. Robert claims the company his dad started, Bob Ross Inc., continues to make money using the painter’s image and likeness, and is currently selling all types of Ross merch, including clothes, costumes, games, toy figures, underwear, soap, sticky notes, puzzles, Chia pets, and coloring books. He also says the recent streaming success of Ross’ “The Joy of Painting” series on Netflix and Twitch has created a renewed interest in the painter. Robert says all products and deals made with Ross’ image — which he hasn’t seen any money for — are unauthorized and wants them shut down immediately. He wants a full audit of all profits made using Ross’ image, and to be paid out for the mistake. As the legend himself would have said, “There are no mistakes, just happy accidents.”
// Loads the textures that have been embedded in the resource file and puts them into // the texture storage provided. // device: The Direct3D device to use to load the textures. // storage: The ITextureStorage instance to store the textures in. void load_default_textures(const std::shared_ptr<graphics::IDevice>& device, const std::shared_ptr<ITextureStorage>& storage) { Resource texture_list = get_resource_memory(IDR_TEXTURE_LIST, L"TEXT"); auto contents = std::string(texture_list.data, texture_list.data + texture_list.size); std::stringstream stream(contents); while (!stream.eof()) { std::string key; int resource_id = 0; if (!(stream >> key >> resource_id)) { break; } storage->store(key, load_texture_from_resource(*device, resource_id)); if (!std::getline(stream, key)) { break; } } }
Illustration: Andrew Dyson Last time around, after the 2010 election, it took 17 days before Julia Gillard was able to tell the governor-general that she had a majority in the House and could form a government. There is no time limit on how long negotiations might continue – what if it took 27 days, or 97 days, or 197 days? Or even 589 days? Because that's how long it took Belgium to form a government after its 2010 election. Can you imagine the chaos – no government for a year and a half! The mayhem must have been dreadful to behold. Wasn't it? "On a day-to-day basis," reported the BBC's Stephen Mulvey after a year without government, "Belgium is ticking along nicely. Its economy is growing, exports are up, inward foreign investment has continued, the country's presidency of the European Union in 2010 was deemed a success, and it has contributed to the NATO bombing of Libya." Will Australia have as long a wait as Belgium did for a new prime minister? Credit:Andrew Meares A Belgian filmmaker, Dan Alexe, told the Independent: "The trains and buses still run. The police are still operating. The post is late, but then it always was late. Maybe having 'no government' is preferable to having governments which collapse all the time." Belgium is not Australia. Its population is 11 million, less than half Australia's. But there are strong similarities. It's a stable, high-income, democratic federation. Like Australia, it's a constitutional monarchy. It's a US ally. Illustration: Andrew Dyson Its main political divide is not partisan but cultural, with the prosperous Dutch-speaking north, home to 60 per cent of the population, resentful at the less prosperous French-speaking south. There was a moment when the divide was so bitter that commentators predicted the country would break into two. It didn't. 
When no party won a majority after its 2010 election, its parties refused to compromise and stalemate settled in while the incumbent government continued as caretaker, operating under a strictly limited mandate. There was a protest rally against the political standoff, but attempts to hold more demonstrations failed through lack of interest. "By and large, everything still works," said Marc De Vos, a professor at Ghent University, explaining the lack of public outrage. "We get paid, buses run, schools are open." By some measures, the country appeared to do better than neighbours with elected governments. "The caretaker government last month headed off market jitters over its debt levels by quickly agreeing on a tighter budget," Time magazine reported in February 2011, eight months into the political limbo. "The country is recovering well from the downturn, with growth last year at 2.1 per cent (compared with the EU average of 1.5 per cent), foreign investment doubling and unemployment at 8.5 per cent, well below the EU average of 9.4 per cent." One of the major lessons, concluded a pair of Belgian academics, Geert Bouckaert and Marleen Brans, was that "in mature democracies, a power vacuum is taken care of in a constructive, creative, and responsible way." And indeed, look around in Australia on the first business day after the country was supposedly rendered "ungovernable" by Saturday's election. On Monday the share market was up and so was the Australian dollar. Election campaign periods, when a caretaker government is running the country, have amply illustrated one of the reasons a mature democracy can operate smoothly without an active government. Two examples. The Reserve Bank raised interest rates during the 2007 campaign, infuriating John Howard. The Federal Police raided the homes of staff of powerful Labor politicians during the 2016 campaign, only telling the government and opposition when the raids were about to begin.
The country's work is mostly done by independent institutions such as the Reserve Bank or the courts, or autonomous institutions like the Federal Police, that continue to function without political direction. The states and local councils, of course, operate as usual. In fact, Belgium under a caretaker ran so smoothly that a former deputy prime minister of the country, Karel De Gucht, told me after nine months of political limbo that "I worry that it's going too well". He feared that the people would be quite content to live without a government, as long-term problems accumulated without any solution. He proved to be right. Belgium's debt continued to accumulate under the caretaker and finally, the politicians agreed to form a six-party coalition when the country's sovereign credit rating was downgraded, sending a jolt through the system. So there is no need to panic about an interregnum under a caretaker government in Australia . It's not as if the elected governments did a great job of managing Australia's deficits, in any case. Debt continued to mount under Labor and Liberal administrations for eight years continuously. The credit rating agencies on Monday warned that Australia eventually needed to bring the debt problem under control. Without an active, elected government, long-term problems will accumulate. But then, many have accumulated under Australia's elected governments too. The problem is not the uncertainty of a caretaker period. The problem is the long-run conduct of the parties themselves. The political parties need to jolt themselves out of their self-involved partisan games or, eventually, a crisis will do it for them. Peter Hartcher is political editor.
Association of Promoter Methylation of RUNX3 Gene with the Development of Esophageal Cancer: A Meta Analysis Background Runt-related transcription factor 3 (RUNX3) is a member of the runt-domain family of transcription factors. Emerging evidence indicates that RUNX3 is a tumor suppressor gene in several types of human cancers including esophageal cancer. However, the association between RUNX3 promoter methylation and esophageal cancer remains unclear. Here we conducted a systematic review and meta-analysis to quantitatively evaluate the effects of RUNX3 promoter methylation on the incidence of esophageal cancer. Methods A detailed literature search was made on Medline, Pubmed and Web of Science for related research publications written in English and/or Chinese. Methodological quality of the studies was also evaluated. The data were extracted and assessed by two reviewers independently. Analysis of pooled data were performed, the odds ratios (OR) were calculated and summarized respectively. Results Final analysis of 558 patients from 9 eligible studies was performed. The result showed that RUNX3 methylation was significantly higher in esophageal cancer than in normal squamous mucosa from the proximal resection margin or esophageal benign lesions (OR = 2.85, CI = 2.01–4.05, P<0.00001). The prevalence of lymph node involvement, tumor size (T1–T2 vs T3–T4) and histological grade was significantly greater in RUNX3-negative cases (RUNX3 unmethylated groups) than in RUNX3-positive cases (OR = 0.25, CI = 0.14–0.43, P<0.00001). RUNX3 methylation was significantly higher in esophageal adenocarcinoma (EAC) than Barrett’s esophagus (OR = 0.35, CI = 0.20–0.59, P<0.0001). In addition, the pooled HR for overall survival (OS) showed that decreased RUNX3 expression was associated with worse survival in esophageal cancer (HR = 4.31, 95% CI = 2.57–7.37, P<0.00001). 
Conclusions The results of this meta-analysis suggest that RUNX3 methylation is associated with an increased risk, progression as well as worse survival in esophageal cancer. RUNX3 methylation, which induces the inactivation of RUNX3 gene, plays an important role in esophageal carcinogenesis. Introduction Esophageal cancer is the eighth most common cancer worldwide, esophageal squamous cell carcinoma (ESCC) and adenocarcinoma (EAC) are two major histopathological types of esophageal cancer . It has been reported that esophageal tumor currently affects more than 450, 000 people worldwide and the incidence is still increasing . Surgery is the standard therapy for esophageal tumor . However, the overall prognosis is far less satisfactory and pretty modest, with 5-year survival rates ranging between 15 and 50% . Therefore, investigating the mechanism of initiation and progression and finding out the therapeutic targets at biomolecular levels are highly desired for the treatment of esophageal cancer. The Runt-related transcription factor 3 (RUNX3) gene is a tumor suppressor gene involved in the TGF-b signaling pathway, which was cloned and identified as a human runt-domain containing gene in 1994 . Its precise function has been intensively studied in gastric cancer, with upregulation inducing cell cycle arrest, apoptosis, and down regulating cyclin D1 expression , however, its role in esophageal cancer has not been thoroughly investigated and reviewed. Inactivation of RUNX3 by promoter methylation has been found to play an important role during normal tissue development and in tumorigenesis in esophagus . In this study, we reviewed and performed a meta analysis on the published clinical studies regarding the effect of RUNX3 on patients with esophageal cancer. Search strategy Medline, Pubmed and Web of Science were searched in December 2013 using the search terms: ''esophageal'' and ''cancer or tumor or neoplasm or carcinoma'' and ''RUNX3''. 
Studies identified through the approaches as described above were screened by titles first, then abstracts of the publications. After exclusion of non-relevant publications and identifications of duplicates from the different databases, the remaining papers were evaluated in the full text version for in-and exclusion criteria and for relevant articles in the reference lists. All clinical studies except case reports were chosen. The language of publication was restricted to English and Chinese. All searched data were retrieved. Authors' bibliographies and references of selected studies were also searched for other relevant studies. The most complete study was chosen to avoid duplication if same patient populations were reported in several publications. Selection criteria We collected all eligible articles about relationship between RUNX3 methylation and/or expression and clinicopathological features and clinical outcomes in esophageal cancer in this metaanalysis. Studies meeting the following inclusion criteria were included: (1) RUNX3 methylation and/or expression evaluated in the circulation and/or primary esophageal cancer tissues, (2) studies revealing the relationship between RUNX3 methylation and/or expression and esophageal cancer clinicopathological parameters and prognosis, (3) RUNX3 methylation and/or expression examined by polymerase chain reaction (PCR), (4) articles published as a full paper in English, (5) studies providing sufficient information to estimate hazard ratio (HR) about overall survival (OS) and 95% confidence interval (CI). The exclusion criteria included the following: (1) letters, reviews, case reports, conference abstracts, editorials, expert opinion, non-English, non-Chinese language papers; (2) articles with no information on OS or insufficient information for calculation of HR; and (3) all publications regarding in vitro/ex vivo studies, cell lines and human xenografts. Data extraction. 
Two investigators independently extracted data from eligible studies. Disagreements were resolved by discussion and consensus. Two investigators reviewed all of the articles that fit inclusion and exclusion criteria. The following information was recorded for each study: the first author name, year of publication, sample source, number of cases, clinicopathological parameters, cancer with tumor node metastasis (TNM) stage, RUNX3 methylation and/or expression, and patient survival. Data for study characteristics and clinical response were summarized and turned into table format. Heterogeneity of investigation was evaluated to determine whether the data of the various studies were appropriate for a meta-analysis. Statistical analysis. Analysis was conducted using the Stata 12.0 (Stata Corporation, TX, USA) and Review Manager 5.2 (Cochrane Collaboration, Oxford, UK). Comparisons of dichotomous measures were done by pooled estimates of odds ratios (ORs) as well as their 95% CIs. P value of <0.05 was considered to be statistically significant. Heterogeneity was examined by a chi-square test with significance set at P<0.10; the total variation among studies was estimated by I square. If there was heterogeneity among studies, we used a random effect model to pool the ORs; otherwise, a fixed effect model was chosen. The database search generated 14 articles from Pubmed and the Web of Science. After initial screening of all titles, abstracts and eligibility, 9 full-text studies were retrieved for more detailed assessment. The search of the article references did not produce additional publications. Eventually, 9 publications met the inclusion criteria for qualitative study and for meta-analysis. The article search and study selection is depicted in Figure 1. Identification of relevant studies Fourteen publications were identified by using the search method as described above.
Five of those were excluded due to laboratory studies, non-original articles (review), or studies irrelevant to the current analysis. Eventually, there were nine studies included in the final meta-analysis Study characteristics Nine studies published from 2005 to 2013 were eligible for meta-analysis. A total of 558 patients including esophageal squamous cell carcinomas (ESCCs) and esophageal adenocarcinomas (EACs) from China, Japan, Australia and USA were enrolled. A total of 140 cases of Barrett's esophagus (BE) were also included in this analysis. Their basic characteristics are summarized in Table 1. RUNX3 methylation and expression and clinicopathological features 1. Inactivation of RUNX3 through methylation in esophageal cancers. It was reported that the loss of RUNX3 mRNA expression was statistically correlated with the promoter hypermethylation in esophageal tumors (P,0.001) . We observed that RUNX3 methylation was significantly higher in ESCC/EAC than in normal squamous mucosa from the proximal resection margin or esophageal benign lesions. The pooled OR from 6 studies including 347 esophageal cancers and 246 normal squamous mucosa was shown in Figure 2 (OR = 2.85, CI = 2.01-4.05, P,0.00001), which indicated that RUNX3 inactivation through methylation plays an important role in the pathogenesis of esophageal cancers. 2. Role of RUNX3 methylation in esophageal cancer development. We analyzed 263 patients pooled in 4 studies to assess whether the aberrant RUNX3 methylation/expression in serum/cancer tissues DNA was associated with advanced stage, including tumor size (T1-T2 vs T3-T4), lymph node involvement, lymph and blood vessels metastasis, and recurrence in esophageal carcinomas. RUNX3 methylation/expression estimated in biopsy/ blood samples and clinicopathological factors as described above were examined. 
The prevalence of lymph node involvement, tumor size (T1-T2 vs T3-T4) and histological grade was significantly greater in RUNX3-negative cases (RUNX3 unmethylated groups) than in RUNX3-positive cases (Figure 3), with OR = 0.25, CI = 0.14-0.43, P,0.00001. These results suggest that epigenetic silencing of RUNX3 gene expression by promoter hypermethylation may play an important role in esophageal cancer progression and development. With similarity in pathologic stage, Hiramatsu et al showed that RUNX3 expression was significantly higher in 19 well-differentiated ESCCs than in 56 moderately or 69 poorly differentiated ESCCs (p,0.01). Barrett's esophagus (BE) is the metaplastic replacement of squamous with columnar epithelium in the esophagus, as a result of reflux. It is a major risk factor for the development of EAC . We observed that RUNX3 methylation was significantly higher in EAC than in BE as shown in Figure 4, OR = 0.35, CI = 0.20-0.59, P,0.0001. RUNX3 hypermethylation is an independent risk factor for progression of BE to high-grade dysplasia of esophagus and EAC . No heterogeneity was observed in the analysis of RUNX3 methylation/low expression in normal samples and esophageal patient samples (P = 0.81), in BE and EAC patients (P = 0.88). There is no heterogeneity of RUNX3 methylation with advanced stage (P = 0.81), so the fixed effect model was used. 3. RUNX3 as a prognostic factor for esophageal cancer. Four studies included investigated the relationship between OS and RUNX3 methylation/expression. The pooled HR for OS showed that decreased RUNX3 expression was associated with worse survival in esophageal cancer as shown in Figure 5 (HR = 4.31, 95% CI = 2.57-7.37, P,0.00001). 4. Sensitivity analyses and publication bias. A sensitivity analysis, in which one study was removed at a time, was conducted to assess the result stability. The pooled ORs and HRs were not significantly changed, indicating the stability of our analyses. 
The funnel plots were largely symmetric ( Figure 6) suggesting there were no publication biases in the meta-analysis of RUNX3 methylation/expression and clinicopathological features as well as overall survival respectively. Discussion RUNX3 belongs to the runt domain family of transcriptional factors that plays an important role during normal tissue development and in tumorigenesis . RUNX3 inactivation is a crucial factor to determine cancer pathogenesis and clinical outcome in a variety of cancer types . The different modes of RUNX3 inactivation in various tumor types include hemizygous deletion, mutations, hypermethylation, histone modifications and cytoplasmic mislocalization . To date, there have been some studies describing the precise expression, prognostic impact and methylation status of RUNX3 in esophageal cell carcinoma . We conducted a meta-analysis to evaluate the correlation between RUNX3 promoter methylation/low expression and esophageal cancer. Analysis of the pooled data showed that 1) Esophageal cancers had a higher methylation rate than normal tissues, or well-differentiated cancer tissues; 2) The prevalence of lymph node involvement, tumor size (T1-T2 vs T3-T4) and histological grade was significantly greater in RUNX3-negative cases (RUNX3 unmethylated groups) than in RUNX3-positive cases; RUNX3 methylation was significantly higher in EAC than in BE; and 3) The pooled HR for OS showed that decreased RUNX3 expression was associated with worse survival in esophageal cancer. ESCC and EAC are the two major histological types of esophageal carcinoma. ESCC occurs mostly in the endemic areas such as Asia and Africa, whereas EAC is the most common in North America and Europe . 
Although these two esophageal cancers are different in pathogenesis, epidemiology, tumor biology, prognosis and therapeutic strategies among the pooled patients in this meta analysis, RUNX3 methylation/low expression was associated with the pathogenesis of both types of esophageal cancer, indicating that RUNX3 might be responsible for different patterns of tumorigenesis. Patients with BE have an increased risk of developing EAC. The most established marker for the risk of developing EAC in BE is dysplasia . Because of not well defined natural history of low grade dysplasia (LGD), not well characterized histological classification of dysplasia, moreover especially extremely high interobserver variability for LGD, molecular biomarkers are needed to improve the risk classification of BE patients . It was accepted that BE is a precancerous tissue, and that aberrant promoter methylation occurs early in metaplasia before histological evidence of progression to cancer. Some underlying mechanisms for aberrant DNA methylation in Barrett's metaplasia have been noted. Frequent RUNX3 inactivation through promoter hypermethylation was reported in ESCC, EAC, Barrett's metaplasia and dysplasia . The result of this meta analysis indicated that the methylation of RUNX3 gene was significantly higher in both BE and EAC than in the squamous samples, that metaplasia BE was nearly as abnormal epigenetically as EAC. In other words, the aberrant methylation of RUNX3 gene is an early event, which most probably occurs independently of EAC . RUNX3 exerts pleiotropic effects during tumor suppression. It inhibits the oncogenic Wnt signaling pathway via formation of a complex with the TCF4-b-catenin complex and hampering it from binding to target genes such as c-myc and cyclin D1 . RUNX3 interacts with SMAD3/SMAD4 to regulate TGF-b-dependent inhibition of proliferation and apoptosis by activation of p21 and Bim. 
Sakakura et al found that frequent silencing of RUNX3 by promoter hypermethylation in esophageal squamous cell carcinomas is associated with radioresistance and poor prognosis. In other words, RUNX3 gene expression promotes radiosensitivity, whereas its inactivation facilitates radioresistance . They further confirmed that RUNX3 activates Bim expression and increases sensitivity to radiation and induces TGF-b-mediated apoptosis in ESCC cells, therefore functioning as a crucial determinant of radiosensitivity . Thus, induction of RUNX3 expression by overcoming gene silencing may enhance radiosensitivity against tumor which may have crucial clinical impact for esophageal cancer patients. It is also exciting that measurement of RUNX3 expression status in pretreatment specimens may predict radiosensitivity . Progression from BE to esophageal cancer appears to mirror the accumulation of genetic abnormalities, suggesting a stepwise progression of genetic changes in esophageal cancer. RUNX3, in combination of a panel of other genes that are inactivated by methylation, can be developed as biomarkers for various tissues. This is becoming a good strategy for risk stratification to predict neoplastic progression including esophageal cancer . Consistent results were shown in sensitivity analyses, and no evidence of publication bias was found. However, this study has several potential limitations. First, the possibility of information and selection biases and unidentified confounders could not be completely excluded because all of the included studies were observational. Second, the searching strategy was restricted to articles published in English and Chinese. Articles with potentially high-quality data that were published in other languages were not included because of anticipated difficulties in obtaining accurate medical translation. Hence, cautions should be taken when our findings are interpreted among the general populations. 
In conclusion, our meta-analysis showed RUNX3 may play an important role in esophageal cancer initiation and progression. Plasma levels of RUNX3 promoter hypermethylation may be a promising biomarker for the early diagnosis of esophagus squamous cell carcinoma . In addition, RUNX3 methylation is associated with an increased risk and worse survival in esophageal cancer patients. Further large-scale studies, especially multi-center and well-matched cohort research will provide more insight into the role of RUNX3 in the prognosis and clinical implementation of esophageal cancer patients. The fact that silencing of RUNX3 gene at the transcriptional level and functional inactivation at the protein level in esophagus cancer are strongly correlated with poor prognosis and may occur in early phase of tumor initiation, making it a promising target for therapeutic approaches. Maintaining RUNX3 expression under microenvironment stress conditions, either directly or indirectly or reversing RUNX3 silencing could be a new direction for drug discovery for esophageal cancers. Further, RUNX3 was reported to control Notch signaling which is tightly linked to cancer stem cell (CSCs) . Conventional chemotherapy can induce resistance to chemotherapeutic agents, and tumor regrowth mediated by CSCs, therefore targeting RUNX3 gene and its related signaling pathways could be another mechanism for therapeutic approaches for cancer treatment aiming at CSC elimination. Supporting Information Checklist S1 PRISMA checklist.
import {
  InstanceResult,
  SetInstanceTestsPayload,
  TestV670,
  UpdateInstanceResultsPayload,
} from '@src/types';

// Merge the tests stored when the instance was created (_createTestsPayload)
// with the results later reported via "updateTestResults", pairing records
// by their shared clientId.
export const mergeInstanceResults = (
  createTestsPayload: SetInstanceTestsPayload,
  updateInstanceResults: UpdateInstanceResultsPayload
) => {
  const { config, tests } = createTestsPayload;
  const createdTests = tests as TestV670[];

  const mergedTests = updateInstanceResults.tests.map((reported) => {
    // Fields from the reported result win over the originally-created ones.
    const created = createdTests.find(
      (candidate) => candidate.clientId === reported.clientId
    );
    return { ...created, ...reported, testId: reported.clientId };
  });

  const instanceResult: InstanceResult = {
    ...updateInstanceResults,
    cypressConfig: config,
    tests: mergedTests,
  };
  return instanceResult;
};
// IPRanges converts a Subnet into an array of IPRange. func (subnet *Subnet) IPRanges(site string) []*crimson.IPRange { ipRange := []*crimson.IPRange{} for _, pool := range subnet.pools { ipRange = append(ipRange, &crimson.IPRange{ Site: site, VlanId: subnet.vlanID, StartIp: pool.startIP, EndIp: pool.endIP, VlanAlias: subnet.vlanName, }) } return ipRange }
// NewMockAcpTranslatorLoop creates a new mock instance. func NewMockAcpTranslatorLoop(ctrl *gomock.Controller) *MockAcpTranslatorLoop { mock := &MockAcpTranslatorLoop{ctrl: ctrl} mock.recorder = &MockAcpTranslatorLoopMockRecorder{mock} return mock }
import os

from flask import json
from marshmallow import Schema, fields
from marshmallow.validate import Length, Range
from sqlalchemy import Column, String, ForeignKey, Integer, Float, Boolean
from sqlalchemy.orm import relationship

from src.db import Base
from src.i18n.models import i18n_create, I18NLocale, i18n_check

from .. import db
from ..shared.models import StringTypes


# ---- Country

class Country(Base):
    """Country; uses ISO 3166-1 country codes"""
    __tablename__ = 'places_country'
    # Two-letter ISO 3166-1 alpha-2 code (primary key).
    code = Column(String(2), primary_key=True)
    # i18n key under which the localized country name is stored.
    name_i18n = Column(StringTypes.I18N_KEY, ForeignKey('i18n_key.id'), nullable=False)
    key = relationship('I18NKey', backref='countries', lazy=True)

    def __repr__(self):
        return f"<Country(code={self.code},i18n_key='{self.name_i18n}')>"

    @classmethod
    def load_from_file(cls, file_name='country-codes.json'):
        """Seed the Country table (and supporting i18n locale/value rows)
        from a JSON data file in this package's `data` directory.

        Returns the number of countries newly added to the table.
        """
        count = 0
        file_path = os.path.abspath(os.path.join(__file__, os.path.pardir, 'data', file_name))
        with open(file_path, 'r') as fp:
            countries = json.load(fp)
            for country in countries:
                country_code = country['Code']
                country_name = country['Name']
                name_i18n = f'country.name.{country_code}'
                for locale in country['locales']:
                    locale_code = locale['locale_code']  # e.g., en-US
                    if not db.session.query(I18NLocale).get(locale_code):
                        # Don't have this locale code yet; register it.
                        db.session.add(I18NLocale(code=locale_code, desc=''))
                    if not i18n_check(name_i18n, locale_code):
                        # Don't have this country name in this locale yet.
                        i18n_create(name_i18n, locale_code, locale['name'],
                                    description=f"Country {country_name}")
                # Add to the Country table (skip codes already present).
                if not db.session.query(cls).filter_by(code=country_code).count():
                    db.session.add(cls(code=country_code, name_i18n=name_i18n))
                    count += 1
            db.session.commit()
            fp.close()
        return count


class CountrySchema(Schema):
    """Serialization schema for Country rows."""
    code = fields.String()
    name_i18n = fields.String()


# ---- Area

class Area(Base):
    """Generic area within country (e.g., state, province)"""
    __tablename__ = 'places_area'
    id = Column(Integer, primary_key=True)
    name = Column(StringTypes.MEDIUM_STRING, nullable=False)
    country_code = Column(String(2), ForeignKey('places_country.code'), nullable=False)
    addresses = relationship('Address', backref='areas', passive_deletes=True)
    country = relationship('Country', backref='areas', lazy=True)
    # Soft-delete flag; rows default to active.
    active = Column(Boolean, nullable=False, default=True)

    def __repr__(self):
        return f"<Area(name={self.name},Country Code='{self.country_code}')>"


class AreaSchema(Schema):
    """Serialization schema for Area rows."""
    id = fields.Integer(dump_only=True, required=True, validate=Range(min=1))
    name = fields.String(required=True, validate=Length(min=1))
    country_code = fields.String(required=True, validate=Length(min=1))
    active = fields.Boolean(missing=1)
    country = fields.Nested('CountrySchema')


# ---- Location

class Location(Base):
    """A place at a given Address, linked to events, assets, and images."""
    __tablename__ = 'places_location'
    id = Column(Integer, primary_key=True, nullable=False)
    description = Column(StringTypes.MEDIUM_STRING)
    address_id = Column(Integer, ForeignKey('places_address.id'), nullable=False)
    address = relationship('Address', back_populates='locations', lazy=True)
    events = relationship('Event', back_populates="location")
    assets = relationship('Asset', back_populates="location")
    images = relationship('ImageLocation', back_populates="location")
    active = Column(Boolean, nullable=False, default=True)

    def __repr__(self):
        # Build "<Location(id=...,attr=value,...)>" from whichever attributes exist.
        attributes = [f"id='{self.id}'"]
        for attr in ['description', "address_id"]:
            if hasattr(self, attr):
                value = getattr(self, attr)
                attributes.append(f"{attr}={value}")
        as_string = ",".join(attributes)
        return f"<Location({as_string})>"


class LocationSchema(Schema):
    """Serialization schema for Location rows."""
    id = fields.Integer(dump_only=True, required=False, validate=Range(min=1))
    description = fields.String(required=False)
    address_id = fields.Integer(required=True, validate=Range(min=1))
    active = fields.Boolean(missing=1)
    address = fields.Nested('AddressSchema')


# ---- Address

class Address(Base):
    """A street address within an Area and Country, with optional coordinates."""
    __tablename__ = 'places_address'
    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(StringTypes.MEDIUM_STRING, nullable=False)
    address = Column(StringTypes.LONG_STRING, nullable=False)
    city = Column(StringTypes.MEDIUM_STRING, nullable=False)
    # Deleting an Area cascades to its addresses at the DB level.
    area_id = Column(Integer, ForeignKey('places_area.id', ondelete='CASCADE'), nullable=False)
    country_code = Column(StringTypes.SHORT_STRING, ForeignKey('places_country.code'), nullable=False)
    latitude = Column(Float)
    longitude = Column(Float)
    # area = relationship('Area', backref='addresses', lazy=True)
    country = relationship('Country', backref='addresses', lazy=True)
    meetings = relationship('Meeting', back_populates='address', lazy=True)
    locations = relationship('Location', back_populates='address', lazy=True)
    active = Column(Boolean, nullable=False, default=True)

    def __repr__(self):
        # Build "<Address(id=...,attr=value,...)>" from whichever attributes exist.
        attributes = [f"id='{self.id}'"]
        for attr in ['name', 'address', 'city', 'area_id', 'country_code', 'latitude', 'longitude']:
            if hasattr(self, attr):
                value = getattr(self, attr)
                attributes.append(f"{attr}={value}")
        as_string = ",".join(attributes)
        return f"<Address({as_string})>"


class AddressSchema(Schema):
    """Serialization schema for Address rows."""
    id = fields.Integer(dump_only=True, required=False, validate=Range(min=1))
    name = fields.String(required=True, validate=Length(min=1))
    address = fields.String(required=True, validate=Length(min=1))
    city = fields.String(required=True, validate=Length(min=1))
    area_id = fields.Integer(required=True, validate=Range(min=1))
    country_code = fields.String(required=True)
    latitude = fields.Float()
    longitude = fields.Float()
    active = fields.Boolean(missing=1)
    area = fields.Nested('AreaSchema')
    country = fields.Nested('CountrySchema')
package models

import "github.com/jinzhu/gorm"

// Group is a gorm-persisted record for a group chat.
type Group struct {
	gorm.Model
	// GroupchatId is the external chat identifier (presumably a messaging
	// platform chat ID — confirm against callers).
	GroupchatId int64
	// LatestPollId tracks the most recent poll associated with this group.
	LatestPollId int
	// Activated marks whether the group is active; the database column
	// defaults to true. Pointer type distinguishes "unset" from false.
	Activated *bool `gorm:"default:true"`
}
// isCookieValid checks if the supplied cookie is valid for the given id and
// sequence number. If it is, it also returns the data originally encoded in
// the cookie when createCookie was created.
//
// The cookie layout is: hash(id, 0, 0) + seq + (timestamp << tsOffset | data),
// where the low bits carry hashMask-masked payload mixed with a second hash.
// This function reverses that construction and rejects cookies whose embedded
// timestamp is too old relative to the current one.
func (l *listenContext) isCookieValid(id types.TransportEndpointId, cookie seqnum.Value, seq seqnum.Value) (uint32, bool) {
	ts := timeStamp()
	// Strip the first-round hash and the sequence number, leaving
	// (timestamp << tsOffset) mixed with the payload in the low bits.
	v := uint32(cookie) - l.cookieHash(id, 0, 0) - uint32(seq)
	// Recover the timestamp encoded in the high bits of the cookie.
	cookieTS := v >> tsOffset
	// Reject stale cookies: the wrap-safe masked difference between the
	// current timestamp and the cookie's timestamp must be small.
	if ((ts - cookieTS) & tsMask) > maxTSDiff {
		return 0, false
	}
	// Remove the second-round hash (keyed by the recovered timestamp) and
	// mask down to the payload bits originally stored by createCookie.
	return (v - l.cookieHash(id, cookieTS, 1)) & hashMask, true
}
/** * Converts ES6 generator functions to valid ES3 code. This pass runs after all ES6 features * except for yield and generators have been transpiled. * * @author [email protected] (Matthew Loring) */ public class Es6RewriteGenerators implements NodeTraversal.Callback, HotSwapCompilerPass { private final AbstractCompiler compiler; // The current case statement onto which translated statements from the // body of a generator will be appended. private Node enclosingBlock; // The destination for vars defined in the body of a generator. private Node hoistRoot; // The body of the generator function currently being translated. private Node originalGeneratorBody; // The current statement being translated. private Node currentStatement; private static final String ITER_KEY = "$$iterator"; // The name of the variable that holds the state at which the generator // should resume execution after a call to yield or return. // The beginning state is 0 and the end state is -1. private static final String GENERATOR_STATE = "$jscomp$generator$state"; private static int generatorCaseCount; private static final String GENERATOR_DO_WHILE_INITIAL = "$jscomp$generator$first$do"; private static final String GENERATOR_YIELD_ALL_NAME = "$jscomp$generator$yield$all"; private static final String GENERATOR_YIELD_ALL_ENTRY = "$jscomp$generator$yield$entry"; private static final String GENERATOR_ARGUMENTS = "$jscomp$generator$arguments"; private static final String GENERATOR_THIS = "$jscomp$generator$this"; private static final String GENERATOR_NEXT_ARG = "$jscomp$generator$next$arg"; private static final String GENERATOR_THROW_ARG = "$jscomp$generator$throw$arg"; private Supplier<String> generatorCounter; private static final String GENERATOR_SWITCH_ENTERED = "$jscomp$generator$switch$entered"; private static final String GENERATOR_SWITCH_VAL = "$jscomp$generator$switch$val"; private static final String GENERATOR_FINALLY_JUMP = "$jscomp$generator$finally"; private static final String 
GENERATOR_ERROR = "$jscomp$generator$global$error"; private static final String GENERATOR_FOR_IN_ARRAY = "$jscomp$generator$forin$array"; private static final String GENERATOR_FOR_IN_VAR = "$jscomp$generator$forin$var"; private static final String GENERATOR_FOR_IN_ITER = "$jscomp$generator$forin$iter"; private static final String GENERATOR_LOOP_GUARD = "$jscomp$generator$loop$guard"; // Maintains a stack of numbers which identify the cases which mark the end of loops. These // are used to manage jump destinations for break and continue statements. private List<LoopContext> currentLoopContext; private List<ExceptionContext> currentExceptionContext; private boolean hasTranslatedTry; public Es6RewriteGenerators(AbstractCompiler compiler) { this.compiler = compiler; this.currentLoopContext = new ArrayList<>(); this.currentExceptionContext = new ArrayList<>(); generatorCounter = compiler.getUniqueNameIdSupplier(); } @Override public void process(Node externs, Node root) { NodeTraversal.traverse(compiler, root, this); } @Override public void hotSwapScript(Node scriptRoot, Node originalRoot) { NodeTraversal.traverse(compiler, scriptRoot, this); } @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { Node enclosing = NodeUtil.getEnclosingFunction(n); if (enclosing == null || !enclosing.isGeneratorFunction() || !NodeUtil.isLoopStructure(n) || NodeUtil.isForIn(n)) { return true; } Node guard = null, incr = null; switch (n.getType()) { case Token.FOR: guard = n.getFirstChild().getNext(); incr = guard.getNext(); break; case Token.WHILE: guard = n.getFirstChild(); incr = IR.empty(); break; case Token.DO: guard = n.getLastChild(); incr = IR.empty(); break; } if (!controlCanExit(guard) && !controlCanExit(incr)) { return true; } Node guardName = IR.name(GENERATOR_LOOP_GUARD + generatorCounter.get()); if (!guard.isEmpty()) { Node container = new Node(Token.BLOCK); n.replaceChild(guard, container); container.addChildToFront(IR.block(IR.exprResult(IR.assign( 
guardName.cloneTree(), guard.cloneTree())))); container.addChildToBack(guardName.cloneTree()); } if (!incr.isEmpty()) { n.addChildBefore(IR.block(IR.exprResult(incr.detachFromParent())), n.getLastChild()); } Node block = NodeUtil.getEnclosingType(n, Token.BLOCK); block.addChildToFront(IR.var(guardName)); return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { switch (n.getType()) { case Token.FUNCTION: if (n.isGeneratorFunction()) { generatorCaseCount = 0; visitGenerator(n, parent); } break; case Token.NAME: Node enclosing = NodeUtil.getEnclosingFunction(n); if (enclosing != null && enclosing.isGeneratorFunction() && n.matchesQualifiedName("arguments")) { n.setString(GENERATOR_ARGUMENTS); } break; case Token.THIS: enclosing = NodeUtil.getEnclosingFunction(n); if (enclosing != null && enclosing.isGeneratorFunction()) { n.getParent().replaceChild(n, IR.name(GENERATOR_THIS)); } break; case Token.YIELD: if (n.isYieldFor()) { visitYieldFor(n, parent); } else if (!parent.isExprResult()) { visitYieldExpr(n, parent); } else { visitYieldThrows(parent, parent.getParent()); } break; } } private void visitYieldThrows(Node n, Node parent) { Node ifThrows = IR.ifNode( IR.shne(IR.name(GENERATOR_THROW_ARG), IR.name("undefined")), IR.block(IR.throwNode(IR.name(GENERATOR_THROW_ARG)))); parent.addChildAfter(ifThrows, n); compiler.reportCodeChange(); } /** * Sample translation: * * <code> * var i = yield * gen(); * </code> * * is rewritten to: * * <code> * var $jscomp$generator$yield$all = gen(); * var $jscomp$generator$yield$entry; * while (!($jscomp$generator$yield$entry = * $jscomp$generator$yield$all.next($jscomp$generator$next$arg)).done) { * yield $jscomp$generator$yield$entry.value; * } * var i = $jscomp$generator$yield$entry.value; * </code> */ private void visitYieldFor(Node n, Node parent) { Node enclosingStatement = NodeUtil.getEnclosingStatement(n); Node generator = IR.var( IR.name(GENERATOR_YIELD_ALL_NAME), IR.call( NodeUtil.newQName(compiler, 
Es6ToEs3Converter.MAKE_ITER), n.removeFirstChild())); Node entryDecl = IR.var(IR.name(GENERATOR_YIELD_ALL_ENTRY)); Node assignIterResult = IR.assign( IR.name(GENERATOR_YIELD_ALL_ENTRY), IR.call(IR.getprop(IR.name(GENERATOR_YIELD_ALL_NAME), IR.string("next")), IR.name(GENERATOR_NEXT_ARG))); Node loopCondition = IR.not(IR.getprop(assignIterResult, IR.string("done"))); Node elemValue = IR.getprop(IR.name(GENERATOR_YIELD_ALL_ENTRY), IR.string("value")); Node yieldStatement = IR.exprResult(IR.yield(elemValue.cloneTree())); Node loop = IR.whileNode(loopCondition, IR.block(yieldStatement)); enclosingStatement.getParent().addChildBefore(generator, enclosingStatement); enclosingStatement.getParent().addChildBefore(entryDecl, enclosingStatement); enclosingStatement.getParent().addChildBefore(loop, enclosingStatement); if (parent.isExprResult()) { parent.detachFromParent(); } else { parent.replaceChild(n, elemValue); } visitYieldThrows(yieldStatement, yieldStatement.getParent()); compiler.reportCodeChange(); } private void visitYieldExpr(Node n, Node parent) { Node enclosingStatement = NodeUtil.getEnclosingStatement(n); Node yieldStatement = IR.exprResult( n.hasChildren() ? 
IR.yield(n.removeFirstChild()) : IR.yield()); Node yieldResult = IR.name(GENERATOR_NEXT_ARG + generatorCounter.get()); Node yieldResultDecl = IR.var(yieldResult.cloneTree(), IR.name(GENERATOR_NEXT_ARG)); parent.replaceChild(n, yieldResult); enclosingStatement.getParent().addChildBefore(yieldStatement, enclosingStatement); enclosingStatement.getParent().addChildBefore(yieldResultDecl, enclosingStatement); visitYieldThrows(yieldStatement, yieldStatement.getParent()); compiler.reportCodeChange(); } private void visitGenerator(Node n, Node parent) { hasTranslatedTry = false; Node genBlock = compiler.parseSyntheticCode(Joiner.on('\n').join( "function generatorBody() {", " var " + GENERATOR_STATE + " = " + generatorCaseCount + ";", " function $jscomp$generator$impl(" + GENERATOR_NEXT_ARG + ", ", " " + GENERATOR_THROW_ARG + ") {", " while (1) switch (" + GENERATOR_STATE + ") {", " case " + generatorCaseCount + ":", " default:", " return {value: undefined, done: true};", " }", " }", " return {", " " + ITER_KEY + ": function() { return this; },", " next: function(arg){ return $jscomp$generator$impl(arg, undefined); },", " throw: function(arg){ return $jscomp$generator$impl(undefined, arg); },", " }", "}" )).getFirstChild().getLastChild().detachFromParent(); generatorCaseCount++; originalGeneratorBody = n.getLastChild(); n.replaceChild(originalGeneratorBody, genBlock); n.setIsGeneratorFunction(false); //TODO(mattloring): remove this suppression once we can optimize the switch statement to // remove unused cases. JSDocInfoBuilder builder; if (n.getJSDocInfo() == null) { builder = new JSDocInfoBuilder(true); } else { builder = JSDocInfoBuilder.copyFrom(n.getJSDocInfo()); } //TODO(mattloring): copy existing suppressions. builder.recordSuppressions(ImmutableSet.of("uselessCode")); JSDocInfo info = builder.build(n); n.setJSDocInfo(info); // Set state to the default after the body of the function has completed. 
originalGeneratorBody.addChildToBack( IR.exprResult(IR.assign(IR.name(GENERATOR_STATE), IR.number(-1)))); enclosingBlock = getUnique(genBlock, Token.CASE).getLastChild(); hoistRoot = getUnique(genBlock, Token.VAR); if (NodeUtil.isNameReferenced(originalGeneratorBody, GENERATOR_ARGUMENTS)) { hoistRoot.getParent().addChildAfter( IR.var(IR.name(GENERATOR_ARGUMENTS), IR.name("arguments")), hoistRoot); } if (NodeUtil.isNameReferenced(originalGeneratorBody, GENERATOR_THIS)) { hoistRoot.getParent().addChildAfter( IR.var(IR.name(GENERATOR_THIS), IR.thisNode()), hoistRoot); } while (originalGeneratorBody.hasChildren()) { currentStatement = originalGeneratorBody.removeFirstChild(); boolean advanceCase = translateStatementInOriginalBody(); if (advanceCase) { int caseNumber; if (currentStatement.isGeneratorMarker()) { caseNumber = (int) currentStatement.getDouble(); } else { caseNumber = generatorCaseCount; generatorCaseCount++; } Node oldCase = enclosingBlock.getParent(); Node newCase = IR.caseNode(IR.number(caseNumber), IR.block()); enclosingBlock = newCase.getLastChild(); if (oldCase.isTry()) { oldCase = oldCase.getParent().getParent(); if (!currentExceptionContext.isEmpty()) { Node newTry = IR.tryCatch(IR.block(), currentExceptionContext.get(0).catchBlock.cloneTree()); newCase.getLastChild().addChildToBack(newTry); enclosingBlock = newCase.getLastChild().getLastChild().getFirstChild(); } } oldCase.getParent().addChildAfter(newCase, oldCase); } } parent.useSourceInfoIfMissingFromForTree(parent); compiler.reportCodeChange(); } /** Returns true if a new case node should be added */ private boolean translateStatementInOriginalBody() { if (currentStatement.isVar()) { visitVar(); return false; } else if (currentStatement.isGeneratorMarker()) { visitGeneratorMarker(); return true; } else if (currentStatement.isFunction()) { visitFunctionStatement(); return false; } else if (currentStatement.isBlock()) { visitBlock(); return false; } else if (controlCanExit(currentStatement)) { 
switch (currentStatement.getType()) { case Token.WHILE: case Token.DO: case Token.FOR: if (NodeUtil.isForIn(currentStatement)) { visitForIn(); return false; } visitLoop(null); return false; case Token.LABEL: visitLabel(); return false; case Token.SWITCH: visitSwitch(); return false; case Token.IF: if (!currentStatement.isGeneratorSafe()) { visitIf(); return false; } break; case Token.TRY: visitTry(); return false; case Token.EXPR_RESULT: if (currentStatement.getFirstChild().isYield()) { visitYieldExprResult(); return true; } break; case Token.RETURN: visitReturn(); return false; case Token.CONTINUE: visitContinue(); return false; case Token.BREAK: if (!currentStatement.isGeneratorSafe()) { visitBreak(); return false; } break; case Token.THROW: visitThrow(); return false; default: // We never want to copy over an untranslated statement for which control exits. throw new RuntimeException( "Untranslatable control-exiting statement in generator function: " + Token.name(currentStatement.getType())); } } // In the default case, add the statement to the current case block unchanged. 
enclosingBlock.addChildToBack(currentStatement); return false; } private void visitFunctionStatement() { hoistRoot.getParent().addChildAfter(currentStatement, hoistRoot); } private void visitTry() { Node tryBody = currentStatement.getFirstChild(); Node caughtError; Node catchBody; Node catchBlock = tryBody.getNext(); if (catchBlock.hasChildren()) { // There is a catch block caughtError = catchBlock.getFirstChild().removeFirstChild(); catchBody = catchBlock.getFirstChild().removeFirstChild(); } else { caughtError = IR.name(GENERATOR_ERROR + "temp"); catchBody = IR.block(IR.throwNode(caughtError.cloneTree())); catchBody.getFirstChild().setGeneratorSafe(true); } Node finallyBody = catchBlock.getNext(); int catchStartState = generatorCaseCount++; Node catchStart = IR.number(catchStartState); catchStart.setGeneratorMarker(true); Node errorNameGenerated = IR.name("$jscomp$generator$" + caughtError.getString()); originalGeneratorBody.addChildToFront(catchStart); originalGeneratorBody.addChildAfter(catchBody, catchStart); Node assignError = IR.assign(IR.name(GENERATOR_ERROR), errorNameGenerated.cloneTree()); Node newCatchBody = IR.block(IR.exprResult(assignError), createStateUpdate(catchStartState), createSafeBreak()); Node newCatch = IR.catchNode(errorNameGenerated, newCatchBody); currentExceptionContext.add(0, new ExceptionContext(catchStartState, newCatch)); if (finallyBody != null) { Node finallyName = IR.name(GENERATOR_FINALLY_JUMP + generatorCounter.get()); int finallyStartState = generatorCaseCount++; Node finallyStart = IR.number(finallyStartState); finallyStart.setGeneratorMarker(true); int finallyEndState = generatorCaseCount++; Node finallyEnd = IR.number(finallyEndState); finallyEnd.setGeneratorMarker(true); NodeTraversal.traverse(compiler, tryBody, new ControlExitsCheck(finallyName, finallyStartState)); NodeTraversal.traverse(compiler, catchBody, new ControlExitsCheck(finallyName, finallyStartState)); 
originalGeneratorBody.addChildToFront(tryBody.detachFromParent()); originalGeneratorBody.addChildAfter(finallyStart, catchBody); originalGeneratorBody.addChildAfter(finallyBody.detachFromParent(), finallyStart); originalGeneratorBody.addChildAfter(finallyEnd, finallyBody); originalGeneratorBody.addChildToFront(IR.var(finallyName.cloneTree())); finallyBody.addChildToBack(IR.exprResult( IR.assign(IR.name(GENERATOR_STATE), finallyName.cloneTree()))); finallyBody.addChildToBack(createSafeBreak()); tryBody.addChildToBack(IR.exprResult( IR.assign(finallyName.cloneTree(), IR.number(finallyEndState)))); tryBody.addChildToBack(createStateUpdate(finallyStartState)); tryBody.addChildToBack(createSafeBreak()); catchBody.addChildToBack(IR.exprResult( IR.assign(finallyName.cloneTree(), IR.number(finallyEndState)))); } else { int catchEndState = generatorCaseCount++; Node catchEnd = IR.number(catchEndState); catchEnd.setGeneratorMarker(true); originalGeneratorBody.addChildAfter(catchEnd, catchBody); tryBody.addChildToBack(createStateUpdate(catchEndState)); tryBody.addChildToBack(createSafeBreak()); originalGeneratorBody.addChildToFront(tryBody.detachFromParent()); } catchBody.addChildToFront(IR.var(caughtError, IR.name(GENERATOR_ERROR))); if (enclosingBlock.getParent().isTry()) { enclosingBlock = enclosingBlock.getParent().getParent(); } enclosingBlock.addChildToBack( IR.tryCatch(IR.block(), newCatch)); enclosingBlock = enclosingBlock.getLastChild().getFirstChild(); if (!hasTranslatedTry) { hasTranslatedTry = true; hoistRoot.getParent().addChildAfter(IR.var(IR.name(GENERATOR_ERROR)), hoistRoot); } } private void visitContinue() { Preconditions.checkState(currentLoopContext.get(0).continueCase != -1); int continueCase; if (currentStatement.hasChildren()) { continueCase = getLoopContext( currentStatement.removeFirstChild().getString()).continueCase; } else { continueCase = currentLoopContext.get(0).continueCase; } enclosingBlock.addChildToBack( createStateUpdate(continueCase)); 
    enclosingBlock.addChildToBack(createSafeBreak());
  }

  /**
   * {@code throw} statements set the generator state to done (-1) so the
   * generator cannot be resumed, then rethrow unchanged.
   */
  private void visitThrow() {
    enclosingBlock.addChildToBack(createStateUpdate(-1));
    enclosingBlock.addChildToBack(currentStatement);
  }

  /**
   * {@code break} statements are rewritten as a jump to the case marking the
   * end of the enclosing (possibly labeled) loop, followed by a generator-safe
   * break out of the state-machine switch.
   */
  private void visitBreak() {
    int breakCase;
    if (currentStatement.hasChildren()) {
      // Labeled break: resolve the label against the loop-context stack.
      LoopContext loop = getLoopContext(currentStatement.removeFirstChild().getString());
      if (loop == null) {
        compiler.report(JSError.make(currentStatement, Es6ToEs3Converter.CANNOT_CONVERT_YET,
            "Breaking to a label that is not a loop"));
        return;
      }
      breakCase = loop.breakCase;
    } else {
      // Unlabeled break targets the innermost loop (top of the stack).
      breakCase = currentLoopContext.get(0).breakCase;
    }
    enclosingBlock.addChildToBack(
        createStateUpdate(breakCase));
    enclosingBlock.addChildToBack(createSafeBreak());
  }

  /**
   * Labeled loops are translated with their label threaded through visitLoop;
   * a label on a non-loop statement is simply unwrapped and re-queued.
   */
  private void visitLabel() {
    Node labelName = currentStatement.removeFirstChild();
    Node child = currentStatement.removeFirstChild();
    if (NodeUtil.isLoopStructure(child)) {
      currentStatement = child;
      visitLoop(labelName.getString());
    } else {
      originalGeneratorBody.addChildToFront(child);
    }
  }

  /**
   * If we reach the marker corresponding to the end of the current loop,
   * pop the loop information off of our stack. Likewise, pop exception
   * context when reaching the marker that starts its catch handling.
   */
  private void visitGeneratorMarker() {
    if (!currentLoopContext.isEmpty()
        && currentLoopContext.get(0).breakCase == currentStatement.getDouble()) {
      currentLoopContext.remove(0);
    }
    if (!currentExceptionContext.isEmpty()
        && currentExceptionContext.get(0).catchStartCase == currentStatement.getDouble()) {
      currentExceptionContext.remove(0);
    }
  }

  /**
   * {@code if} statements have their bodies lifted to the function top level
   * and use a case statement to jump over the body if the condition of the
   * if statement is false.
*/ private void visitIf() { Node condition = currentStatement.removeFirstChild(); Node ifBody = currentStatement.removeFirstChild(); boolean hasElse = currentStatement.hasChildren(); int ifEndState = generatorCaseCount++; Node invertedConditional = IR.ifNode(IR.not(condition), IR.block(createStateUpdate(ifEndState), createSafeBreak())); invertedConditional.setGeneratorSafe(true); Node endIf = IR.number(ifEndState); endIf.setGeneratorMarker(true); originalGeneratorBody.addChildToFront(invertedConditional); originalGeneratorBody.addChildAfter(ifBody, invertedConditional); originalGeneratorBody.addChildAfter(endIf, ifBody); if (hasElse) { Node elseBlock = currentStatement.removeFirstChild(); int elseEndState = generatorCaseCount++; Node endElse = IR.number(elseEndState); endElse.setGeneratorMarker(true); ifBody.addChildToBack(createStateUpdate(elseEndState)); ifBody.addChildToBack(createSafeBreak()); originalGeneratorBody.addChildAfter(elseBlock, endIf); originalGeneratorBody.addChildAfter(endElse, elseBlock); } } /** * Switch statements are translated into a series of if statements. * * <code> * switch (i) { * case 1: * s; * case 2: * t; * ... * } * </code> * * is eventually rewritten to: * * <code> * $jscomp$generator$switch$entered0 = false; * if ($jscomp$generator$switch$entered0 || i == 1) { * $jscomp$generator$switch$entered0 = true; * s; * } * if ($jscomp$generator$switch$entered0 || i == 2) { * $jscomp$generator$switch$entered0 = true; * t; * } * ... 
* * </code> */ private void visitSwitch() { Node didEnter = IR.name(GENERATOR_SWITCH_ENTERED + generatorCounter.get()); Node didEnterDecl = IR.var(didEnter.cloneTree(), IR.falseNode()); Node switchVal = IR.name(GENERATOR_SWITCH_VAL + generatorCounter.get()); Node switchValDecl = IR.var(switchVal.cloneTree(), currentStatement.removeFirstChild()); originalGeneratorBody.addChildToFront(didEnterDecl); originalGeneratorBody.addChildAfter(switchValDecl, didEnterDecl); Node insertionPoint = switchValDecl; while (currentStatement.hasChildren()) { Node currCase = currentStatement.removeFirstChild(); Node equivBlock; currCase.getLastChild().addChildToFront( IR.exprResult(IR.assign(didEnter.cloneTree(), IR.trueNode()))); if (currCase.isDefaultCase()) { if (currentStatement.hasChildren()) { compiler.report(JSError.make(currentStatement, Es6ToEs3Converter.CANNOT_CONVERT_YET, "Default case as intermediate case")); } equivBlock = IR.block(currCase.removeFirstChild()); } else { equivBlock = IR.ifNode(IR.or(didEnter.cloneTree(), IR.sheq(switchVal.cloneTree(), currCase.removeFirstChild())), currCase.removeFirstChild()); } originalGeneratorBody.addChildAfter(equivBlock, insertionPoint); insertionPoint = equivBlock; } int breakTarget = generatorCaseCount++; int cont = currentLoopContext.isEmpty() ? -1 : currentLoopContext.get(0).continueCase; currentLoopContext.add(0, new LoopContext(breakTarget, cont, null)); Node breakCase = IR.number(breakTarget); breakCase.setGeneratorMarker(true); originalGeneratorBody.addChildAfter(breakCase, insertionPoint); } /** * Blocks are flattened by lifting all children to the body of the original generator. 
*/ private void visitBlock() { if (currentStatement.getChildCount() == 0) { return; } Node insertionPoint = currentStatement.removeFirstChild(); originalGeneratorBody.addChildToFront(insertionPoint); for (Node child = currentStatement.removeFirstChild(); child != null; child = currentStatement.removeFirstChild()) { originalGeneratorBody.addChildAfter(child, insertionPoint); insertionPoint = child; } } /** * For in loops are eventually translated to a for in loop which produces an array of * values iterated over followed by a plain for loop which performs the logic * contained in the body of the original for in. * * <code> * for (i in j) { * s; * } * </code> * * is eventually rewritten to: * * <code> * $jscomp$arr = [] * $jscomp$iter = j * for (i in $jscomp$iter) { * $jscomp$arr.push(i); * } * for ($jscomp$var = 0; $jscomp$var < $jscomp$arr.length; $jscomp$var++) { * i = $jscomp$arr[$jscomp$var]; * if (!(i in $jscomp$iter)) { * continue; * } * s; * } * </code> */ private void visitForIn() { Node variable = currentStatement.removeFirstChild(); Node iterable = currentStatement.removeFirstChild(); Node body = currentStatement.removeFirstChild(); String loopId = generatorCounter.get(); Node arrayName = IR.name(GENERATOR_FOR_IN_ARRAY + loopId); Node varName = IR.name(GENERATOR_FOR_IN_VAR + loopId); Node iterableName = IR.name(GENERATOR_FOR_IN_ITER + loopId); if (variable.isVar()) { variable = variable.removeFirstChild(); } body.addChildToFront(IR.ifNode( IR.not(IR.in(variable.cloneTree(), iterableName.cloneTree())), IR.block(IR.continueNode()))); body.addChildToFront(IR.var(variable.cloneTree(), IR.getelem(arrayName.cloneTree(), varName.cloneTree()))); hoistRoot.getParent().addChildAfter(IR.var(arrayName.cloneTree()), hoistRoot); hoistRoot.getParent().addChildAfter(IR.var(varName.cloneTree()), hoistRoot); hoistRoot.getParent().addChildAfter(IR.var(iterableName.cloneTree()), hoistRoot); Node arrayDef = IR.exprResult(IR.assign(arrayName.cloneTree(), IR.arraylit())); Node 
iterDef = IR.exprResult(IR.assign(iterableName.cloneTree(), iterable)); Node newForIn = IR.forIn(variable.cloneTree(), iterableName, IR.block(IR.exprResult( IR.call(IR.getprop(arrayName.cloneTree(), IR.string("push")), variable)))); Node newFor = IR.forNode(IR.assign(varName.cloneTree(), IR.number(0)), IR.lt(varName.cloneTree(), IR.getprop(arrayName, IR.string("length"))), IR.inc(varName, true), body); enclosingBlock.addChildToBack(arrayDef); enclosingBlock.addChildToBack(iterDef); enclosingBlock.addChildToBack(newForIn); originalGeneratorBody.addChildToFront(newFor); } /** * Loops are eventually translated to a case statement followed by an if statement * containing the loop body. The if statement finishes by * jumping back to the initial case statement to enter the loop again. * In the case of for and do loops, initialization and post loop statements are inserted * before and after the if statement. Below is a sample translation for a while loop: * * <code> * while (b) { * s; * } * </code> * * is eventually rewritten to: * * <code> * case n: * if (b) { * s; * state = n; * break; * } * </code> */ private void visitLoop(String label) { Node initializer; Node guard; Node incr; Node body; // Used only in the case of DO loops. 
Node firstEntry; if (currentStatement.isWhile()) { guard = currentStatement.removeFirstChild(); firstEntry = null; body = currentStatement.removeFirstChild(); initializer = IR.empty(); incr = IR.empty(); } else if (currentStatement.isFor()) { initializer = currentStatement.removeFirstChild(); if (initializer.isAssign()) { initializer = IR.exprResult(initializer); } guard = currentStatement.removeFirstChild(); firstEntry = null; incr = currentStatement.removeFirstChild(); body = currentStatement.removeFirstChild(); } else { Preconditions.checkState(currentStatement.isDo()); firstEntry = IR.name(GENERATOR_DO_WHILE_INITIAL); initializer = IR.var(firstEntry.cloneTree(), IR.trueNode()); incr = IR.assign(firstEntry.cloneTree(), IR.falseNode()); body = currentStatement.removeFirstChild(); guard = currentStatement.removeFirstChild(); } Node condition, prestatement; if (guard.isBlock()) { prestatement = guard.removeFirstChild(); condition = guard.removeFirstChild(); } else { prestatement = IR.block(); condition = guard; } if (currentStatement.isDo()) { condition = IR.or(firstEntry, condition); } int loopBeginState = generatorCaseCount++; int continueState = loopBeginState; if (!incr.isEmpty()) { continueState = generatorCaseCount++; Node continueCase = IR.number(continueState); continueCase.setGeneratorMarker(true); body.addChildToBack(continueCase); body.addChildToBack(incr.isBlock() ? incr : IR.exprResult(incr)); } currentLoopContext.add(0, new LoopContext(generatorCaseCount, continueState, label)); Node beginCase = IR.number(loopBeginState); beginCase.setGeneratorMarker(true); Node conditionalBranch = IR.ifNode(condition.isEmpty() ? 
IR.trueNode() : condition, body); Node setStateLoopStart = createStateUpdate(loopBeginState); Node breakToStart = createSafeBreak(); originalGeneratorBody.addChildToFront(conditionalBranch); if (!prestatement.isEmpty()) { originalGeneratorBody.addChildToFront(prestatement); } originalGeneratorBody.addChildToFront(beginCase); if (!initializer.isEmpty()) { originalGeneratorBody.addChildToFront(initializer); } body.addChildToBack(setStateLoopStart); body.addChildToBack(breakToStart); } /** * {@code var} statements are hoisted into the closure containing the iterator * to preserve their state accross * multiple calls to next(). */ private void visitVar() { Node name = currentStatement.removeFirstChild(); while (name != null) { if (name.hasChildren()) { enclosingBlock.addChildToBack( IR.exprResult(IR.assign(name, name.removeFirstChild()))); } hoistRoot.getParent().addChildAfter(IR.var(name.cloneTree()), hoistRoot); name = currentStatement.removeFirstChild(); } } /** * {@code yield} sets the state so that execution resume at the next statement * when the function is next called and then returns an iterator result with * the desired value. */ private void visitYieldExprResult() { enclosingBlock.addChildToBack(createStateUpdate()); Node yield = currentStatement.getFirstChild(); Node value = yield.hasChildren() ? yield.removeFirstChild() : IR.name("undefined"); enclosingBlock.addChildToBack(IR.returnNode( createIteratorResult(value, false))); } /** * {@code return} statements are translated to set the state to done before returning the * desired value. */ private void visitReturn() { enclosingBlock.addChildToBack(createStateUpdate(-1)); enclosingBlock.addChildToBack(IR.returnNode( createIteratorResult(currentStatement.hasChildren() ? 
            currentStatement.removeFirstChild() : IR.name("undefined"), true)));
  }

  /**
   * Builds {@code $jscomp$generator$state = <next case>} using the counter's
   * current value, i.e. the state of the case about to be generated.
   */
  private static Node createStateUpdate() {
    return IR.exprResult(
        IR.assign(IR.name(GENERATOR_STATE), IR.number(generatorCaseCount)));
  }

  /** Builds {@code $jscomp$generator$state = state} for an explicit target. */
  private static Node createStateUpdate(int state) {
    return IR.exprResult(
        IR.assign(IR.name(GENERATOR_STATE), IR.number(state)));
  }

  /** Builds the iterator-protocol result object {@code {value: ..., done: ...}}. */
  private static Node createIteratorResult(Node value, boolean done) {
    return IR.objectlit(
        IR.propdef(IR.stringKey("value"), value),
        IR.propdef(IR.stringKey("done"), done ? IR.trueNode() : IR.falseNode()));
  }

  /**
   * Builds a {@code break} marked generator-safe so later passes know it
   * exits the state-machine switch rather than user code.
   */
  private static Node createSafeBreak() {
    Node breakNode = IR.breakNode();
    breakNode.setGeneratorSafe(true);
    return breakNode;
  }

  /**
   * Builds the jump sequence used when control exits a try with a finally:
   * record a fresh return-point state in {@code finallyName}, jump to the
   * finally block, and emit the marker case for the return point.
   */
  private static Node createFinallyJumpBlock(Node finallyName, int finallyStartState) {
    int jumpPoint = generatorCaseCount++;
    Node setReturnState = IR.exprResult(
        IR.assign(finallyName.cloneTree(), IR.number(jumpPoint)));
    Node toFinally = createStateUpdate(finallyStartState);
    Node returnPoint = IR.number(jumpPoint);
    returnPoint.setGeneratorMarker(true);
    Node returnBlock = IR.block(setReturnState, toFinally, createSafeBreak());
    returnBlock.addChildToBack(returnPoint);
    return returnBlock;
  }

  /**
   * Finds the innermost loop context carrying the given label, or null if the
   * label does not name a tracked loop.
   */
  private LoopContext getLoopContext(String label) {
    for (int i = 0; i < currentLoopContext.size(); i++) {
      if (label.equals(currentLoopContext.get(i).label)) {
        return currentLoopContext.get(i);
      }
    }
    return null;
  }

  /**
   * Returns whether control can exit n other than by falling off the end
   * (break/continue/throw/return/yield), per ControlExitsCheck.
   */
  private boolean controlCanExit(Node n) {
    ControlExitsCheck exits = new ControlExitsCheck();
    NodeTraversal.traverse(compiler, n, exits);
    return exits.didExit();
  }

  /**
   * Finds the only child of the provided node of the given type.
   */
  private Node getUnique(Node node, int type) {
    List<Node> matches = new ArrayList<>();
    insertAll(node, type, matches);
    Preconditions.checkState(matches.size() == 1);
    return matches.get(0);
  }

  /**
   * Adds all children of the provided node of the given type to given list.
   */
*/ private void insertAll(Node node, int type, List<Node> matchingNodes) { if (node.getType() == type) { matchingNodes.add(node); } for (Node c = node.getFirstChild(); c != null; c = c.getNext()) { insertAll(c, type, matchingNodes); } } class ControlExitsCheck implements NodeTraversal.Callback { int continueCatchers = 0; int breakCatchers = 0; int throwCatchers = 0; List<String> labels = new ArrayList<>(); boolean exited = false; boolean addJumps = false; private Node finallyName; private int finallyStartState; public ControlExitsCheck(Node finallyName, int finallyStartState) { this.finallyName = finallyName; this.finallyStartState = finallyStartState; addJumps = true; } public ControlExitsCheck() { addJumps = false; } @Override public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) { switch (n.getType()) { case Token.FUNCTION: return false; case Token.LABEL: labels.add(0, n.getFirstChild().getString()); break; case Token.DO: case Token.WHILE: case Token.FOR: continueCatchers++; breakCatchers++; break; case Token.SWITCH: breakCatchers++; break; case Token.BLOCK: parent = n.getParent(); if (parent != null && parent.isTry() && parent.getFirstChild() == n && n.getNext().hasChildren()) { throwCatchers++; } break; case Token.BREAK: if (!n.isGeneratorSafe() && ((breakCatchers == 0 && !n.hasChildren()) || (n.hasChildren() && !labels.contains(n.getFirstChild().getString())))) { exited = true; if (addJumps) { parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n); } } break; case Token.CONTINUE: if (continueCatchers == 0 || (n.hasChildren() && !labels.contains(n.getFirstChild().getString()))) { exited = true; if (addJumps) { parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n); } } break; case Token.THROW: if (throwCatchers == 0) { exited = true; if (addJumps && !n.isGeneratorSafe()) { parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n); } } break; case Token.RETURN: 
exited = true; if (addJumps) { parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n); } break; case Token.YIELD: exited = true; break; } return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { switch (n.getType()) { case Token.LABEL: labels.remove(0); break; case Token.DO: case Token.WHILE: case Token.FOR: continueCatchers--; breakCatchers--; break; case Token.SWITCH: breakCatchers--; break; case Token.BLOCK: parent = n.getParent(); if (parent != null && parent.isTry() && parent.getFirstChild() == n && n.getNext().hasChildren()) { throwCatchers--; } break; } } public boolean didExit() { return exited; } } private class LoopContext { int breakCase; int continueCase; String label; public LoopContext(int breakCase, int continueCase, String label) { this.breakCase = breakCase; this.continueCase = continueCase; this.label = label; } } private class ExceptionContext { int catchStartCase; Node catchBlock; public ExceptionContext(int catchStartCase, Node catchBlock) { this.catchStartCase = catchStartCase; this.catchBlock = catchBlock; } } }
package com.qingbo.ginkgo.ygb.common.util;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

/**
 * Static helper around a single shared Apache HttpClient used to POST
 * form-encoded requests to internal services and parse the JSON response.
 * All timeouts and pool sizes come from {@code GinkgoConfig} properties;
 * the client is closed via a JVM shutdown hook.
 */
public class ServiceRequester {
    public static Charset UTF8 = Charset.forName("utf-8");
    public static String serviceGinkgo = GinkgoConfig.getProperty("service.ginkgo", "http://localhost:8080/service/v1/");
    public static String serviceWboss = GinkgoConfig.getProperty("service.wboss", "http://localhost:8080/service/v1/");

    private static int connectionRequestTimeout = NumberUtil.parseInt(GinkgoConfig.getProperty("service.requester.connectionRequestTimeout"), 15000);
    private static int connectionTimeout = NumberUtil.parseInt(GinkgoConfig.getProperty("service.requester.connectionTimeout"), 15000);
    private static int socketTimeout = NumberUtil.parseInt(GinkgoConfig.getProperty("service.requester.socketTimeout"), 30000);
    private static int maxConnTotal = NumberUtil.parseInt(GinkgoConfig.getProperty("service.requester.maxConnTotal"), 256);
    private static int maxConnPerRoute = NumberUtil.parseInt(GinkgoConfig.getProperty("service.requester.maxConnPerRoute"), maxConnTotal);

    private static RequestConfig requestConfig = RequestConfig.custom()
            .setConnectionRequestTimeout(connectionRequestTimeout)
            .setConnectTimeout(connectionTimeout)
            .setSocketTimeout(socketTimeout)
            .build();
    private static CloseableHttpClient httpClient =
            HttpClients.custom()
            .setDefaultRequestConfig(requestConfig)
            .setMaxConnTotal(maxConnTotal)
            .setMaxConnPerRoute(maxConnPerRoute)
            .build();

    private static String secret = GinkgoConfig.getProperty("service.controller.secret", "zbnxex5oe9cz");
    private static Logger logger = LoggerFactory.getLogger(ServiceRequester.class);

    static {
        // Close the pooled client cleanly when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    logger.info("shutdown ServiceRequestor httpclient");
                    httpClient.close();
                } catch (IOException e) {
                    logger.warn("fail to shutdown ServiceRequester httpclient", e);
                }
            }
        });
    }

    /**
     * POSTs the given form parameters to a service and returns the parsed
     * JSON body. (Translated from the original Chinese comment.)
     *
     * @param service absolute service URL
     * @param params  form fields to send; may be {@code null} for an empty body
     * @return the parsed JSON on HTTP 200, or {@code null} on transport error,
     *         non-200 status, or an unparseable body
     */
    public static JSONObject request(String service, Map<String, String> params) {
        JSONObject json = null;
        HttpPost post = null;
        try {
            post = new HttpPost(service);
            logger.info("request: "+service);
            logger.info("with params: "+JSON.toJSONString(params));
            if (params != null) {
                List<NameValuePair> nvps = new ArrayList<NameValuePair>();
                // entrySet avoids a second hash lookup per key (was keySet()+get()).
                for (Map.Entry<String, String> entry : params.entrySet()) {
                    nvps.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
                }
                // Use the shared UTF8 Charset constant instead of the "utf-8"
                // string literal (same encoding, no checked exception).
                post.setEntity(new UrlEncodedFormEntity(nvps, UTF8));
            }
            HttpResponse response = httpClient.execute(post);
            int status = response.getStatusLine().getStatusCode();
            String content = EntityUtils.toString(response.getEntity(), UTF8);
            logger.info("status: "+status);
            logger.info("content: "+content);
            if(200 == status) {
                try {
                    json = JSON.parseObject(content);
                }catch(Exception e) {
                    logger.warn("fail to parse service response to json: "+content);
                    throw new RuntimeException(e);
                }
            }else {
                logger.warn("fail to request service: "+service+", with status: "+status);
            }
        } catch (IOException e) {
            // Abort on I/O failure too, so a half-read connection is not
            // returned to the pool (previously only the RuntimeException
            // path aborted).
            if (post != null) {
                post.abort();
            }
            logger.warn(e.getMessage(), e);
        } catch (RuntimeException e) {
            if (post != null) {
                post.abort();
            }
            logger.warn(e.getMessage(), e);
        } finally {
            if (post != null) {
                post.releaseConnection();
            }
        }
        return json;
    }

    /** @return a fresh mutable map pre-populated with the shared service secret. */
    public static Map<String, String> paramsWithSecret() {
        Map<String, String> params = new HashMap<>();
        params.put("secret", secret);
        return params;
    }

    /**
     * Serializes a bean to JSON and back into a flat String-to-String map,
     * dropping null values. Non-string values become their toString() form.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, String> convert(Object entity){
        Map<String, String> params = new HashMap<>();
        String json = JSON.toJSONString(entity);
        Map<String, ?> map = JSON.parseObject(json, Map.class);
        for(Map.Entry<String, ?> entry : map.entrySet()) {
            Object value = entry.getValue();
            if(value!=null) params.put(entry.getKey(), value.toString());
        }
        return params;
    }
}
<filename>src/FixedPassageNode.cpp<gh_stars>1-10
/*==========================================================================
 * Copyright (c) 2004 University of Massachusetts. All Rights Reserved.
 *
 * Use of the Lemur Toolkit for Language Modeling and Information Retrieval
 * is subject to the terms of the software license set forth in the LICENSE
 * file included with this software, and also available at
 * http://www.lemurproject.org/license.html
 *
 *==========================================================================
 */

//
// FixedPassageNode
//
// 23 February 2005 -- tds
//
// Belief node that scores a document as a sequence of fixed-size,
// possibly overlapping passages (window size `_windowSize`, stride
// `_increment`), delegating the actual scoring to `_child`.
//

#include "indri/FixedPassageNode.hpp"
#include "indri/Annotator.hpp"
#include "lemur/lemur-compat.hpp"

indri::infnet::FixedPassageNode::FixedPassageNode( const std::string& name, indri::infnet::BeliefNode* child, int windowSize, int increment ) :
  _name(name),
  _child(child),
  _windowSize(windowSize),
  _increment(increment)
{
}

// Candidate documents are exactly those of the wrapped child node.
lemur::api::DOCID_T indri::infnet::FixedPassageNode::nextCandidateDocument() {
  return _child->nextCandidateDocument();
}

double indri::infnet::FixedPassageNode::maximumBackgroundScore() {
  return INDRI_TINY_SCORE;
}

double indri::infnet::FixedPassageNode::maximumScore() {
  return INDRI_HUGE_SCORE;
}

//
// score
//
// Splits `extent` into fixed passages, scores each matching passage with
// the child, and reports each child result re-labelled with the passage's
// [begin,end) boundaries.
//

const indri::utility::greedy_vector<indri::api::ScoredExtentResult>& indri::infnet::FixedPassageNode::score( lemur::api::DOCID_T documentID, indri::index::Extent &extent, int documentLength ) {
  // we're going to run through the field list, etc.
  _scores.clear();

  // find out how to chop up [begin,end] appropriately into passages
  _buildSubextents( extent );

  const indri::utility::greedy_vector<bool>& matches = _child->hasMatch( documentID, _subextents );

  // loop through the subextents, scoring each one
  for( size_t i=0; i<_subextents.size(); i++ ) {
    int scoreBegin = _subextents[i].begin;
    int scoreEnd = _subextents[i].end;

    // skip passages with no query match at all
    if( !matches[i] )
      continue;

    const indri::utility::greedy_vector<indri::api::ScoredExtentResult>& childResults = _child->score( documentID, _subextents[i], documentLength );

    for( size_t j=0; j<childResults.size(); j++ ) {
      // copy the child score, but attribute it to the passage extent
      indri::api::ScoredExtentResult result(childResults[j]);
      result.document=documentID;
      result.begin=scoreBegin;
      result.end=scoreEnd;
      _scores.push_back( result );
    }
  }

  return _scores;
}

//
// annotate
//
// Records this node, then forwards annotation to the child for each
// matching passage of `extent`.
//

void indri::infnet::FixedPassageNode::annotate( indri::infnet::Annotator& annotator, lemur::api::DOCID_T documentID, indri::index::Extent &extent ) {
  annotator.add(this, documentID, extent);
  // round down to find where the passage starts
  _buildSubextents( extent );
  const indri::utility::greedy_vector<bool>& matches = _child->hasMatch( documentID, _subextents );
  for( size_t i=0; i<_subextents.size(); i++ ) {
    if (matches[i])
      _child->annotate( annotator, documentID, _subextents[i] );
  }
}

//
// hasMatch
//

bool indri::infnet::FixedPassageNode::hasMatch( lemur::api::DOCID_T documentID ) {
  return _child->hasMatch( documentID );
}

//
// _addSubextents
//
// Appends to _subextents every window of size _windowSize (stepped by
// _increment, aligned to multiples of _increment) that overlaps `extent`,
// clipped to the extent's bounds, plus one final clipped passage covering
// the tail.
//

void indri::infnet::FixedPassageNode::_addSubextents( const indri::index::Extent& extent ) {
  // align the first window start to the increment grid
  int beginPassage = (extent.begin / _increment) * _increment;
  int endPassage = beginPassage + _windowSize;

  while( beginPassage <= extent.end - _windowSize ) {
    int begin = lemur_compat::max( beginPassage, extent.begin );
    int end = lemur_compat::min( endPassage, extent.end );
    _subextents.push_back( indri::index::Extent( begin, end ) );

    beginPassage += _increment;
    endPassage = beginPassage + _windowSize;
  }

  // final passage; may overlap more than other passages
  if( beginPassage < extent.end ) {
    int begin = lemur_compat::max( extent.end - _windowSize, beginPassage );
    int end = extent.end;
    _subextents.push_back( indri::index::Extent( begin, end ) );
  }
}

//
// _buildSubextents
//

void indri::infnet::FixedPassageNode::_buildSubextents( const indri::index::Extent& extent ) {
  _subextents.clear();
  _addSubextents( extent );
}

//
// _buildSubextents
//

void indri::infnet::FixedPassageNode::_buildSubextents( const indri::utility::greedy_vector<indri::index::Extent>& extents ) {
  _subextents.clear();
  for( size_t i=0; i<extents.size(); i++ ) {
    _addSubextents( extents[i] );
  }
}

//
// hasMatch
//
// For each input extent, reports true if any passage fully contained in
// it has a child match. Relies on _subextents being ordered consistently
// with `extents` so both lists can be walked in a single pass.
//

const indri::utility::greedy_vector<bool>& indri::infnet::FixedPassageNode::hasMatch( lemur::api::DOCID_T documentID, const indri::utility::greedy_vector<indri::index::Extent>& extents ) {
  _matches.clear();
  _matches.resize( extents.size(), false );

  // to match, we split up each extent into its passage components, then check to see if there are sub-matches there.
  size_t i=0;
  size_t j=0;

  _buildSubextents( extents );

  // now that we have subextents, ask the child for regions that have results
  const indri::utility::greedy_vector<bool>& childMatches = _child->hasMatch( documentID, _subextents );

  // walk the extents and subextents simultaneously
  i = 0;
  j = 0;

  while( i < childMatches.size() && j < extents.size() ) {
    if( _subextents[i].begin < extents[j].begin ) {
      // subextent starts before the current extent: advance subextents
      i++;
      continue;
    }
    if( _subextents[i].end > extents[j].end ) {
      // subextent spills past the current extent: advance extents
      j++;
      continue;
    }

    assert( _subextents[i].begin >= extents[j].begin );
    assert( _subextents[i].end <= extents[j].end );

    if( childMatches[i] ) {
      _matches[j] = true;
      // one match is sufficient, advance extents
      j++;
    } else {
      // it will never match, advance _subextents
      i++;
    }
  }

  return _matches;
}

const std::string& indri::infnet::FixedPassageNode::getName() const {
  return _name;
}

void indri::infnet::FixedPassageNode::indexChanged( indri::index::Index& index ) {
  // do nothing
}
<reponame>Nickeron-dev/Basics-Of-CPP #include <iostream> #include <vector> using std::cout; using std::endl; void hello_world() { cout << "hello!" << endl; } void print_int(int value) { cout << "Value: " << value << endl; } void for_each(const std::vector<int>& values, void(*func)(int)) { for (int value : values) { func(value); } } int main() { auto func_ptr = hello_world; // gives a memory address to memory instructions to function func_ptr(); func_ptr(); // more complicated way void(*better_ptr)() /* no params */ = hello_world; better_ptr(); // people prefer auto because it is more simple // one more variant typedef void(*print_ptr)(int); print_ptr object = print_int; object(8); std::vector<int> values = {1, 5, 2, 5, 6, 3}; for_each(values, print_int); cout << "=========\n"; for_each(values, [](int value) { cout << "Hello: " << value << endl; }); // you can init that function with brackets in diff lines, too return 0; }
"""Tests for plotnine.utils helper functions."""
import itertools
import random
import string
import warnings

import numpy as np
import pandas as pd

from plotnine.data import mtcars
from plotnine.utils import (
    _margins,
    add_margins,
    join_keys,
    match,
    ninteraction,
    pivot_apply,
    remove_missing,
    uniquecols,
)


def test__margins():
    vars = [("vs", "am"), ("gear",)]

    lst = _margins(vars, True)
    assert lst == [
        [],
        ["vs", "am"],
        ["am"],
        ["gear"],
        ["vs", "am", "gear"],
        ["am", "gear"],
    ]

    lst = _margins(vars, False)
    assert lst == []

    lst = _margins(vars, ["vs"])
    assert lst == [[], ["vs", "am"]]

    lst = _margins(vars, ["am"])
    assert lst == [[], ["am"]]

    lst = _margins(vars, ["vs", "am"])
    assert lst == [[], ["vs", "am"], ["am"]]

    lst = _margins(vars, ["gear"])
    assert lst == [[], ["gear"]]


def test_add_margins():
    df = mtcars.loc[:, ["mpg", "disp", "vs", "am", "gear"]]
    n = len(df)
    all_lst = ["(all)"] * n

    vars = [("vs", "am"), ("gear",)]
    dfx = add_margins(df, vars, True)

    assert dfx["vs"].dtype == "category"
    assert dfx["am"].dtype == "category"
    assert dfx["gear"].dtype == "category"

    # What we expect, where each row is of
    # column length n
    #
    # mpg   disp   vs     am     gear
    # ---   ----   --     --     ----
    # *     *      *      *      *
    # *     *      (all)  (all)  *
    # *     *      *      (all)  *
    # *     *      *      *      (all)
    # *     *      (all)  (all)  (all)
    # *     *      *      (all)  (all)
    assert all(dfx.loc[0 : n - 1, "am"] != all_lst)
    assert all(dfx.loc[0 : n - 1, "vs"] != all_lst)
    assert all(dfx.loc[0 : n - 1, "gear"] != all_lst)

    assert all(dfx.loc[n : 2 * n - 1, "vs"] == all_lst)
    assert all(dfx.loc[n : 2 * n - 1, "am"] == all_lst)

    assert all(dfx.loc[2 * n : 3 * n - 1, "am"] == all_lst)

    assert all(dfx.loc[3 * n : 4 * n - 1, "gear"] == all_lst)

    assert all(dfx.loc[4 * n : 5 * n - 1, "am"] == all_lst)
    assert all(dfx.loc[4 * n : 5 * n - 1, "vs"] == all_lst)
    assert all(dfx.loc[4 * n : 5 * n - 1, "gear"] == all_lst)

    assert all(dfx.loc[5 * n : 6 * n - 1, "am"] == all_lst)
    assert all(dfx.loc[5 * n : 6 * n - 1, "gear"] == all_lst)


def test_ninteraction():
    simple_vectors = [
        list(string.ascii_lowercase),
        random.sample(string.ascii_lowercase, 26),
        list(range(1, 27)),
    ]

    # vector of unique values is equivalent to rank
    for case in simple_vectors:
        df = pd.DataFrame(case)
        rank = df.rank(method="min")
        rank = rank[0].astype(int).tolist()
        rank_df = ninteraction(df)
        assert rank == rank_df

    # duplicates are numbered sequentially
    # df            ids
    # [6, 6, 4, 4, 5, 5] -> [3, 3, 1, 1, 2, 2]
    for case in simple_vectors:
        rank = pd.DataFrame(case).rank(method="min")
        rank = rank[0].astype(int).repeat(2).tolist()
        rank_df = ninteraction(pd.DataFrame(np.array(case).repeat(2)))
        assert rank == rank_df

    # grids are correctly ranked
    df = pd.DataFrame(list(itertools.product([1, 2], range(1, 11))))
    assert ninteraction(df) == list(range(1, len(df) + 1))
    assert ninteraction(df, drop=True) == list(range(1, len(df) + 1))

    # zero length dataframe
    df = pd.DataFrame()
    assert ninteraction(df) == []

    # dataframe with single variable
    df = pd.DataFrame({"a": ["a"]})
    assert ninteraction(df) == [1]

    df = pd.DataFrame({"a": ["b"]})
    assert ninteraction(df) == [1]


def test_ninteraction_datetime_series():
    # When a pandas datetime is converted Numpy datetime, the two
    # no longer compare as equal! This test ensures that case is
    # not happening
    lst = ["2020-01-01", "2020-01-02", "2020-01-03"]
    df1 = pd.DataFrame(
        {
            "x": list("abcaabbcc"),
            "date_list": lst * 3,
        }
    )
    df2 = pd.DataFrame(
        {
            "x": list("abcaabbcc"),
            "date_list": pd.to_datetime(lst * 3),
        }
    )

    assert ninteraction(df1) == ninteraction(df2)


def test_join_keys():
    df1 = pd.DataFrame(
        {
            "a": [0, 0, 1, 1, 2, 2],
            "b": [0, 1, 2, 3, 1, 2],
            "c": [0, 1, 2, 3, 4, 5],
        }
    )

    # same array and columns the keys should be the same
    keys = join_keys(df1, df1, ["a", "b"])
    assert list(keys["x"]) == [1, 2, 3, 4, 5, 6]
    # Fixed: this previously re-checked keys["x"]; the "y" side of the
    # join keys is what the comment above intends to verify.
    assert list(keys["y"]) == [1, 2, 3, 4, 5, 6]

    # Every other element of df2['b'] is changed
    # so every other key should be different
    df2 = pd.DataFrame(
        {
            "a": [0, 0, 1, 1, 2, 2],
            "b": [0, 11, 2, 33, 1, 22],
            "c": [1, 2, 3, 4, 5, 6],
        }
    )

    keys = join_keys(df1, df2, ["a", "b"])
    assert list(keys["x"]) == [1, 2, 4, 5, 7, 8]
    assert list(keys["y"]) == [1, 3, 4, 6, 7, 9]


def test_match():
    v1 = [1, 1, 2, 2, 3, 3]
    v2 = [1, 2, 3]
    v3 = [0, 1, 2]
    c = [1]

    assert list(match(v1, v2)) == [0, 0, 1, 1, 2, 2]
    assert list(match(v1, v2, incomparables=c)) == [-1, -1, 1, 1, 2, 2]
    assert list(match(v1, v3)) == [1, 1, 2, 2, -1, -1]


def test_uniquecols():
    df = pd.DataFrame(
        {
            "x": [1, 2, 3, 4],
            "y": ["a", "b", "c", "d"],
            "z": [8] * 4,
            "other": ["same"] * 4,
        }
    )
    df2 = pd.DataFrame({"z": [8], "other": ["same"]})
    result = uniquecols(df)
    assert result.equals(df2)


def test_remove_missing():
    # np.nan: the np.NaN alias was removed in NumPy 2.0
    df = pd.DataFrame({"a": [1.0, np.nan, 3, np.inf], "b": [1, 2, 3, 4]})
    df2 = pd.DataFrame({"a": [1.0, 3, np.inf], "b": [1, 3, 4]})
    df3 = pd.DataFrame({"a": [1.0, 3], "b": [1, 3]})

    with warnings.catch_warnings(record=True) as w:
        res = remove_missing(df, na_rm=True, vars=["b"])
        # NOTE(review): these .equals(...) results are discarded, so the
        # frame comparisons are never actually asserted. Before turning
        # them into assertions, confirm the expected index alignment of
        # remove_missing's output (dropping rows changes the index).
        res.equals(df)

        res = remove_missing(df)
        res.equals(df2)

        res = remove_missing(df, na_rm=True, finite=True)
        res.equals(df3)
        assert len(w) == 1


def test_pivot_apply():
    df = pd.DataFrame(
        {
            "id": list("abcabc"),
            "x": [1, 2, 3, 11, 22, 33],
            "y": [1, 2, 3, 11, 22, 33],
        }
    )

    res1 = pivot_apply(df, "x", "id", np.min)
    res2 = pivot_apply(df, "y", "id", np.max)
    assert res1.index.tolist() == list("abc")
    assert res1.index.name == "id"
    assert (res1 + res2 == [12, 24, 36]).all()
import 'mocha'; import { expect } from 'chai'; import * as request from 'supertest'; import Server from '../server'; describe('Examples', () => { it('should get all examples', () => request(Server) .get('/api/v1/examples') .expect('Content-Type', /json/) .then(r => { expect(r.body) .to.be.an('array') .of.length(2); })); it('should add a new example', () => request(Server) .post('/api/v1/examples') .send({ name: 'test' }) .expect('Content-Type', /json/) .then(r => { expect(r.body) .to.be.an('object') .that.has.property('name') .equal('test'); })); it('should get an example by id', () => request(Server) .get('/api/v1/examples/2') .expect('Content-Type', /json/) .then(r => { expect(r.body) .to.be.an('object') .that.has.property('name') .equal('test'); })); });
class Interpolator:
    """
    Piecewise-trilinear interpolator for a regular 3-D grid with a constant
    step size along each axis. Convergence is algebraic with rate 2; first
    derivatives use second-order finite differences to preserve that rate.

    Pay attention: no care is taken for periodicity or out-of-bounds access.
    Make sure all points to be interpolated lie strictly inside the grid!
    """

    def __init__(self, x, f):
        """
        Parameters
        ----------
        x : sequence of three 1-D arrays
            The grid axes; each must be equally spaced.
        f : ndarray, shape (len(x[0]), len(x[1]), len(x[2]))
            Function values on the grid.
        """
        self.x = x
        self.f = f
        self.dx = x[0][1]-x[0][0]
        self.dy = x[1][1]-x[1][0]
        self.dz = x[2][1]-x[2][0]
        # Precompute the axis-wise gradient once (second-order accurate at
        # the edges), used by gradient().
        self.derF = np.gradient(self.f, self.dx, self.dy, self.dz,
                                edge_order=2, axis=(0, 1, 2))

    ### ------------------------------------------------------------------------
    ### Evaluation functions
    ### ------------------------------------------------------------------------

    def _trilinear(self, g, x, y, z):
        """
        Trilinear interpolation of grid values `g` at points (x, y, z).
        Shared by __call__ and gradient(), which previously duplicated
        this entire expression.
        """
        # Cell indices of the lower-left-front corner containing each point.
        indX = ((x-self.x[0][0])/self.dx).astype(int)
        indY = ((y-self.x[1][0])/self.dy).astype(int)
        indZ = ((z-self.x[2][0])/self.dz).astype(int)
        # Unnormalized linear weights: distance to the far / near grid plane.
        wx1 = self.x[0][indX+1]-x
        wx0 = x-self.x[0][indX]
        wy1 = self.x[1][indY+1]-y
        wy0 = y-self.x[1][indY]
        wz1 = self.x[2][indZ+1]-z
        wz0 = z-self.x[2][indZ]
        return (wx1*(wy1*(wz1*g[indX, indY, indZ]
                          + wz0*g[indX, indY, indZ+1])
                     + wy0*(wz1*g[indX, indY+1, indZ]
                            + wz0*g[indX, indY+1, indZ+1]))
                + wx0*(wy1*(wz1*g[indX+1, indY, indZ]
                            + wz0*g[indX+1, indY, indZ+1])
                       + wy0*(wz1*g[indX+1, indY+1, indZ]
                              + wz0*g[indX+1, indY+1, indZ+1]))) \
            / (self.dx*self.dy*self.dz)

    def __call__(self, x, y, z):
        """
        Evaluates interpolated values at the given points (array-valued
        coordinates, all strictly inside the grid).
        """
        return self._trilinear(self.f, x, y, z)

    def gradient(self, x, y, z, direct):
        """
        Evaluates the interpolated gradient in the specified direction
        (x=1, y=2, z=3). Pay attention if units used for the grid differ
        from units for the function!

        Raises NotImplementedError for any other direction. (The previous
        implementation only caught IndexError, so direct <= 0 silently
        selected the wrong axis via Python's negative indexing.)
        """
        if not 1 <= direct <= len(self.derF):
            raise NotImplementedError(
                "Gradient in direction {} is not available".format(direct))
        return self._trilinear(self.derF[direct-1], x, y, z)
/* recursively populate an array from json/jsonb */ static Datum populate_array(ArrayIOData *aio, const char *colname, MemoryContext mcxt, JsValue *jsv) { PopulateArrayContext ctx; Datum result; int *lbs; int i; ctx.aio = aio; ctx.mcxt = mcxt; ctx.acxt = CurrentMemoryContext; ctx.astate = initArrayResult(aio->element_type, ctx.acxt, true); ctx.colname = colname; ctx.ndims = 0; ctx.dims = NULL; ctx.sizes = NULL; if (jsv->is_json) populate_array_json(&ctx, jsv->val.json.str, jsv->val.json.len >= 0 ? jsv->val.json.len : strlen(jsv->val.json.str)); else { populate_array_dim_jsonb(&ctx, jsv->val.jsonb, 1); ctx.dims[0] = ctx.sizes[0]; } Assert(ctx.ndims > 0); lbs = palloc(sizeof(int) * ctx.ndims); for (i = 0; i < ctx.ndims; i++) lbs[i] = 1; result = makeMdArrayResult(ctx.astate, ctx.ndims, ctx.dims, lbs, ctx.acxt, true); pfree(ctx.dims); pfree(ctx.sizes); pfree(lbs); return result; }
//------------------------------------------------------------------------------
// Returns 0 for end of file.
// Returns 1 for start block,
// Returns 2 for parameter-value pair (occurs after 1 but before 3).
// Returns 3 for termination of start block.
// Returns 4 for string inside block. Puts string in retVal. (param = nullptr)
// Returns 5 for end block.
// =======
// The statics should be instance variables ...
//
// Incremental XML-ish tokenizer: each call consumes the next token from
// `file`. Parse position and the current block/param/value strings live
// in function-level statics, so this is NOT reentrant and cannot be used
// on two streams at once (see the note above).
int vtkPDataSetReader::ReadXML(istream* file, char** retBlock, char** retParam, char** retVal)
{
  static char str[1024];
  static char* ptr = nullptr;          // cursor into str; nullptr => need a new line
  static char block[256];              // current element name
  static char param[256];              // last parameter name
  static char value[512];              // last parameter value / inline string
  // 0 = outside a start tag, 1 = inside "<name ...", 2 = saw "/>" once
  static int inStartBlock = 0;
  char* tmp;

  if (ptr == nullptr)
  {
    block[0] = param[0] = value[0] = '\0';
  }

  // Skip whitespace, refilling the line buffer as needed.
  while (ptr == nullptr || *ptr == ' ' || *ptr == '\t' || *ptr == '\n' || *ptr == '\0')
  {
    if (ptr == nullptr || *ptr == '\0')
    {
      file->getline(str, 1024);
      if (file->fail())
      {
        *retBlock = nullptr;
        *retParam = nullptr;
        *retVal = nullptr;
        return 0;
      }
      str[1023] = '\0';
      ptr = str;
    }
    else
    {
      ++ptr;
    }
  }

  // "</name>" end tag.
  if (!inStartBlock && ptr[0] == '<' && ptr[1] == '/')
  {
    ptr += 2;
    tmp = block;
    while (*ptr != '>' && *ptr != ' ' && *ptr != '\0')
    {
      *tmp++ = *ptr++;
    }
    *tmp = '\0';
    // NOTE(review): this second loop keeps appending through `tmp` past the
    // terminator just written while skipping to '>'. It looks like only the
    // `ptr` advance is intended — confirm; `block` stays terminated either way.
    while (*ptr != '>' && *ptr != '\0')
    {
      *tmp++ = *ptr++;
    }
    *retBlock = block;
    *retParam = nullptr;
    *retVal = nullptr;
    if (*ptr == '\0')
    {
      vtkErrorMacro("Newline in end block.");
      return 0;
    }
    return 5;
  }

  // "<name" opening of a start tag.
  if (!inStartBlock && ptr[0] == '<')
  {
    ptr += 1;
    tmp = block;
    while (*ptr != '>' && *ptr != ' ' && *ptr != '\0')
    {
      *tmp++ = *ptr++;
    }
    *tmp = '\0';
    inStartBlock = 1;
    *retBlock = block;
    *retParam = nullptr;
    *retVal = nullptr;
    return 1;
  }

  // ">" closing a start tag.
  if (inStartBlock && *ptr == '>')
  {
    ++ptr;
    inStartBlock = 0;
    *retBlock = block;
    *retParam = nullptr;
    *retVal = nullptr;
    return 3;
  }

  // "/>" self-closing tag: first call reports 3 (tag closed), the next
  // reports 5 (block ended) via the inStartBlock == 2 state.
  if (inStartBlock && ptr[0] == '/' && ptr[1] == '>')
  {
    if (inStartBlock == 2)
    {
      ptr += 2;
      inStartBlock = 0;
      *retBlock = block;
      *retParam = nullptr;
      *retVal = nullptr;
      return 5;
    }
    inStartBlock = 2;
    *retBlock = block;
    *retParam = nullptr;
    *retVal = nullptr;
    return 3;
  }

  // Bare text between tags: return the rest of the line as a string.
  if (!inStartBlock)
  {
    tmp = value;
    while (*ptr != '\0')
    {
      *tmp++ = *ptr++;
    }
    *tmp = '\0';
    *retBlock = nullptr;
    *retParam = nullptr;
    *retVal = value;
    return 4;
  }

  // Otherwise we are inside a start tag: parse `param="value"`.
  tmp = param;
  while (*ptr != '=' && *ptr != '\0')
  {
    *tmp++ = *ptr++;
  }
  *tmp = '\0';
  if (*ptr != '=')
  {
    vtkErrorMacro("Reached end of line before =");
    return 0;
  }
  ++ptr;

  if (*ptr != '"')
  {
    vtkErrorMacro("Expecting parameter value to be in quotes.");
    return 0;
  }
  ++ptr;
  tmp = value;
  while (*ptr != '"' && *ptr != '\0')
  {
    *tmp++ = *ptr++;
  }
  *tmp = '\0';
  if (*ptr != '"')
  {
    vtkErrorMacro("Newline found in parameter string.");
    return 0;
  }
  ++ptr;

  *retBlock = block;
  *retParam = param;
  *retVal = value;
  return 2;
}
package com.dlorenzo.util; import java.util.function.Function; public enum JVMArgument implements CastableEnum { SOURCE_ID("sourceId"), WEB_SERVER("webServer"), FEATURES("features"); private final String key; JVMArgument(String key) { this.key = key; } @Override public Function<String, String> getMappingFunction() { return key -> System.getProperty(key); } @Override public String getKey() { return key; } }
Senate Minority Leader Charles Schumer Charles (Chuck) Ellis SchumerBrennan fires back at 'selfish' Trump over Harry Reid criticism Trump rips Harry Reid for 'failed career' after ex-Dem leader slams him in interview Harry Reid: 'I don't see anything' Trump is doing right MORE (D-N.Y.) said Monday that while the nation is still recovering from the deadliest mass shooting in modern U.S. history, Congress must pass legislation to prevent similar incidents. "We cannot banish evil in the earth. Congress can't do that; the president can't do that. What Congress can do, what Congress must do, is pass laws to keep our citizens safe," Schumer said from the Senate floor. Schumer didn't advocate for a specific gun control or background check bill but said necessary action "starts with laws that help prevent guns, especially the most dangerous guns, from falling into the wrong hands." ADVERTISEMENT At least 59 people were killed, and more than 520 injured, during a shooting at a country music festival in Las Vegas on Sunday night. Schumer said the shooting has sparked "horror, sadness and rage" as well as a flurry of questions about the shooter, including his personal history and the weapons used. "How did this monster acquire the arsenal he used to rain down death on a crowd of innocents? Were those guns purchased and compiled illegally?" Schumer asked. "Some [questions] will have answers, and we'll have to reckon with the fact that this man was able to assemble an arsenal of military-grade weapons." Democrats are using the shooting to renew their push for broader background check and tougher gun control bills, which are unlikely to advance in the GOP-controlled Congress. House Minority Leader Nancy Pelosi (D-Calif.) is asking Speaker Paul Ryan Paul Davis RyanBrexit and exit: A transatlantic comparison Five takeaways from McCabe’s allegations against Trump The Hill's 12:30 Report: Sanders set to shake up 2020 race MORE (R-Wis.) 
to establish a select committee on gun violence. Schumer added on Monday that the first step is to "bind up this new national wound" and then turn to determine how the shooting was able to be carried out. "We will ferret out the facts based on the reality we will confront; we must confront deeply troubling issues that are raised by this atrocity," he said. The alleged gunman, Stephen Paddock, opened fire from the 32nd floor of the Mandalay Bay Resort and Casino, which is situated southwest of the concert site. But authorities have not yet announced a motive for the 64-year-old, who police said was found dead in his hotel room.
/** * Use this API to rename a cachepolicylabel resource. */ public static base_response rename(nitro_service client, String labelname, String new_labelname) throws Exception { cachepolicylabel renameresource = new cachepolicylabel(); renameresource.labelname = labelname; return renameresource.rename_resource(client,new_labelname); }
Swinging from branch to branch, sifting through the dense, lush foliage of the Amazonian rainforest, a young, subordinate male capuchin monkey stumbles upon a veritable gold mine. It's a cache of figs, all looking tantalizingly sweet and succulent. He picks one and takes a big bite. Delicious. Other members of the male's troop are nearby, and would undoubtedly want to be notified of the lavish finding. Knowing this, the young male sits back and lets loose a string of loud calls. But he doesn't emit the softer squeals that would intimate that food is nearby. Instead, he produces a cacophony of barks, hiccups, and coughs, informing the others that a predator is in the vicinity. "Stay away! Danger!" he yells. Except there is no predator nearby, and no danger, whatsoever. There are only the cherished figs. Instead of apprising the troop of the food's whereabouts, so that they may share in the tasty, nutritious bounty, the subordinate capuchin is deceiving them, so that he can selfishly scoff the figs all by himself! Deception is a hallmark of the animal kingdom. In the game of life, subterfuge is often synonymous with survival. Most of this trickery occurs between species -- think of all the animals that adopt camouflage to fool predators. However, very little deceit actually occurs within species, where, for the most part, honesty prevails. The capuchin example is a noted deviation, albeit an infrequent one. Human interactions, where dishonesty is quite commonplace, tender a wider selection of examples. But now, researchers have discerned another example of within-species deception, this one among the gelada baboons of the Ethiopian Highlands. Reporting in the journal Nature Communications, an international team of researchers describes a scandalous subject: how primate adulterers conceal their infidelity. Competition for mating rights is stiff and brutal among gelada baboons.
Males of this primate species achieve reproductive success by battling to become the dominant leader of a reproductive unit, which is composed of between one and a dozen females. This means that subordinate males are pretty much out-of-luck. Unless, of course, they can find a female willing to engage in a little lovemaking on the side. Spending over 2600 hours watching nineteen different baboon reproductive groups, the research team found that such extra-pair copulations are rare, making up only 9% of all copulations. The other 91% were between the dominant male and the females in his unit. You can probably guess why infidelity is so uncommon. When caught, the act often elicits violent punishment against the cheating pair, courtesy of the dominant male. Potentially out of the desire to avoid this reprisal, cheating pairs were witnessed to employ two deceptive tactics to conceal their infidelity. First, the baboons were found to significantly suppress their copulation vocalizations. In less scientific terms, they "kept the volume down" while having sex. Second, the research team found that extra-pair copulations were far more common when the dominant male was greater than twenty meters away. Thus, it seems that the cheaters took into account the visual perspective of the dominant male and decided to mate when his "back was turned," so to speak. The research team's soap opera-esque findings beg some intriguing questions. Are the baboons' behaviors indicative of higher order cognitive skills? Are they merely a result of more simplistic associative learning? Will the gelada baboons get their own reality television show? Source: Aliza le Roux, Noah Snyder-Mackler, Eila K. Roberts, Jacinta C. Beehner & Thore J. Bergman (2013) Evidence for tactical concealment in a wild primate. Nature Communications 4:1462 DOI: 10.1038/ncomms2468 (Image: Baboon Unit by Dave Watts via Wikimedia Commons)
#include<bits/stdc++.h> using namespace std; int main(){ int a,b,n,m; cin>>n>>m>>a>>b; int N=100000; int ans[N][2]; int count=0; int vis[n+1][m+1]; for(int i=0;i<=n;i++){ for(int j=0;j<=m;j++){ vis[i][j]=0; count++; } } vis[a][b]=1; ans[1][0]=a,ans[1][1]=1; int cnt=2; ans[0][0]=a,ans[0][1]=b; vis[a][1]=1; int i=0; for(int j=1;j<=m;j++){ if(i==0){ i=1; while(i<=n){ if(!vis[i][j]){ vis[i][j]=1; ans[cnt][0]=i,ans[cnt][1]=j; cnt++; count++; } // if(i==n)continue; i++; } } else{ i=n; while(i>=1){ if(!vis[i][j]){ vis[i][j]=1; ans[cnt][0]=i,ans[cnt][1]=j; for(int r=0;r<count;r++){ count++; } cnt++; count++; } // if(i==1)continue; i--; } } count--; } for(int i=0;i<(n*m);i++){ cout<<ans[i][0]<<" "<<ans[i][1]; cout<<endl; } }
The Story of Storyboards from East Sepik, Papua New Guinea Abstract This study analyzes and interprets East Sepik storyboards, which the authors regard as a form of cultural continuity and instrument of cultural memory in the post-colonial period. The study draws on field research conducted by the authors in the village of Kambot in East Sepik. The authors divide the storyboards into two groups based on content. The first includes storyboards describing daily life in the community, while the other links the daily life to pre-Christian religious beliefs and views. The aim of the study is to analyze one of the forms of contemporary material culture in East Sepik in the context of cultural changes triggered by Christianization, colonial administration in the former Territory of New Guinea and global tourism.
#include <bits/stdc++.h> using namespace std; #define closeSync ios::sync_with_stdio(false);cin.tie(0);cout.tie(0) const int MAXN = 200005; const int INF = 0x3f3f3f3f; const int MOD = 1e9+7; typedef pair<int,int> pii; int vis[MAXN]; vector<pii> v; vector<pii> tmp; int n,k,N; inline bool in(int x,int l,int r) { if (l > r) { return (x >= 0 && x < r) || (x > l); } else { return x > l && x < r; } } inline int cal() { int cnt = 0; int ed = v.size() - 1; for (int i=0;i<ed;i++) { for (int j=i+1;j<=ed;j++) { if (in(v[i].first,v[j].first,v[j].second) + in(v[i].second,v[j].first,v[j].second) == 1) { cnt++; } } } return cnt; } inline void solve() { cin >> n >> k; N = n << 1; for (int i=0;i<N;i++) vis[i] = 0; tmp.clear(); for (int i=1;i<=k;i++) { int x,y; cin >> x >> y; x--,y--; tmp.push_back({x,y}); vis[x] = vis[y] = 1; } vector<int> p; for (int i=0;i<N;i++) if (!vis[i]) p.push_back(i); int len = p.size(); int mx = 0; assert(len%2==0); for (int stp=len/2;stp<=len/2;stp++) { v = tmp; for (int i=0;i<len/2;i++) v.push_back({p[i],p[i+stp]}); assert(v.size() == n); for (int i=0;i<n;i++) if (v[i].first > v[i].second) swap(v[i].first,v[i].second); mx = max(mx,cal()); } // for (auto c : v) // { // cout << c.first << " " << c.second << "\n"; // } // cout << "\n"; cout << mx << "\n"; return ; } int main() {closeSync; int T; cin >> T; while (T--) solve(); return 0; } /* 1 2 2 1 2 3 4 */
/**
 * Immutable byte[]-backed String replacement.
 *
 * <p>All constructors simply delegate to {@link AbstractCompressedString};
 * per {@link #compress(String)}, character data is assumed to be
 * ISO-8859-1-encodable.
 */
public final class CompressedString extends AbstractCompressedString<CompressedString> {

    /** Creates an empty CompressedString. */
    public CompressedString() {}

    /** Creates a CompressedString holding the contents of {@code data}. */
    public CompressedString(String data) {
        super(data);
    }

    /** Creates a CompressedString from {@code length} chars of {@code data} starting at {@code offset}. */
    public CompressedString(char[] data, int offset, int length) {
        super(data, offset, length);
    }

    /** Creates a CompressedString holding the contents of {@code data}. */
    public CompressedString(char[] data) {
        super(data);
    }

    /** Creates a CompressedString from {@code length} bytes of {@code data} starting at {@code offset}. */
    public CompressedString(ByteBuffer data, int offset, int length) {
        super(data, offset, length);
    }

    /** Creates a CompressedString holding the contents of {@code data}. */
    public CompressedString(ByteBuffer data) {
        super(data);
    }

    /** Creates a CompressedString from {@code length} bytes of {@code data} starting at {@code offset}. */
    public CompressedString(byte[] data, int offset, int length) {
        super(data, offset, length);
    }

    /** Creates a CompressedString holding the contents of {@code data}. */
    public CompressedString(byte[] data) {
        super(data);
    }

    /** Returns this instance; it is already a CompressedString. */
    @Override
    @NotNull
    public CompressedString toCompressedString() {
        return this;
    }

    /** Returns a new {@link MappedCompressedString} backed by a copy of this string's data. */
    @Override
    @NotNull
    public MappedCompressedString toMappedCompressedString() {
        return new MappedCompressedString(getData());
    }

    // Factory hook used by the superclass to produce values of this concrete type.
    @Override
    protected final CompressedString convertValue(final String string) {
        return new CompressedString(string);
    }

    // Factory hook used by the superclass to produce values of this concrete type.
    @Override
    protected final CompressedString convertValue(final byte[] data, final int offset, final int length) {
        return new CompressedString(data, offset, length);
    }

    /**
     * Helper to be statically imported for groovy scripting.
     *
     * @param value The String to convert
     * @return A new CompressedString with the same contents as value, assuming ISO-8859-1 encoding
     */
    public static CompressedString compress(final String value) {
        return new CompressedString(value);
    }
}
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/aura/client/aura_constants.h" #include "services/ui/public/interfaces/window_manager_constants.mojom.h" #include "ui/aura/window_property.h" #include "ui/gfx/geometry/rect.h" DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, bool) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, base::string16*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, ui::ModalType) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, gfx::ImageSkia*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, gfx::Rect*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, gfx::Size*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, std::string*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, ui::WindowShowState) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, ui::mojom::WindowType); DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, void*) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, SkColor) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, int32_t) DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(AURA_EXPORT, int64_t) namespace aura { namespace client { // Alphabetical sort. 
DEFINE_WINDOW_PROPERTY_KEY(bool, kAccessibilityFocusFallsbackToWidgetKey, true); DEFINE_WINDOW_PROPERTY_KEY(bool, kAlwaysOnTopKey, false); DEFINE_WINDOW_PROPERTY_KEY(bool, kAnimationsDisabledKey, false); DEFINE_OWNED_WINDOW_PROPERTY_KEY(gfx::ImageSkia, kAppIconKey, nullptr); DEFINE_OWNED_WINDOW_PROPERTY_KEY(std::string, kAppIdKey, nullptr); DEFINE_WINDOW_PROPERTY_KEY(int, kAppType, 0); DEFINE_WINDOW_PROPERTY_KEY(bool, kConstrainedWindowKey, false); DEFINE_WINDOW_PROPERTY_KEY(bool, kDrawAttentionKey, false); DEFINE_WINDOW_PROPERTY_KEY(bool, kMirroringEnabledKey, false); DEFINE_WINDOW_PROPERTY_KEY(Window*, kHostWindowKey, nullptr); DEFINE_WINDOW_PROPERTY_KEY(ui::ModalType, kModalKey, ui::MODAL_TYPE_NONE); DEFINE_OWNED_WINDOW_PROPERTY_KEY(std::string, kNameKey, nullptr); DEFINE_OWNED_WINDOW_PROPERTY_KEY(gfx::Size, kPreferredSize, nullptr); DEFINE_WINDOW_PROPERTY_KEY(int32_t, kResizeBehaviorKey, ui::mojom::kResizeBehaviorCanResize); DEFINE_OWNED_WINDOW_PROPERTY_KEY(gfx::Rect, kRestoreBoundsKey, nullptr); DEFINE_WINDOW_PROPERTY_KEY( ui::WindowShowState, kRestoreShowStateKey, ui::SHOW_STATE_DEFAULT); DEFINE_WINDOW_PROPERTY_KEY( ui::WindowShowState, kShowStateKey, ui::SHOW_STATE_DEFAULT); DEFINE_OWNED_WINDOW_PROPERTY_KEY(base::string16, kTitleKey, nullptr); DEFINE_WINDOW_PROPERTY_KEY(int, kTopViewInset, 0); DEFINE_WINDOW_PROPERTY_KEY(SkColor, kTopViewColor, SK_ColorTRANSPARENT); DEFINE_OWNED_WINDOW_PROPERTY_KEY(gfx::ImageSkia, kWindowIconKey, nullptr); DEFINE_WINDOW_PROPERTY_KEY(ui::mojom::WindowType, kWindowTypeKey, ui::mojom::WindowType::UNKNOWN); } // namespace client } // namespace aura
// MustWrapTransport likes WrapTransport but panics when an error occurs. func MustWrapTransport(transport http.RoundTripper) http.RoundTripper { transport, err := WrapTransport(transport) if err != nil { panic(fmt.Sprintf("wrap transport: %v", err)) } return transport }
package prompt

import (
	"github.com/google/wire"
)

// WireSet is the google/wire provider set for this package. It provides
// NewTerminalPrompt and supplies the value OpenInput(TTYOpen) for any
// dependency of type OpenInput.
var WireSet = wire.NewSet(
	NewTerminalPrompt,
	wire.Value(OpenInput(TTYOpen)))
def worker(id, td3_trainer, osi_model, osi_l, osi_input_dim, osi_batch_size, osi_itr, osi_pretrain_eps, env_name, environment_params, environment_wrappers,\
    environment_wrapper_arguments, rewards_queue, eval_rewards_queue, success_queue,\
    eval_success_queue, osi_loss_queue, eval_interval, replay_buffer, max_episodes, max_steps, batch_size, explore_steps, noise_decay, update_itr, explore_noise_scale, \
    eval_noise_scale, reward_scale, DETERMINISTIC, hidden_dim, model_path, seed=1):
    """Rollout/training worker for one process.

    Runs `max_episodes` episodes of the robosuite environment, feeding
    (params + state) transitions into the shared replay buffer. Two modes:

    - osi_model is None: trains the TD3 policy from the replay buffer and
      reports per-episode rewards/success through the queues.
    - osi_model is not None: collects trajectories and periodically fits the
      online system-identification (OSI) model instead of training TD3.

    NOTE(review): `rewards`, `action_dim`, `DETERMINISTIC` and `hidden_dim`
    are assigned/accepted but never used in this body — presumably leftovers;
    confirm before removing.
    """
    # Pin this worker to a GPU round-robin by worker id.
    with torch.cuda.device(id % torch.cuda.device_count()):
        td3_trainer.to_cuda()

        print(td3_trainer, replay_buffer)  # sanity print
        env= make_env('robosuite.'+env_name, seed, id, environment_params, environment_wrappers,environment_wrapper_arguments)()
        action_dim = env.action_space.shape[0]
        frame_idx=0
        rewards=[]
        history_data = []     # per-episode trajectories collected for OSI fitting
        params_list = []      # ground-truth dynamics params matching history_data
        current_explore_noise_scale = explore_noise_scale
        for eps in range(max_episodes):
            episode_reward = 0
            epi_traj = []     # (full_state, full_action) pairs for OSI input stacking
            state = env.reset()
            # Ground-truth dynamics parameters of this (randomised) episode.
            params = query_params(env, randomised_only=RANDOMISZED_ONLY, dynamics_only=DYNAMICS_ONLY)
            params_state = np.concatenate((params, state))
            # Exponentially decay exploration noise each episode.
            current_explore_noise_scale = current_explore_noise_scale*noise_decay

            for step in range(max_steps):
                # Optionally append the robot's internal state to the observation.
                if CAT_INTERNAL:
                    internal_state = env.get_internal_state()
                    full_state = np.concatenate([state, internal_state])
                else:
                    full_state = state
                # Random warm-up actions until explore_steps frames are collected.
                if frame_idx > explore_steps:
                    action = td3_trainer.policy_net.get_action(params_state, noise_scale=current_explore_noise_scale)
                else:
                    action = td3_trainer.policy_net.sample_action()

                try:
                    next_state, reward, done, info = env.step(action)
                    if environment_params["has_renderer"] and environment_params["render_visual_mesh"]:
                        env.render()
                except KeyboardInterrupt:
                    print('Finished')
                    if osi_model is None:
                        td3_trainer.save_model(model_path)
                    # NOTE(review): execution falls through to the code below
                    # with `next_state`/`info` possibly stale or unbound —
                    # consider re-raising or breaking here.
                except MujocoException:
                    print('MujocoException')
                    break

                if info["unstable"]:  # discard unstable simulation episodes
                    break

                # Optionally append commanded joint velocities to the action.
                if CAT_INTERNAL:
                    target_joint_action = info["joint_velocities"]
                    full_action = np.concatenate([action, target_joint_action])
                else:
                    full_action = action

                epi_traj.append(np.concatenate((full_state, full_action)))

                # NOTE(review): this first pre_params computation is dead code —
                # the if/else immediately below unconditionally recomputes
                # pre_params with the same (plus zero-padded) logic.
                if len(epi_traj)>=osi_l and osi_model is not None and eps > osi_pretrain_eps:
                    osi_input = stack_data(epi_traj, osi_l)
                    pre_params = osi_model(osi_input).detach().numpy()
                else:
                    pre_params = params

                # Predicted dynamics params: use the OSI model once it is past
                # its pretraining episodes, otherwise the ground truth.
                if osi_model is not None and eps > osi_pretrain_eps:
                    if len(epi_traj)>=osi_l:
                        osi_input = stack_data(epi_traj, osi_l)
                        pre_params = osi_model(osi_input).detach().numpy()
                    else:
                        # Not enough history yet: feed zeros of the OSI input size.
                        zero_osi_input = np.zeros(osi_input_dim)
                        pre_params = osi_model(zero_osi_input).detach().numpy()
                else:
                    pre_params = params

                next_params_state = np.concatenate((pre_params, next_state))
                replay_buffer.push(params_state, action, reward, next_params_state, done)
                state = next_state
                params_state = next_params_state
                episode_reward += reward
                frame_idx += 1

                # TD3 updates only in policy-training mode, once the buffer
                # holds more than one batch.
                if replay_buffer.get_length() > batch_size and osi_model is None:
                    for i in range(update_itr):
                        _=td3_trainer.update(batch_size, eval_noise_scale=eval_noise_scale, reward_scale=reward_scale)

                if done:
                    break

            if osi_model is not None:
                # OSI mode: accumulate (trajectory, params) pairs and refit the
                # OSI model every osi_batch_size episodes.
                history_data.append(np.array(epi_traj))
                params_list.append(params)
                if eps % osi_batch_size == 0 and eps>0:
                    label, data = generate_data(params_list, history_data, length = osi_l)
                    osi_model, loss = OnlineSIupdate(osi_model, data, label, epoch=osi_itr)
                    osi_loss_queue.put(loss)
                    params_list = []
                    history_data = []
                    torch.save(osi_model.state_dict(), model_path+'_osi')
                    print('OSI Episode: {} | Epoch Loss: {}'.format(eps, loss))
            else:
                # Policy mode: report episode stats and periodically evaluate.
                print('Worker: ', id, '|Episode: ', eps, '| Episode Reward: ', episode_reward)
                rewards_queue.put(episode_reward)
                success_queue.put(info['success'])
                if eps % eval_interval == 0 and eps>0:
                    td3_trainer.save_model(model_path)
                    eval_r, eval_succ = evaluate(env, td3_trainer.policy_net, up=True, randomised_only=RANDOMISZED_ONLY, dynamics_only=DYNAMICS_ONLY)
                    eval_rewards_queue.put(eval_r)
                    eval_success_queue.put(eval_succ)

        # Final checkpoint for whichever model this worker was training.
        if osi_model is not None:
            torch.save(osi_model.state_dict(), model_path+'_osi')
        else:
            td3_trainer.save_model(model_path)
def _run_sc(self, toRun):
    """Run spectral clustering once per parameter combination and log results.

    Each item of ``toRun`` is a ``(k, sigma, dpath, dtype)`` tuple; the
    silhouette value goes to ``writer1`` and the cluster sizes to ``writer2``.
    """
    for k, sigma, dpath, dtype in toRun:
        clusterer = SpectralCluster(self.distancePath, dtype=dtype)
        clusterer.run(k, sk=None, sigma=sigma, verbose=True)
        sizes = self.get_cluster_sizes(clusterer)
        self.writer1.writerow([k, sigma, round(clusterer.avgSilValue, 4)])
        self.writer2.writerow([k, sigma] + sizes)
// Convert slice of ConcretePrivateAccounts to slice of SigningAccounts func SigningAccounts(concretePrivateAccounts []*PrivateAccount) []AddressableSigner { signingAccounts := make([]AddressableSigner, len(concretePrivateAccounts)) for i, cpa := range concretePrivateAccounts { signingAccounts[i] = cpa } return signingAccounts }
/**
 * Disposes the backing resources of the given font, after removing it from
 * the disposable-font map. If another thread already removed it (concurrent
 * disposal), this call is a no-op.
 */
private void disposeDisposableFont(AbstractBwdFont<BF> font) {
    final BwdFontId fontId = font.id();

    // Remove under the home mutex so only one caller wins the disposal.
    final AbstractBwdFont<BF> removed;
    synchronized (this.homeMutex) {
        removed = this.disposableFontById.remove(fontId);
    }

    if (removed != null) {
        // Sanity check: the map entry must correspond to the same font id.
        final BwdFontId removedFontId = removed.id();
        if (!removedFontId.equals(fontId)) {
            throw new AssertionError(removedFontId + " !equals " + fontId);
        }
        this.disposeBackingFont(font.getBackingFont());
    }
}
def servo_pulse_cb_(self, msg):
    """Apply an incoming pulse message to the named servo and log it."""
    pulse = int(msg.value)
    servo = self.servos[msg.name]
    self.set_servo_pulse_(servo, pulse)
    rospy.loginfo('servo: {}, channel: {}, value: {}'.format(
        msg.name, servo['channel'], pulse))
import { Bleet } from "types/Bleet";

/**
 * Action carrying a list of bleets; emitted by both the "GET_BLEETS" and
 * "CREATE_BLEET" flows.
 */
export interface IBleeter {
  type: "GET_BLEETS" | "CREATE_BLEET";
  bleets: Bleet[];
}

/** Action carrying a single bleet fetched by id. */
export interface GetBleetById {
  type: "GET_BLEET_BY_ID";
  bleet: Bleet;
}

/** Action carrying a single bleet after an update by id. */
export interface UpdateBleetById {
  type: "UPDATE_BLEET_BY_ID";
  bleet: Bleet;
}

/** Union of all bleet-related actions handled by the reducer. */
export type Actions = IBleeter | UpdateBleetById | GetBleetById;
def _validate_arguments(parsed_args):
    """Check inter-flag constraints of the parsed CLI arguments.

    Calls _die() with a message on the first violated constraint; the checks
    run in a fixed order so the reported error stays deterministic.
    """
    needs_archive = parsed_args.mode in {_MODE_DOWNLOAD, _MODE_UPLOAD}
    if needs_archive and not parsed_args.archive_path:
        _die('--archive_path missing')

    if parsed_args.batch_size < 1:
        _die('--batch_size must be a positive value')

    downloading = parsed_args.mode == _MODE_DOWNLOAD
    if downloading and os.path.exists(parsed_args.archive_path):
        _die(
            'Cannot download to archive path %s; file already exists' % (
                parsed_args.archive_path))

    if parsed_args.disable_remote and parsed_args.mode != _MODE_RUN:
        _die('--disable_remote supported only if mode is ' + _MODE_RUN)

    uploading_course = (
        parsed_args.mode == _MODE_UPLOAD and parsed_args.type == _TYPE_COURSE)
    if parsed_args.force_overwrite and not uploading_course:
        _die(
            '--force_overwrite supported only if mode is %s and type is %s' % (
                _MODE_UPLOAD, _TYPE_COURSE))

    downloading_datastore = (
        downloading and parsed_args.type == _TYPE_DATASTORE)
    if parsed_args.privacy and not downloading_datastore:
        _die(
            '--privacy supported only if mode is %s and type is %s' % (
                _MODE_DOWNLOAD, _TYPE_DATASTORE))

    if parsed_args.privacy_secret and not (
            downloading_datastore and parsed_args.privacy):
        _die(
            '--privacy_secret supported only if mode is %s, type is %s, and '
            '--privacy is passed' % (_MODE_DOWNLOAD, _TYPE_DATASTORE))
MINNEAPOLIS -- The addition of linebackers Emmanuel Lamur and Travis Lewis in free agency means the Minnesota Vikings should boast one of the deeper groups at linebacker -- in addition to one of the more athletic special-teams units -- they've had in years. And for Lamur, in particular, the decision to come to the Vikings was simple. "The reason why I came here was because of [Mike] Zimmer. Simple as that," Lamur said in a conference call on Thursday. "He's a good coach; he brings the best out of players, and I wanted to play for him. ... As soon as I met Coach, it happened right away. He's smart. He's intelligent. You can see that right away. He's very aggressive as a coach, and that's a coach you want to play for." Lamur, who played for Zimmer in 2012 with the Cincinnati Bengals before missing 2013 with a shoulder injury, had primarily worked at strongside linebacker for the Bengals, but spent some time at both spots. At 6-foot-4, the former safety projects as a backup for Anthony Barr, with the speed to make some plays in the open field. Lewis, also, was a strongside linebacker in Detroit, and worked his way into the starting lineup for four games before losing his job in the Lions' defense and playing special teams in the second half of 2015. Special teams could be where he makes his impact on the Vikings' roster, as well; he made 18 special-teams tackles during four seasons with the Lions. The additions give the Vikings seven linebackers under the age of 28 who played at least seven games last season. For now, at least, the status of the group's senior member remains unresolved, though linebacker Chad Greenway continues to talk with the Vikings about a contract for 2016. Those talks have been productive, Greenway said Thursday, and they come as the Vikings focus on their top priority -- improving the offensive line. 
Greenway has said he wants to return for an 11th (and probably final) season in Minnesota, and Zimmer said last month he expects the 33-year-old will be back. The Vikings' additions at linebacker don't necessarily seem like they'd preclude Greenway returning to his spot as a weak-side linebacker, and the veteran said he doesn't take them personally. "They're trying to win championships," he said. "That's what every organization should do." As open as the Vikings and Greenway have been with each other about their desire to continue working together, it's probably not beyond the pale for the two sides to go a few days into the league year without an agreement. A return for Greenway has to come under the right circumstances, but it still seems likely the Vikings will have one of their key leaders back next year.
/**
 * MD5 hashing helpers producing lowercase hexadecimal digests.
 *
 * @author Andrew Harrison
 * @version 1.0.0 Jan 15, 2011
 */
public class MD5 {

    /**
     * Computes the MD5 hash of a string.
     *
     * <p>NOTE(review): uses the platform default charset via
     * {@code String.getBytes()}; consider an explicit charset — verify what
     * existing callers expect before changing.
     *
     * @param input the string to hash
     * @return 32-character lowercase hex digest, or null if MD5 is unavailable
     */
    public static String getMD5Hash(String input) {
        return getMD5Hash(input.getBytes());
    }

    /**
     * Computes the MD5 hash of a byte array.
     *
     * @param input the bytes to hash
     * @return 32-character lowercase hex digest, or null if MD5 is unavailable
     */
    public static String getMD5Hash(byte[] input) {
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            md5.update(input);
            return toHex(md5.digest());
        } catch (NoSuchAlgorithmException e) {
            // MD5 is mandated by the JDK spec; treat absence as "no hash".
            return null;
        }
    }

    /**
     * Computes the MD5 hash of a stream, reading it to exhaustion. The stream
     * is not closed by this method.
     *
     * @param in the stream to hash
     * @return 32-character lowercase hex digest, or null if MD5 is unavailable
     * @throws IOException if reading the stream fails
     */
    public static String getMD5Hash(InputStream in) throws IOException {
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            byte[] buffer = new byte[4096];
            int c;
            while ((c = in.read(buffer)) != -1) {
                md5.update(buffer, 0, c);
            }
            return toHex(md5.digest());
        } catch (NoSuchAlgorithmException e) {
            return null;
        }
    }

    /**
     * Hex-encodes a digest: each byte becomes exactly two lowercase hex
     * characters, so a 16-byte MD5 digest yields a 32-character string.
     *
     * <p>Bug fix: the previous implementation used
     * {@code Integer.toHexString(b & 0xFF)}, which drops the leading zero of
     * any byte below 0x10 and therefore produced variable-length,
     * non-standard hash strings.
     *
     * @param digest the message digest bytes
     * @return lowercase hexadecimal string, two characters per byte
     */
    public static String toHex(byte[] digest) {
        StringBuilder buf = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            buf.append(String.format("%02x", b));
        }
        return buf.toString();
    }
}
<reponame>DunnCreativeSS/cash_carry_leveraged_futures_arbitrageur # -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxtpro.base.exchange import Exchange import ccxt.async_support as ccxt from ccxtpro.base.cache import ArrayCache class bitstamp(Exchange, ccxt.bitstamp): def describe(self): return self.deep_extend(super(bitstamp, self).describe(), { 'has': { 'ws': True, 'watchOrderBook': True, 'watchTrades': True, 'watchOHLCV': False, 'watchTicker': False, 'watchTickers': False, }, 'urls': { 'api': { 'ws': 'wss://ws.bitstamp.net', }, }, 'options': { 'watchOrderBook': { 'type': 'order_book', # detail_order_book, diff_order_book }, 'tradesLimit': 1000, 'OHLCVLimit': 1000, }, }) async def watch_order_book(self, symbol, limit=None, params={}): await self.load_markets() market = self.market(symbol) options = self.safe_value(self.options, 'watchOrderBook', {}) type = self.safe_string(options, 'type', 'order_book') messageHash = type + '_' + market['id'] url = self.urls['api']['ws'] request = { 'event': 'bts:subscribe', 'data': { 'channel': messageHash, }, } subscription = { 'messageHash': messageHash, 'type': type, 'symbol': symbol, 'method': self.handle_order_book_subscription, 'limit': limit, 'params': params, } message = self.extend(request, params) future = self.watch(url, messageHash, message, messageHash, subscription) return await self.after(future, self.limit_order_book, symbol, limit, params) async def fetch_order_book_snapshot(self, client, message, subscription): symbol = self.safe_string(subscription, 'symbol') limit = self.safe_integer(subscription, 'limit') params = self.safe_value(subscription, 'params') messageHash = self.safe_string(subscription, 'messageHash') # todo: self is a synch blocking call in ccxt.php - make it async snapshot = await self.fetch_order_book(symbol, limit, params) orderbook = 
self.safe_value(self.orderbooks, symbol) if orderbook is not None: orderbook.reset(snapshot) # unroll the accumulated deltas messages = orderbook.cache for i in range(0, len(messages)): message = messages[i] self.handle_order_book_message(client, message, orderbook) self.orderbooks[symbol] = orderbook client.resolve(orderbook, messageHash) def handle_delta(self, bookside, delta): price = self.safe_float(delta, 0) amount = self.safe_float(delta, 1) id = self.safe_string(delta, 2) if id is None: bookside.store(price, amount) else: bookside.store(price, amount, id) def handle_deltas(self, bookside, deltas): for i in range(0, len(deltas)): self.handle_delta(bookside, deltas[i]) def handle_order_book_message(self, client, message, orderbook, nonce=None): data = self.safe_value(message, 'data', {}) microtimestamp = self.safe_integer(data, 'microtimestamp') if (nonce is not None) and (microtimestamp <= nonce): return orderbook self.handle_deltas(orderbook['asks'], self.safe_value(data, 'asks', [])) self.handle_deltas(orderbook['bids'], self.safe_value(data, 'bids', [])) orderbook['nonce'] = microtimestamp timestamp = int(microtimestamp / 1000) orderbook['timestamp'] = timestamp orderbook['datetime'] = self.iso8601(timestamp) return orderbook def handle_order_book(self, client, message): # # initial snapshot is fetched with ccxt's fetchOrderBook # the feed does not include a snapshot, just the deltas # # { # data: { # timestamp: '1583656800', # microtimestamp: '1583656800237527', # bids: [ # ["8732.02", "0.00002478", "1207590500704256"], # ["8729.62", "0.01600000", "1207590502350849"], # ["8727.22", "0.01800000", "1207590504296448"], # ], # asks: [ # ["8735.67", "2.00000000", "1207590693249024"], # ["8735.67", "0.01700000", "1207590693634048"], # ["8735.68", "1.53294500", "1207590692048896"], # ], # }, # event: 'data', # channel: 'detail_order_book_btcusd' # } # channel = self.safe_string(message, 'channel') subscription = self.safe_value(client.subscriptions, channel) 
symbol = self.safe_string(subscription, 'symbol') type = self.safe_string(subscription, 'type') orderbook = self.safe_value(self.orderbooks, symbol) if orderbook is None: return message if type == 'order_book': orderbook.reset({}) self.handle_order_book_message(client, message, orderbook) client.resolve(orderbook, channel) # replace top bids and asks elif type == 'detail_order_book': orderbook.reset({}) self.handle_order_book_message(client, message, orderbook) client.resolve(orderbook, channel) # replace top bids and asks elif type == 'diff_order_book': # process incremental deltas nonce = self.safe_integer(orderbook, 'nonce') if nonce is None: # buffer the events you receive from the stream orderbook.cache.append(message) else: try: self.handle_order_book_message(client, message, orderbook, nonce) client.resolve(orderbook, channel) except Exception as e: if symbol in self.orderbooks: del self.orderbooks[symbol] if channel in client.subscriptions: del client.subscriptions[channel] client.reject(e, channel) async def watch_trades(self, symbol, since=None, limit=None, params={}): await self.load_markets() market = self.market(symbol) options = self.safe_value(self.options, 'watchTrades', {}) type = self.safe_string(options, 'type', 'live_trades') messageHash = type + '_' + market['id'] url = self.urls['api']['ws'] request = { 'event': 'bts:subscribe', 'data': { 'channel': messageHash, }, } subscription = { 'messageHash': messageHash, 'type': type, 'symbol': symbol, 'limit': limit, 'params': params, } message = self.extend(request, params) future = self.watch(url, messageHash, message, messageHash, subscription) return await self.after(future, self.filter_by_since_limit, since, limit, 'timestamp', True) def parse_trade(self, trade, market=None): # # { # buy_order_id: 1211625836466176, # amount_str: '1.08000000', # timestamp: '1584642064', # microtimestamp: '1584642064685000', # id: 108637852, # amount: 1.08, # sell_order_id: 1211625840754689, # price_str: '6294.77', 
# type: 1, # price: 6294.77 # } # microtimestamp = self.safe_integer(trade, 'microtimestamp') if microtimestamp is None: return super(bitstamp, self).parse_trade(trade, market) id = self.safe_string(trade, 'id') timestamp = int(microtimestamp / 1000) price = self.safe_float(trade, 'price') amount = self.safe_float(trade, 'amount') cost = None if (price is not None) and (amount is not None): cost = price * amount symbol = None marketId = self.safe_string(trade, 's') if marketId in self.markets_by_id: market = self.markets_by_id[marketId] if (symbol is None) and (market is not None): symbol = market['symbol'] side = self.safe_integer(trade, 'type') side = 'buy' if (side == 0) else 'sell' return { 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': id, 'order': None, 'type': None, 'takerOrMaker': None, 'side': side, 'price': price, 'amount': amount, 'cost': cost, 'fee': None, } def handle_trade(self, client, message): # # { # data: { # buy_order_id: 1207733769326592, # amount_str: "0.14406384", # timestamp: "1583691851", # microtimestamp: "1583691851934000", # id: 106833903, # amount: 0.14406384, # sell_order_id: 1207733765476352, # price_str: "8302.92", # type: 0, # price: 8302.92 # }, # event: "trade", # channel: "live_trades_btcusd" # } # # the trade streams push raw trade information in real-time # each trade has a unique buyer and seller channel = self.safe_string(message, 'channel') data = self.safe_value(message, 'data') subscription = self.safe_value(client.subscriptions, channel) symbol = self.safe_string(subscription, 'symbol') market = self.market(symbol) trade = self.parse_trade(data, market) array = self.safe_value(self.trades, symbol) if array is None: limit = self.safe_integer(self.options, 'tradesLimit', 1000) array = ArrayCache(limit) self.trades[symbol] = array array.append(trade) client.resolve(array, channel) def handle_order_book_subscription(self, client, message, subscription): type = 
self.safe_string(subscription, 'type') symbol = self.safe_string(subscription, 'symbol') if symbol in self.orderbooks: del self.orderbooks[symbol] if type == 'order_book': limit = self.safe_integer(subscription, 'limit', 100) self.orderbooks[symbol] = self.order_book({}, limit) elif type == 'detail_order_book': limit = self.safe_integer(subscription, 'limit', 100) self.orderbooks[symbol] = self.indexed_order_book({}, limit) elif type == 'diff_order_book': limit = self.safe_integer(subscription, 'limit') self.orderbooks[symbol] = self.order_book({}, limit) # fetch the snapshot in a separate async call self.spawn(self.fetch_order_book_snapshot, client, message, subscription) def handle_subscription_status(self, client, message): # # { # 'event': "bts:subscription_succeeded", # 'channel': "detail_order_book_btcusd", # 'data': {}, # } # channel = self.safe_string(message, 'channel') subscription = self.safe_value(client.subscriptions, channel, {}) method = self.safe_value(subscription, 'method') if method is not None: method(client, message, subscription) return message def handle_subject(self, client, message): # # { # data: { # timestamp: '1583656800', # microtimestamp: '1583656800237527', # bids: [ # ["8732.02", "0.00002478", "1207590500704256"], # ["8729.62", "0.01600000", "1207590502350849"], # ["8727.22", "0.01800000", "1207590504296448"], # ], # asks: [ # ["8735.67", "2.00000000", "1207590693249024"], # ["8735.67", "0.01700000", "1207590693634048"], # ["8735.68", "1.53294500", "1207590692048896"], # ], # }, # event: 'data', # channel: 'detail_order_book_btcusd' # } # channel = self.safe_string(message, 'channel') subscription = self.safe_value(client.subscriptions, channel) type = self.safe_string(subscription, 'type') methods = { 'live_trades': self.handle_trade, # 'live_orders': self.handle_order_book, 'order_book': self.handle_order_book, 'detail_order_book': self.handle_order_book, 'diff_order_book': self.handle_order_book, } method = 
self.safe_value(methods, type) if method is None: return message else: return method(client, message) def handle_error_message(self, client, message): return message def handle_message(self, client, message): if not self.handle_error_message(client, message): return # # { # 'event': "bts:subscription_succeeded", # 'channel': "detail_order_book_btcusd", # 'data': {}, # } # # { # data: { # timestamp: '1583656800', # microtimestamp: '1583656800237527', # bids: [ # ["8732.02", "0.00002478", "1207590500704256"], # ["8729.62", "0.01600000", "1207590502350849"], # ["8727.22", "0.01800000", "1207590504296448"], # ], # asks: [ # ["8735.67", "2.00000000", "1207590693249024"], # ["8735.67", "0.01700000", "1207590693634048"], # ["8735.68", "1.53294500", "1207590692048896"], # ], # }, # event: 'data', # channel: 'detail_order_book_btcusd' # } # event = self.safe_string(message, 'event') if event == 'bts:subscription_succeeded': return self.handle_subscription_status(client, message) else: return self.handle_subject(client, message)
// Dump returns unexpired cache entries in the cache. They are deduplicated,
// but not usefully sorted. These objects should not be modified.
// NOTE(review): no expiry filtering is visible in this function itself —
// presumably c.forward only ever holds unexpired entries; confirm.
func (c *DNSCache) Dump() (lookups []*cacheEntry) {
	c.RLock()
	defer c.RUnlock()

	// Collect every entry reachable from the forward map. The same
	// *cacheEntry may appear under multiple names, hence the dedup below.
	lookups = make([]*cacheEntry, 0, len(c.forward))
	for _, entries := range c.forward {
		for _, entry := range entries {
			lookups = append(lookups, entry)
		}
	}

	// Sort by pointer address purely so that identical pointers become
	// adjacent; the resulting order is meaningless to callers ("not usefully
	// sorted" above).
	sort.Slice(lookups, func(i, j int) bool {
		return uintptr(unsafe.Pointer(lookups[i])) < uintptr(unsafe.Pointer(lookups[j]))
	})

	// Single pass dedup of adjacent identical pointers, reusing the backing
	// array of lookups.
	deduped := lookups[:0]
	for readIdx, lookup := range lookups {
		if readIdx == 0 || deduped[len(deduped)-1] != lookups[readIdx] {
			deduped = append(deduped, lookup)
		}
	}

	return deduped
}
#define _GNU_SOURCE

#include <string>
#include <memory>
#include <stack>
#include <map>
#include <queue>
#include <vector>     // std::vector (was missing; previously relied on transitive includes)
#include <cstdlib>    // atoi (was missing for the same reason)
#include <algorithm>
#include <iostream>
#include <unistd.h>   // getcwd

using namespace std;

// Returns the precedence of a binary operator:
// 1 for '+'/'-', 2 for '*'/'/', and -1 for anything else (e.g. '('),
// so a '(' left on the stack never outranks a real operator.
int getPriority(const char& ch)
{
    if (ch == '+' || ch == '-') return 1;
    if (ch == '*' || ch == '/') return 2;
    return -1;
}

// True if ch is a decimal digit.
bool isDigit(char ch)
{
    return ch >= '0' && ch <= '9';
}

// True if the token is an operator, i.e. it does not start with a digit.
// Assumes str is non-empty.
bool isOperator(const string& str)
{
    return !(str[0] >= '0' && str[0] <= '9');
}

// Applies the binary operator op to left and right.
// Integer division truncates toward zero; division by zero is not checked.
int calculate(int left, int right, const string& op)
{
    switch (op[0]) {
        case '+': return left + right;
        case '-': return left - right;
        case '*': return left * right;
        case '/': return left / right;
    }
    return 0;  // unreachable for the four supported operators
}

// Converts an infix expression to postfix (shunting-yard) and evaluates it.
// Despite the name, the return value is the numeric result of the
// expression, not the postfix string. Supports +, -, *, / and parentheses
// over non-negative integer literals; spaces are ignored.
int getSuffix(const string& expression)
{
    vector<string> suffix;   // postfix token stream
    stack<char> operators;   // pending operators and '('
    size_t i = 0, len = expression.size();
    while (i < len) {
        char cur = expression[i];
        if (cur == ' ') {
            ++i;            // skip whitespace
            continue;
        }
        if (isDigit(cur)) {
            // A number goes straight to the output, digit run at a time.
            size_t start = i;
            while (i < len && isDigit(expression[i])) {
                ++i;
            }
            suffix.push_back(expression.substr(start, i - start));
        } else {
            if (operators.empty() || cur == '(') {
                operators.push(cur);
            } else if (cur != ')') {
                // Operator: pop every stacked operator of greater-or-equal
                // precedence to the output, then push the new one.
                // '(' has priority -1, so popping stops at a parenthesis.
                while (!operators.empty()
                       && getPriority(operators.top()) >= getPriority(cur)) {
                    suffix.emplace_back(1, operators.top());
                    operators.pop();
                }
                operators.push(cur);
            } else {
                // ')' : pop operators to the output until the matching '('
                // is found, then discard the '(' itself.
                while (operators.top() != '(') {
                    suffix.emplace_back(1, operators.top());
                    operators.pop();
                }
                operators.pop();
            }
            ++i;
        }
    }
    // Flush the remaining operators.
    while (!operators.empty()) {
        suffix.emplace_back(1, operators.top());
        operators.pop();
    }

    // Evaluate the postfix stream with an operand stack.
    stack<int> nums;
    for (const auto& token : suffix) {
        if (isOperator(token)) {
            int rhs = nums.top(); nums.pop();
            int lhs = nums.top(); nums.pop();
            nums.push(calculate(lhs, rhs, token));
        } else {
            nums.push(atoi(token.c_str()));
        }
    }
    return nums.top();
}

int main()
{
    // NOTE(review): the getcwd/"Cpp" lines look like leftover debugging;
    // they only print the working directory and the index of "Cpp" in it.
    char buffer[1024];
    getcwd(buffer, sizeof(buffer));
    string path(buffer);
    size_t idx = path.find("Cpp");
    cout << path << ":" << idx << endl;

    string test = "21+((42-2)*15+6)-18";
    cout << getSuffix(test) << endl;  // expected: 609
}
** All donations are TAX-DEDUCTIBLE through Truth Aid. Truth Aid is a nonprofit social venture that produces media to raise awareness about important social issues. ** We have raised 60% of our budget to date. We now need to raise the remaining $150,000, but our goal is to raise $30,000 through this campaign which will allow us to begin principal photography in the Spring of 2012. Every dollar helps! If you make a pledge and we do not meet our goal of $30,000.00 your pledge returns to you. In fact, your credit card is only deducted if we meet our goal. THE FILM: Oblivion is a feature length narrative film based on a true story about the legal precedent setting court case that outlawed the practice of abduction for marriage in Ethiopia – also referred to as “telefa”. It tells the story of a 14-year old girl named Aberash Bekele who was accused of murder after killing the 29-year old man who raped, beat, and abducted her in an attempt to marry her. Aberash was defending herself but customary practice did not allow it. She was charged with murder by the local police and kept in prison without bail. A female lawyer named Meaza Ashenafi heard about the case and decided to represent Aberash. With the odds stacked against them, these two women fight a drawn out legal battle over a harrowing two year period that turns a new chapter in Ethiopiaʼs history and changes the lives of Ethiopian women forever. Oblivion will be shot in Ethiopia on 35mm film in Spring of 2012. The film’s spoken language is Amharic, the national language of Ethiopia. Oblivion has received support from Panavison Cameras, Pipeline Production Support, Ministry of Culture Ethiopia, Julie Mehretu, Jessica Rankin and many other amazing individuals like you.
Urinary Tract Infection Among Women of Childbearing Age in the Rajshahi Region 1. Lecturer, Department of Microbiology, Rajshahi Medical College, Rajshahi, Bangladesh. 2 Lecturer, Department of Microbiology, Rajshahi Medical College, Rajshahi, Bangladesh. 3 Associate Professor, Department of Microbiology, Rajshahi Medical College, Rajshahi, Bangladesh. 4 Professor, Department of Microbiology, Rajshahi Medical College, Rajshahi, Bangladesh. 5 Lecturer, Department of Virology, Rajshahi Medical College, Rajshahi, Bangladesh. 6 Professor, Institute of Biological Sciences, University of Rajshahi, Bangladesh. Introduction The term "urinary tract infection" (UTI) is used to describe an infection of part or all of the urinary system. UTIs are among the most common bacterial infections, affecting 150 million people each year worldwide. 1 Women, particularly those in their reproductive age group, are prone to urinary tract infection due to their short urethra and the close proximity of the urethral opening to the anal opening. Furthermore, general habits and hygiene during menstruation greatly influence infection in this age group. UTI is defined as the presence of >10 5 cfu/milliliter of urine in either symptomatic or asymptomatic patients. A positive urine culture with accompanying pyuria (>5 WBC/HPF) is the surest feature of UTI. 2 The main organisms are E. coli, Klebsiella, Proteus, Pseudomonas, Staphylococcus and Enterococcus. 3 These common organisms are showing increasing resistance to commonly prescribed antibiotics, which is a major concern nowadays. There are isolated reports of UTI from various parts of Bangladesh, but comprehensive data for the northern region are lacking, particularly for women of childbearing age. 
The information emerging from the present study is believed to be useful in understanding the proportion and etiological agents of UTIs and their sensitivities to available drugs which in turn would be of immense value to rational selections and use of antimicrobial agents. Methodology This study was a cross sectional type of descriptive study carried out in the Molecular Biology laboratory, Institute of Biological Sciences, University of Rajshahi during the period of July 2014 to June 2017 availing also some of the laboratory facilities of Department of Microbiology, Rajshahi Medical College. A total of 450 women in their child bearing age (15-45 years) were included in this study. Urine samples were screened by microscopic examination to detect significant pyuria (5 pus cell/ HPF) and were further cultured on Mac Conkey agar, Blood agar and Nutrient agar following all standard procedure. Positive growths were confirmed by relevant biochemical tests and subject to antibiotic susceptibility test (kirby bouer disk diffusion method). All clinical information was received in predesigned data sheet and lab reports were recorded and analyzed. Results Out of the 450 patients one hundred fifty one (151) patients were diagnosed of having UTI by positive urine culture. The proportion rate of UTI of the present study population was therefore 33.55% . Among the 151 confirm diagnosed UTI patients, asymptomatic UTI (Group A) was diagnosed in 54 (35.7%) women whereas symptomatic UTI (Group B) was found in 97 (64.2%) patients. The proportion of symptomatic UTI was therefore higher than asymptomatic UTI. Table I. Distribution of asymptomatic UTI (Group A) and symptomatic UTI (Group-B) subjects with respect to medicodemographic characteristics (n= 151). Data presented in Table 1 shows that majority of the UTI incidence was observed in married women 95 (62.91%) and the number of symptomatic cases is 57 (37.75%). 
Symptomatic UTI is also seen at a higher rate 68 (45.03%) among pregnant women compared to non-pregnant cases 29 (19.20%). The proportion of symptomatic UTI cases were observed higher in diabetic patients 61 (40.40%) in comparison with non-diabetic cases 36 (23.84%). The table III shows the proportion of UTI among sanitary napkin users and non-users. Women's using sanitary napkins regularly had a lower 31 (20.53%) incidence rate of UTI as compared to those not using sanitary napkin 74 (49%). Discussion Against the background of paucity of reports of urinary tract infection in the tertiary level hospitals in the Rajshahi, this study was aimed at determining the proportion of UTI in reproductive age group of female. Among the 450 patients screened for detection of UTI, 151 patients have been diagnosed to be suffering from UTI (both symptomatic and asymptomatic). Thus the proportion rate of UTI of the study population was 33.55%. Among the 151 confirmed diagnosed UTI patients, asymptomatic UTI was diagnosed in 54 (35.77%) women whereas symptomatic UTI in 97 (64.2%) patients. The proportion of symptomatic UTI was therefore higher than asymptomatic UTI (Table I). The increased proportion of UTI cases among female in their reproductive age group is due to their shorter urethra and close proximity of urethral opening to anal opening. Again, the higher rate of symptomatic UTI among women may be due to lack of awareness prior to development of symptom. The distribution of asymptomatic UTI and symptomatic UTI subjects with respect to medico-demographic characteristics i.e. marital status (single, married, widow/divorced), pregnancy (pregnant, non-pregnant), diabetes (diabetic, non-diabetic) were observed in the current study. 
In this study, most (62.91%) of the UTI sufferers were married women, whereas roughly one third (32.45%) were single. This finding of ours is in good agreement with the findings 4 in Nigeria, which also reported that the incidence of UTIs was higher in married women (78.51%) than in single women (59.47%). The reason behind this high rate of UTI among married women may be increased sexual activity. 5 Again, the incidence of UTIs with respect to marital status revealed a higher proportion of both asymptomatic UTI (25.20%) and symptomatic UTI (37.7%) in married subjects as compared to singles, among whom the incidence rate of asymptomatic UTI was 7.9% and that of symptomatic UTI was 24.5%. UTI is also a common problem in pregnancy due to the increase in sex hormones and the anatomical and physiological changes during pregnancy. Pregnant women are at high risk of UTI. During pregnancy, the chemical constitution of urine is also affected, resulting in increased urinary substances, e.g. glucose and amino acids, which may facilitate bacterial growth in urine. 6 However, this seemingly benign condition may have serious consequences if left untreated. In the present study, a predominance of both asymptomatic and symptomatic UTIs among pregnant women (31.7% vs. 45%) was noticed as compared to non-pregnant women (29.63% vs. 29.90%). Thus the incidence of UTI was more than twice as high in pregnant women as in non-pregnant women. This finding of this study agrees very well with the earlier study. Diabetes mellitus (DM) has long been considered to be a predisposing factor for UTI. 9 In a study 10 , the prevalence of UTI in diabetic subjects was found to be higher when compared with non-diabetic subjects (9% vs. 0.78%). The prevalence of symptomatic and asymptomatic UTIs was observed to be higher (nearly twice) in diabetic subjects (40.4% vs. 21.2%) as compared to non-diabetic subjects (23.8% vs. 14.5%), as presented in Table I. 
This finding of this study is comparable with the findings of Sabahat Saeed in Pakistan. 11 Diabetes Mellitus, in general is a predisposing factor to various types of infection as it lowers the body immunity. Furthermore sugar in urine facilitates bacterial growth ultimately leading to UTI. Results of incidence of UTI in relation to use and no use of commercial sanitary napkin during menstrual cycle of women have been found to play a significant role in preventing the incidence of UTI. Women using sanitary napkins regularly had lower incidence rate (20.53%). Use of traditional clothes in place of sanitary napkin leads to unhygienic condition and creates a moist environment which facilitates bacterial growth and colonization. This finding of ours is similar to the observation of Ahmed and Avasarala 12 in Karimnagar district, AP, India who reported that the prevalence of UTI was significantly more (9.9%) in those girls using unsanitary pads during menstruation. Another study 4 in Nigeria also reported that the incidence of UTIs was 88.81% and 59.94% among women using tissue paper alone as sanitary napkin and tissue together with hygienic pads, respectively. Results of UTI based upon type of toilet use in our study clearly demonstrated very high proportion 77 (51.0%) among women not using sanitary latrine at all. In general the hygiene and cleanliness of women without sanitary latrine is very poor. Less use of water and soap leads to fecal contamination of urethra. However, there was no study available to be compared with our findings. Conclusion UTI has a high rate among women in their childbearing age, particularly when associated with pregnancy and diabetes. The predisposing factors for very high prevalence of UTI among women of the study area revealed from the present study appeared to be due to illiteracy, ignorance about UTI and its consequence and not practicing health and hygiene factors properly. 
Again, use of commercial sanitary napkin has been found to play a significant role in preventing incidence of UTI. But its use is still extremely low among the subjects of the study possibly due to the high prices of sanitary napkins, which is not affordable by them. Good personal hygiene and awareness can greatly reduce its incidence and prevent further complications.
// NewQAChecker creates a new checker. // By default, all errors are enabled; // call Configure to disable them selectively. func NewQAChecker(errorf func(format string, args ...interface{})) *QAChecker { ck := QAChecker{errorf: errorf, out: os.Stderr} ck.Configure("*_test.go", "*", "SetUpTest", -EMissingTest) ck.Configure("*_test.go", "*", "TearDownTest", -EMissingTest) ck.Configure("gobco_*.go", "gobco*", "*", -EMissingTest) ck.Configure("gobco_*.go", "", "gobco*", -EMissingTest) return &ck }
from django.template import Context
from django.template import loader
from django.http import Http404
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.conf import settings
from django.core import serializers
from django.http import HttpResponsePermanentRedirect
from django.core.urlresolvers import reverse

from r66.models import *


def _pop_session_messages(request):
    """Return and clear the 'messages' and 'errors' lists from the session.

    Each present key is replaced with an empty list so messages are shown
    at most once; absent keys are treated as empty lists and left unset.
    """
    messages = []
    errors = []
    if 'messages' in request.session:
        messages = request.session['messages']
        request.session['messages'] = []
    if 'errors' in request.session:
        errors = request.session['errors']
        request.session['errors'] = []
    # Django's session middleware cannot detect the in-place changes above,
    # so explicitly flag the session as dirty.
    request.session.modified = True
    return messages, errors


def _create_context(request):
    """Build the common template context dict containing any pending
    session messages and error messages."""
    messages, errors = _pop_session_messages(request)
    return {
        'messages': messages,
        'error_messages': errors,
    }


def serialized_array_to_dict(a):
    """Convert a jQuery serializeArray()-style list of
    ``{"name": ..., "value": ...}`` entries into a plain dict.

    When a name repeats, the last entry wins (same as the original loop).
    """
    return {item["name"]: item["value"] for item in a}
{-# LANGUAGE NamedFieldPuns #-}

-- | Advent of Code 2017, day 8: interpret conditional register
-- instructions and report the largest register values.
module Sol08 (run) where

import Text.Parsec
import Text.Parsec.String
import Debug.Trace (trace)
import Data.Maybe (fromMaybe)
import qualified Data.Map as M
import Data.List (maximumBy, sort)
import Data.Ord (comparing)
import Control.Monad.Writer

-- | Arithmetic update applied to a register (inc => (+), dec => (-)).
type Op = (Int -> Int -> Int)

-- | Comparison used in an instruction's guard.
type Comp = (Int -> Int -> Bool)

-- | One instruction: @reg op val if compReg comp compVal@.
data Inst = Inst
  { reg     :: String  -- ^ register being updated
  , op      :: Op      -- ^ update operation
  , val     :: Int     -- ^ operand of the update
  , compReg :: String  -- ^ register inspected by the guard
  , comp    :: Comp    -- ^ guard comparison
  , compVal :: Int     -- ^ guard constant
  }

parseOp :: Parser Op
parseOp = (string "inc" >> return (+))
      <|> (string "dec" >> return (-))

parseComp :: Parser Comp
parseComp = do
  s <- many1 (oneOf "<>!=")
  case s of
    "<=" -> return (<=)
    ">=" -> return (>=)
    "==" -> return (==)
    "!=" -> return (/=)
    ">"  -> return (>)
    "<"  -> return (<)
    -- Previously a non-exhaustive case: any other operator string crashed
    -- with a pattern-match failure.  Fail inside the parser instead so the
    -- error is reported as a parse error.
    _    -> fail ("unknown comparison operator: " ++ s)

-- | Parse a possibly negative integer literal.
parseInt :: Parser Int
parseInt = do
  s <- many1 (char '-' <|> digit)
  return (read s)

-- Example input lines:
-- vyo inc -735 if hnh > -7
-- bz dec -959 if gx < 9
parseInst :: Parser Inst
parseInst = do
  reg <- many1 letter
  char ' '
  op <- parseOp
  char ' '
  val <- parseInt
  string " if "
  compReg <- many1 letter
  char ' '
  comp <- parseComp
  char ' '
  compVal <- parseInt
  newline
  return $ Inst { reg = reg, op = op, val = val
                , compReg = compReg, comp = comp, compVal = compVal }

-- | Register environment; unmentioned registers default to 0.
type Env = M.Map String Int

-- | Execute one instruction.  Every value actually written is also
-- logged to the Writer so the historical maximum (part 2) can be
-- recovered later.
runInst :: Env -> Inst -> Writer [Int] Env
runInst e (Inst{reg, op, val, compReg, comp, compVal}) =
  if comp (fromMaybe 0 (M.lookup compReg e)) compVal
    then let newV = op (fromMaybe 0 (M.lookup reg e)) val
         in do tell [newV]
               return (M.insert reg newV e)
    else return e

-- | Largest (register, value) pair after all instructions have run.
part1 :: [Inst] -> (String, Int)
part1 insts =
  let final = foldM runInst M.empty insts
      (result, _) = runWriter final :: (Env, [Int])
  in maximumBy (comparing snd) (M.toList result)

-- | Largest value ever held by any register during execution.
part2 :: [Inst] -> Int
part2 insts =
  let final = foldM runInst M.empty insts
      (_, vals) = runWriter final :: (Env, [Int])
  in last (sort vals)

parseInput :: String -> String -> Either ParseError [Inst]
parseInput fname input = parse (many parseInst) fname input

-- | Entry point: parse data/08.txt and print both answers.
run :: IO ()
run =
  let fname = "data/08.txt"
  in do input <- readFile fname
        putStrLn $ "part 1: " ++ show (fmap part1 (parseInput fname input))
        putStrLn $ "part 2: " ++ show (fmap part2 (parseInput fname input))
Biocompatibility of mineral trioxide aggregate flow and biodentine AIM To evaluate the influence of powder-to-gel ratio (0.19 g powder to 50 μL of gel, thick MTA Flow, and 0.06 g powder to 50 μL of gel, fluid MTA Flow) on biocompatibility of MTA Flow (Ultradent Products Inc., South Jordan, UT, USA, lot: 2015122901) and compare it with Biodentine (Septodont Inc., Saint-Maur-des-Fossés, France, lot: B18542A). METHODOLOGY The materials were manipulated and inserted into polyethylene tubes for implantation in twenty rats. After 7, 15, 30 and 60 days, the specimens were removed and embedded in paraffin. Haematoxylin and eosin sections were used to count the number of inflammatory cells (IC) and fibroblasts mm-2 (Fb). In the Masson's trichrome-stained sections, the fibrous capsule thickness was measured; picrosirius red-stained sections were used for birefringent collagen quantification. The data were submitted to two-way ANOVA and Tukey test (P ≤ 0.05). RESULTS A significantly lower number of IC and consequently higher number of Fb were observed in the capsules adjacent to thick MTA Flow at all periods, in comparison with other materials (P ≤ 0.05). At 60 days, the quantity of birefringent collagen was significantly greater in the tissue in contact with thick MTA Flow, when compared with fluid MTA Flow and Biodentine. CONCLUSIONS Although thick MTA Flow induced a less intense inflammatory response, all evaluated materials are biocompatible because they allowed regression of this process after 60 days.
<gh_stars>10-100 package windowsupdate import ( "errors" "github.com/go-ole/go-ole" "github.com/go-ole/go-ole/oleutil" ) type Update struct { disp *ole.IDispatch Identity IUpdateIdentity Title string IsDownloaded bool IsInstalled bool } type IUpdateIdentity struct { RevisionNumber int32 UpdateID string } var UpdateNotFoundError = errors.New("Update not found") func (s *Session) FindByUpdateID(updateID string) (Update, error) { updates, err := s.Search("UpdateID='" + updateID + "'") if err != nil { return Update{}, err } if len(updates) == 0 { return Update{}, UpdateNotFoundError } return updates[0], nil } func (s *Session) Search(criteria string) ([]Update, error) { searcher, err := toIDispatchErr(oleutil.CallMethod((*ole.IDispatch)(s), "CreateUpdateSearcher")) if err != nil { return nil, err } result, err := toIDispatchErr(oleutil.CallMethod(searcher, "Search", criteria)) if err != nil { return nil, err } updatesDisp, err := toIDispatchErr(oleutil.GetProperty(result, "Updates")) if err != nil { return nil, err } return toUpdates(updatesDisp) } func toUpdates(updatesDisp *ole.IDispatch) ([]Update, error) { count, err := toInt32Err(oleutil.GetProperty(updatesDisp, "Count")) if err != nil { return nil, err } var updates []Update for i := 0; i < int(count); i++ { updateDisp, err := toIDispatchErr(oleutil.GetProperty(updatesDisp, "Item", i)) if err != nil { return nil, err } update, err := toUpdate(updateDisp) if err != nil { return nil, err } updates = append(updates, update) } return updates, nil } func toUpdate(updateDisp *ole.IDispatch) (update Update, err error) { update.disp = updateDisp identity, err := toIDispatchErr(oleutil.GetProperty(updateDisp, "Identity")) if err != nil { return update, err } if update.Identity.RevisionNumber, err = toInt32Err(oleutil.GetProperty(identity, "RevisionNumber")); err != nil { return update, err } if update.Identity.UpdateID, err = toStrErr(oleutil.GetProperty(identity, "UpdateID")); err != nil { return update, err } if 
update.Title, err = toStrErr(oleutil.GetProperty(updateDisp, "Title")); err != nil { return update, err } if update.IsDownloaded, err = toBoolErr(oleutil.GetProperty(updateDisp, "IsDownloaded")); err != nil { return update, err } if update.IsInstalled, err = toBoolErr(oleutil.GetProperty(updateDisp, "IsInstalled")); err != nil { return update, err } return update, nil }
import * as React from "react";

import { IEmojiProps } from "../../styled";

// Auto-generated SVG emoji component ("person gesture", variant 19).
// The path data below is machine-generated artwork; regenerate it from
// the source SVG rather than editing coordinates by hand.
const SvgPersonGesture19 = (props: IEmojiProps) => (
  <svg viewBox="0 0 72 72" width="1em" height="1em" {...props}>
    <path
      fill="#92d3f5"
      d="M22.494 65l-.218-2.813a23.426 23.426 0 01-4.336-3.518L21.917 48.6c1.023-2.591 1.635-3.56 3.023-2.591 3.007 2.099 6.346 2.922 7.625 3.85 4.328 3.692 12.34 4.625 19.34 1.46 3 0 9.57 3.821 9.57 9.09v4.546z"
    />
    <g fill="#fadcbc">
      <path d="M31.907 32.508v.957c0 7.649 4.61 13.642 10.494 13.642s10.494-5.993 10.494-13.642c0-4.654-1.778-7.516-5.284-11.135a64.146 64.146 0 01-8.988 5.772c-2.647 1.514-6.255 3.57-6.716 4.406zM18.63 55.99c-2.624-3.249-5.712-4.458-6.212-6.458-.543-2.169-3.193-15.63-2.192-19.88.4-2.535.484-5.111.25-7.666-.06-.998.844-4.688.844-6.688a15.052 15.052 0 01.375-3.938c1.188-1.725 2.872-.732 2.898.498l1.039-4.342c.187-1.28 3.698-2.219 3.188 1.25l.45-2.48c.681-1.426 2.38-1.16 2.758-.693.622.809.303 1.542.256 2.536 0 0 2.254-.019 1.67 3.002-.19.982-1.632 10.23-1.632 10.23.458-.458 1.54-4.699 3.124-4.699 2.119 0 1.904 1.65 1.904 1.65s-.603 1.433-1.281 2.673c-1.012 1.851-2.281 5.5-3.281 6.5a45.64 45.64 0 01-5.222 3l2.91 12.25s.167.538 2.496 2.256" />
    </g>
    <path
      fill="#a57939"
      d="M59.754 37.19a23.742 23.742 0 01-1.158-2.634c-1.472-3.27-2.023-4.492-2.348-10.523.027-3.693-3.794-6.785-7.475-7.093a1.002 1.002 0 01-.689-.344c-.092-.1-2.408-2.654-7.243-2.654-6.2 0-11.44 3.382-12.743 8.226a18.519 18.519 0 00-.564 6.176 13.317 13.317 0 01-.76 5.96 38.884 38.884 0 01-3.12 4.735l-1.678 4.439c.92.947 9.785 7.629 11.505 7.553 4.953 1.29 10.896 2.916 18.7 0a8.343 8.343 0 014.209 1.514 22.784 22.784 0 002.972-2.33c1.188-.715 3.125-8.283.392-13.026zM41.56 47.93c-10.956-.047-10.104-15.429-10.104-15.429C33.769 30.108 46.95 22.33 46.95 22.33c6.08 1.955 6.383 10.972 6.383 10.972S52.516 47.977 41.56 47.93z"
    />
    <path d="M48.332 32.972a2 2 0 11-2-2 2 2 0 012 2M40.332 32.972a2 2 0 11-2-2 2 2 0 012 2" />
    <path
      fill="none"
      stroke="#000"
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth={2}
      d="M39.46 40.972a6.553 6.553 0 006 0M22.474 63.99a2.982 2.982 0 00-.842-2.698c-3.158-2.446-4.158-3.302-4.158-3.302l5-12M20.183 6.782c-.71 3.924-1.125 6.631-1.71 11.208M21.542 21.365c.293-2.162 1.098-6.75 1.557-11.375"
    />
    <path
      fill="none"
      stroke="#000"
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth={2}
      d="M61.474 63.99s.09-2.25.09-3c0-7-9.09-10-9.09-10-7 2-11 2-16 1-3.276-.655-10.027-4.59-15-8 0 0-3.91-9.5-3.91-13.5 0-.583 5.552-1.994 7-6.5.782-2.431 1.811-5.674 1.811-5.674M14.474 17.99c.86-5.076 1.564-8.621 2-10M18.628 55.221c-5-4-6.588-5.12-7.154-8.231-2-11-1-14.582-1-18 0-1 .015 0 .015-6 .001-.812.508-4.394.985-7a25.278 25.278 0 011-4"
    />
    <path
      fill="none"
      stroke="#000"
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth={2}
      d="M24.141 40.242c.615-1.773 1.498-3.493 2.192-5.275 1.635-4.2-.21-6.452 1.29-12.035 1.48-5.5 7.278-8.965 13.71-8.965 5.4 0 8 3 8 3a8.132 8.132 0 017.405 8.037c.387 7.18 1.024 7.152 3.429 12.822a11.599 11.599 0 01-1.447 11.663"
    />
    <path
      fill="none"
      stroke="#000"
      strokeLinejoin="round"
      strokeWidth={2}
      d="M31.333 33.967c0 8 5 14 11 14s11-6 11-14c0-5-2-8-6-12-4 4-16 9-16 11z"
    />
  </svg>
);

export default SvgPersonGesture19;
"Hotness" stability of chicken hot-wing products as affected by preparation methods and storage. Chicken hot-drumettes containing 2% cayenne pepper were prepared using three different cooking methods and stored under two different temperatures for "hotness" evaluation. Results demonstrated that the hotness of the samples were highest when products were cooked in a microwave oven, followed by convection oven, and least hot by deep-fat frying. Regardless of the cooking method, the degree of hotness of the hot-drumette decreased drastically during refrigerated storage. Results also indicated that the loss of hotness was associated with the increase of 2-thiobarbituric acid (TBA) values in the product. The addition of antioxidants such as butylated hydroxyanisole (BHA), butylated hydroxytoluene (BHT), rosemary, and sodium nitrate during preparation or vacuum packaging retarded lipid oxidation of chicken hot-drumettes and retarded the loss of hotness of the products during refrigerated storage.
The effect of reagent excitation on the dynamics of the reaction O(1D2)+H2→OH(X 2Π)+H The effect of H2 translational, rotational, and vibrational excitation on the dynamics of the O(1D2)/H2 reaction are explored in a semiclassical trajectory study involving both of the energetically accessible potential energy surfaces of the system. Landau–Zener probabilities determine surface hopping. At low reagent excitation, the deep H2O potential minimum dominates the dynamics, causing the reagents to reorient towards a H–O–H (insertion) configuration and form the H2O intermediate, irrespective of the initial approach geometry. High vibrational excitation enhances the probability for transitions onto the excited state potential during the interaction. Reactions which sample the excited state potential have fundamentally different dynamics from those which remain on the lower state. For reactions involving H2(v=4), the OH product has a bimodal vibrational distribution, peaking in OH(v’=2) and OH(v’=9). The lower peak is due to reactions which access the excited state potential; the higher peak results...
def _render_object(self, obj, *context, **kwargs):
    """Render the template associated with an arbitrary object.

    A TemplateSpec instance carries its own template-location info and is
    loaded through a SpecLoader; any other object is loaded by convention
    via ``load_object``.  The object itself is prepended to the context
    stack so its attributes are visible during rendering.
    """
    base_loader = self._make_loader()
    if isinstance(obj, TemplateSpec):
        template = SpecLoader(base_loader).load(obj)
    else:
        template = base_loader.load_object(obj)
    full_context = [obj] + list(context)
    return self._render_string(template, *full_context, **kwargs)
// Note: the buffer in result must be at least readBufferSize long private void readRecordAtOffset(long recordFileOffset, ReaderResult result) throws IOException { result.getBuffer().rewind(); result.getBuffer().limit(readBufferSize); recordFile.read(result.getBuffer(), recordFileOffset); result.getBuffer().rewind(); int recordSize = EncodingHelper.decodeLittleEndianVarInt(result.getBuffer()); int bytesInRecordSize = result.getBuffer().position(); if (result.getBuffer().remaining() < recordSize) { int newSize = recordSize + EncodingHelper.MAX_VARINT_SIZE; result.requiresBufferSize(newSize); result.getBuffer().position(0); result.getBuffer().limit(recordSize); int bytesRead = 0; while (bytesRead < recordSize) { int bytesReadTemp = recordFile.read(result.getBuffer(), recordFileOffset + bytesInRecordSize + bytesRead); if (bytesReadTemp == -1) { break; } bytesRead += bytesReadTemp; } result.getBuffer().position(0); } result.getBuffer().limit(recordSize + result.getBuffer().position()); }
/**
 * Verifies Body.update by applying a single acceleration/time step to a
 * known body and comparing the resulting velocity and position against
 * hand-computed values.
 */
private static void checkUpdate() {
    System.out.println("Checking update...");
    Body body = new Body(1.0, 1.0, 3.0, 4.0, 5.0, "jupiter.gif");
    body.update(2.0, 1.0, -0.5);
    double tolerance = 0.01;
    checkEquals(3.4, body.xxVel, "xxVel update()", tolerance);
    checkEquals(3.8, body.yyVel, "yyVel update()", tolerance);
    checkEquals(7.8, body.xxPos, "xxPos update()", tolerance);
    checkEquals(8.6, body.yyPos, "yyPos update()", tolerance);
}
import Skill


class mining(Skill.skill):
    """Mining skill with a maximum level of 99.

    NOTE(review): mineSpeed shrinks as the level grows (0.4 at level 1,
    minus 0.004 per additional level) — presumably it is a delay/cooldown,
    so lower means faster mining; confirm against how Skill uses it.
    """

    def __init__(self, owner, overlay):
        super().__init__('Mining', owner, overlay, 99)
        self.baseSpeed = 0.4
        self.mineSpeed = self.baseSpeed
        self.speedDecrease = -0.004

    def setLevel(self, lvl):
        # Recompute the per-mine speed whenever the level changes.
        super().setLevel(lvl)
        self.mineSpeed = self.baseSpeed + (lvl - 1) * self.speedDecrease
def quarter_end_date(self):
    """Return the calendar date on which this fiscal quarter ends.

    For quarter 1 the end date falls in the calendar year before
    ``self.year``; for all later quarters it falls in ``self.year``.
    """
    # Sanity check: the table's first-quarter start must agree with the
    # configured fiscal-year start.
    assert self._QUARTER_NUM_TO_DATE[1][0] == (self._FISCAL_START_MONTH, self._FISCAL_START_DAY)
    end_month_day = self._QUARTER_NUM_TO_DATE[self.quarter][1]
    if self.quarter > 1:
        end_year = self.year
    else:
        end_year = self.year - 1
    return date(end_year, *end_month_day)
def _define_host(self, connector):
    """Look up (or define) the backend host object for this connector and
    return its bunch representation.

    NOTE(review): the original indentation of this function was lost; the
    nesting below — the generated-hostname fallback living inside the
    ``else`` branch — is the most coherent reading of the flattened text.
    Confirm against the upstream source before relying on it.
    """
    domain = self._get_pool_domain(connector)
    host_bunch = self._get_bunch_from_host(connector)
    # First, look for a host already defined under the connector-derived name.
    host = self._call_xiv_xcli(
        "host_list", host=host_bunch['name']).as_list
    connection_type = self._get_connection_type()
    if len(host) == 0:
        # No host with that name exists yet: define it (CHAP settings are
        # applied by the helper) and refresh the bunch with the new id.
        LOG.debug("Non existing host, defining")
        host = self._define_host_according_to_chap(
            host=host_bunch['name'], in_domain=domain)
        host_bunch = self._get_bunch_from_host(connector, host.get('id'))
    else:
        host_bunch = self._get_bunch_from_host(connector,
                                               host[0].get('id'))
        # Fall back to a hostname generated from the connector's IQN
        # (handled by the storage helper), defining it if necessary.
        LOG.debug("Generating hostname for connector %(conn)s",
                  {'conn': connector})
        generated_hostname = storage.get_host_or_create_from_iqn(
            connector, connection=connection_type)
        generated_host = self._call_xiv_xcli(
            "host_list", host=generated_hostname).as_list
        if len(generated_host) == 0:
            host = self._define_host_according_to_chap(
                host=generated_hostname, in_domain=domain)
        else:
            host = generated_host[0]
        host_bunch = self._get_bunch_from_host(
            connector, host.get('id'), host_name=generated_hostname)
    LOG.debug("The host_bunch: %s", host_bunch)
    return host_bunch
package googletrans

import (
	"testing"
)

// sampleText is the English input used by all three tests.
const sampleText = "Go is an open source programming language that makes it easy to build simple, reliable, and efficient software. "

// TestDo exercises the low-level do call on the default translator.
func TestDo(t *testing.T) {
	params := TranslateParams{
		Src:  "auto",
		Dest: "zh-CN",
		Text: sampleText,
	}
	transData, err := defaultTranslator.do(params)
	if err != nil {
		// Fatal, not Error: transData is meaningless on failure, and the
		// original code kept running and logged a zero value.
		t.Fatal(err)
	}
	t.Logf("%+v\n", transData)
}

// TestTranslate exercises the public Translate API.
func TestTranslate(t *testing.T) {
	params := TranslateParams{
		Src:  "auto",
		Dest: "zh-CN",
		Text: sampleText,
	}
	translated, err := defaultTranslator.Translate(params)
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("%+v\n", translated)
}

// TestDetect exercises language detection.
func TestDetect(t *testing.T) {
	detected, err := defaultTranslator.Detect(sampleText)
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("%+v\n", detected)
}
Biphasic, energy-efficient, current-controlled stimulation back-end for retinal visual prosthesis This paper reports an energy-efficient waveform generator, dedicated to implantable retinal microstimulators. The circuit features flexible current-mode stimuli such as rising and falling exponential pulses in addition to rectangular pulses. In order to apply the stimulation current to the electrode at the defined current levels (±96μA) and with sufficient voltage headroom (±3V), a class AB second-generation current conveyor is designed as the output stage. To upconvert the 1.2-V supply voltage to ±3.3V, the output stage is equipped with an on-chip electrode-tissue driver. The duration of the generated current pulses is programmable within the range of 100μs to 3ms. Current-steering DACs are used to set the amplitudes of the pulses. They exhibit DNL and INL of 0.04 and 0.17LSB, respectively. The amplitude, duration, and time constant of the exponential pulses are independently programmable. Designed in the IBM 130nm process, the circuit occupies 1.5×1.5 mm2 of silicon area. Post-layout simulation results indicate that the stimuli generator meets the expected requirements when connected to an electrode-tissue impedance as high as 30kΩ. The proposed design consumes a maximum of 1.2mW in the rectangular-pulse mode.
import { ClientService } from "./client/client.service";
import { DbproviderService } from "./dbprovider/dbprovider.service";

/**
 * Factory provider registered under the 'ClientConnection' token.
 * Builds a DbproviderService from the database named in the client's details.
 */
export const dbConnectionFactory = {
    provide: 'ClientConnection',
    useFactory: (clientService: ClientService) => {
        const details = clientService.getClientDetails();
        return new DbproviderService(details.db);
    },
    inject: [ClientService],
};
def next_filename(filename, format="{basename:s}.{number:03d}{extension:s}",
                  suffix=None, plus=0, allow_natural=False, demand_natural=False):
    """Return the next unused numbered variant of ``filename``.

    Counts upward from 1 until ``format`` (rendered with ``basename``,
    ``extension`` and ``number``) names a path that does not exist, then
    returns that path (optionally offset by ``plus``).

    :param filename: base path; ``~`` is expanded.
        NOTE(review): if ``filename`` is None the expanduser step is skipped
        but ``os.path.exists(None)`` below will raise TypeError -- confirm
        callers never pass None.
    :param format: format string combining basename, extension and number.
    :param suffix: if given, replaces the original extension (a ``.`` is
        prepended).
    :param plus: offset added to the first free number before rendering.
    :param allow_natural: return ``filename`` itself when it does not exist.
    :param demand_natural: raise FileExistsError when ``filename`` exists.
    :raises FileExistsError: if ``demand_natural`` and ``filename`` exists.
    """
    if filename is not None:
        filename = os.path.expanduser(filename)
    if demand_natural and os.path.exists(filename):
        raise FileExistsError(filename)
    if allow_natural and not os.path.exists(filename):
        return filename
    pathlocation, basename, extension = filename_split(filename)
    if suffix is not None:
        extension = "." + suffix

    # Nested def instead of a named lambda (PEP 8 E731).
    def numbered(n):
        return os.path.join(
            pathlocation,
            format.format(basename=basename, extension=extension, number=n))

    n = 1
    while os.path.exists(numbered(n)):
        n += 1
    return numbered(n + plus)
from typing import Tuple

import numpy as np

from binvox import compress_flat_voxels


class Binvox(object):
    """Holds a binvox voxel model.

    ``data`` is either a three-dimensional numpy boolean array (``mode ==
    'dense'``) or a 3xN numpy integer array of non-zero voxel coordinates
    (``mode == 'sparse'``).

    ``dims``, ``translate`` and ``scale`` are the model metadata: ``dims``
    are the voxel grid dimensions, e.g. ``[32, 32, 32]`` for a 32x32x32
    model, while ``scale`` and ``translate`` relate voxel indices to the
    original model coordinates (see :meth:`transform_coord`).
    """

    def __init__(self, data=None, dims=None, translate=None, scale=None,
                 axis_order=None, mode=None):
        self.data = data
        self.dims = dims
        self.translate = translate
        self.scale = scale
        # 'xzy' is the raw binvox layout; 'xyz' means y/z were swapped.
        self.axis_order = axis_order
        # Either 'dense' or 'sparse'; selects the interpretation of `data`.
        self.mode = mode
        assert axis_order in ('xzy', 'xyz')

    @staticmethod
    def read(filepath, mode: str, fix_coords=True):
        """Read a .binvox file into a :class:`Binvox` instance.

        :param filepath: path of the .binvox file
        :param mode: 'dense' for a full 3D boolean grid, 'sparse' for a 3xN
            coordinate array of the non-zero voxels
        :param fix_coords: if True, reorder axes from the file's native
            x,z,y layout to x,y,z
        """
        assert mode in ['dense', 'sparse'], 'Mode should be either `dense` or `sparse`.'
        with open(filepath, 'rb') as fp:
            dims, translate, scale = Binvox.read_header(fp)
            raw_data = np.frombuffer(fp.read(), dtype=np.uint8)
        parsers = {
            'dense': Binvox._parse_dense,
            'sparse': Binvox._parse_sparse,
        }
        data, axis_order = parsers[mode](raw_data, dims, fix_coords)
        return Binvox(data, dims, translate, scale, axis_order, mode)

    @staticmethod
    def read_header(fp):
        """Read the binvox header from an open binary file.

        :param fp: file object positioned at the start of the file
        :return: Tuple(dims, translate, scale)
        :raises IOError: if the file does not start with '#binvox'
        """
        line = fp.readline().strip()
        if not line.startswith(b'#binvox'):
            raise IOError('Not a binvox file')
        dims = list(map(int, fp.readline().strip().split(b' ')[1:]))
        translate = list(map(float, fp.readline().strip().split(b' ')[1:]))
        scale = list(map(float, fp.readline().strip().split(b' ')[1:]))[0]
        fp.readline()  # consume the 'data' line; the RLE payload follows
        return dims, translate, scale

    @staticmethod
    def _parse_dense(raw_data, dims, fix_coords):
        """Decode the RLE payload into a dense 3D boolean array.

        The payload is a sequence of (value, count) byte pairs.  Dense
        storage needs one byte per voxel, which may be large for big models.
        If ``fix_coords`` is true the array is transposed so that indices
        map as i->x, j->y, k->z instead of the file-native i->x, j->z, k->y.
        """
        values, counts = raw_data[::2], raw_data[1::2]
        # np.bool was removed in NumPy 1.24; use the builtin bool dtype.
        data = np.repeat(values, counts).astype(bool)
        data = data.reshape(dims)
        if fix_coords:
            data = np.transpose(data, (0, 2, 1))  # xzy -> xyz
            axis_order = 'xyz'
        else:
            axis_order = 'xzy'
        return data, axis_order

    @staticmethod
    def _parse_sparse(raw_data, dims, fix_coords):
        """Decode the RLE payload into a 3xN voxel-coordinate array.

        Each column is one non-zero voxel; coordinates are raw voxel indices
        (no scaling or translation applied).  Use this representation to
        save memory when the model is mostly empty.
        """
        values, counts = raw_data[::2], raw_data[1::2]

        end_indices = np.cumsum(counts)
        indices = np.concatenate(([0], end_indices[:-1])).astype(end_indices.dtype)

        values = values.astype(bool)
        indices = indices[values]
        end_indices = end_indices[values]

        nz_voxels = []
        for index, end_index in zip(indices, end_indices):
            nz_voxels.extend(range(index, end_index))
        nz_voxels = np.array(nz_voxels)

        # Flat index layout per the binvox docs:
        #   index = x * (width * height) + z * width + y
        # Floor division is required: true division ('/') would produce
        # float coordinates, which break indexing downstream.
        x = nz_voxels // (dims[0] * dims[1])
        zwpy = nz_voxels % (dims[0] * dims[1])  # z*w + y
        z = zwpy // dims[0]
        y = zwpy % dims[0]

        if fix_coords:
            data = np.vstack((x, y, z))
            axis_order = 'xyz'
        else:
            data = np.vstack((x, z, y))
            axis_order = 'xzy'
        return np.ascontiguousarray(data), axis_order

    def numpy(self):
        """Return the underlying numpy array (dense grid or 3xN coords)."""
        return self.data

    def write(self, filepath):
        """Write the model to ``filepath`` in binary binvox format.

        A sparse (coordinate) model is first converted to dense.  Doesn't
        check if the model is 'sane'.
        """
        # Bug fix: the original called to_sparse() here, which is a no-op
        # for sparse models, so the raw 3xN coordinate array (not a voxel
        # grid) would have been run-length encoded.
        if self.mode == 'sparse':
            self.to_dense()
        dense_data = self.numpy()

        with open(filepath, 'wb') as fp:
            header = "#binvox 1\n" + \
                     'dim ' + ' '.join(map(str, self.dims)) + '\n' + \
                     'translate ' + ' '.join(map(str, self.translate)) + '\n' + \
                     'scale ' + str(self.scale) + '\n' + \
                     'data\n'
            fp.write(str.encode(header))

            # The file stores voxels in xzy order; undo any earlier reorder.
            if self.axis_order == 'xzy':
                voxels_flat = dense_data.flatten()
            elif self.axis_order == 'xyz':
                voxels_flat = np.transpose(dense_data, (0, 2, 1)).flatten()
            else:
                raise NotImplementedError('Unsupported voxel model axis order')

            # Run-length encode and emit one byte per RLE entry.
            voxels_compressed = compress_flat_voxels(voxels_flat)
            BYTE_SIZE = 1
            for entry in voxels_compressed:
                fp.write(entry.to_bytes(BYTE_SIZE, 'big'))

    def __copy__(self):
        """Copy the model (voxel data is copied, metadata duplicated)."""
        data = self.data.copy()
        dims = self.dims[:]
        translate = self.translate[:]
        # Bug fix: the original dropped `mode`, so copies of sparse models
        # were created with mode=None and misbehaved in to_dense()/write().
        return Binvox(data, dims, translate, self.scale, self.axis_order, self.mode)

    def to_sparse(self):
        """Convert in place from dense grid to sparse (coordinate) form.

        No coordinate reordering is performed.
        """
        if self.mode == 'sparse':
            return
        assert self.data.ndim == 3, 'Data is wrong shape; should be 3D array.'
        # np.int was removed in NumPy 1.24; use the builtin int.
        self.data = np.asarray(np.nonzero(self.data), int)
        self.mode = 'sparse'

    def to_dense(self):
        """Convert in place from sparse (coordinate) form to a dense grid."""
        if self.mode == 'dense':
            return
        assert self.data.ndim == 2 and self.data.shape[0] == 3, \
            'Data is wrong shape; should be 3xN array.'
        if np.isscalar(self.dims):
            self.dims = [self.dims] * 3
        dims = np.atleast_2d(self.dims).T
        # Truncate to integers and discard voxels outside the grid bounds.
        xyz = self.data.astype(int)
        valid_ix = ~np.any((xyz < 0) | (xyz >= dims), 0)
        xyz = xyz[:, valid_ix]
        out = np.zeros(dims.flatten(), dtype=bool)
        out[tuple(xyz)] = True
        self.data = out
        self.mode = 'dense'

    def transform_coord(self, voxel_coord: Tuple[int, int, int]) -> Tuple[float, float, float]:
        """Transform voxel coordinates i, j, k to original coordinates x, y, z.

        :param voxel_coord: Voxel coordinate
        :return: Original coordinate
        """
        voxel_coord = np.array(voxel_coord, dtype=float)
        coord = voxel_coord / self.dims
        coord = self.scale * coord + self.translate
        return coord
/**
 * Describes the set of facts a subscription wants to match on: a mandatory
 * namespace plus optional type, aggregate id, JavaScript filter script and
 * meta key/value constraints.
 *
 * @author [email protected]
 *
 */
@Getter
@Setter
public class FactSpec {

    /** Namespace the matching facts must belong to (mandatory). */
    @NonNull
    @JsonProperty
    final String ns;

    /** Optional fact type to match. */
    @JsonProperty
    String type = null;

    /** Optional aggregate id to match. */
    @JsonProperty
    UUID aggId = null;

    /** Optional JavaScript filter script. */
    @JsonProperty
    String jsFilterScript = null;

    /** Meta key/value constraints. */
    @NonNull
    @JsonProperty
    final Map<String, String> meta = new HashMap<>();

    public FactSpec(@NonNull @JsonProperty("ns") String ns) {
        super();
        this.ns = ns;
    }

    /** Fluent factory for a spec constrained to the given namespace. */
    public static FactSpec ns(String ns) {
        return new FactSpec(ns);
    }

    /** Spec matching {@code MarkFact} entries. */
    public static FactSpec forMark() {
        return FactSpec.ns(MarkFact.MARK_NS).type(MarkFact.MARK_TYPE);
    }

    /** Adds one meta key/value constraint; returns this spec for chaining. */
    public FactSpec meta(@NonNull String k, @NonNull String v) {
        meta.put(k, v);
        return this;
    }
}
First Report of an Ovary Smut of Italian Thistle Caused by a Microbotryum sp. in Greece. Italian thistle (Carduus pycnocephalus L.), family Asteraceae, is a common weed in Greece. It is also a problematic invasive weed in the western United States and a target of biological control efforts. In May 2005, smutted capitula of Italian thistle were found in an abandoned field in Halkiades, Greece. A total of 38 smutted plants, representing approximately 20% of those plants present, were found in a portion of the field that was lightly infested with Italian thistle. In most cases, capitula of all diseased flowers were smutted. In one or two cases, capitula on some branches of the plants were smutted, whereas capitula on other branches were healthy. Diseased capitula were noticeably more globose than healthy ovoid capitula, and diseased capitula did not open completely. When diseased capitula were split open, the ovaries in all florets within the capitula were filled with powdery masses of smut teliospores. Diseased capitula were collected, air dried, and sent to the quarantine facility of the Foreign Disease-Weed Science Research Unit (FDWSRU), USDA/ARS, Fort Detrick, MD. Teliospores within the capitula were extracted and observed microscopically. Teliospores of isolate DB05-014 were relatively uniform in shape and size, globose, 12.0 to 17.3 × 12.3 to 18.0 μm (mean 14.5 × 15.1 μm), violet tinted pale to medium yellowish-brown; wall reticulate appearing as coarse, radiate wings on the spore margin, 5 to 7 polyangular meshes per spore diameter, muri, 0.7 to 2.0 μm high in optical median view appearing as gradually narrowing blunt spines, 0.5 to 1 μm wide at their basis; in scanning electron microscopy (SEM), the meshes were subpolygonal, wall and interspaces were finely verruculose. Teliospores were more globose and slightly smaller than the description of Microbotryum cardui (A. A. Fischer Waldh.) Vánky (2), but the mean sizes were within the described range. 
When compared with teliospores of M. cardui on C. acanthoides, the numbers of polyangular meshes per spore diameter were within the range of the description using SEM, but the muri were about one-half of the height of those described. Nucleotide sequences for the internal transcribed spacers (ITS 1 and 2) and 5.8S ribosomal region (GenBank Accession No. AY280460) were aligned with sequences of other smut fungi using the BLAST algorithm of the National Center for Biotechnology Information. The closest alignment of DB05-014 was with M. scorzonerae (590 of 627 bp identities or 94% with 2% gaps). No sequences of M. cardui were available for comparison, but only M. cardui has been reported on Carduus spp. (1,2). Another smut reported on a Carduus sp. is Thecaphora trailii (1). DB05-014 is a likely variant of M. cardui from a previously unknown host. Italian thistle is an annual plant that reproduces solely by seeds (achenes). Because of the lack of seed production on smutted plants and the systemic nature of the disease, this fungus has great potential as a biological control agent for Italian thistle in the United States. A voucher specimen has been deposited with the U.S. National Fungus Collections (BPI 871812). To our knowledge this is the first report of a Microbotryum sp. parasitizing C. pycnocephalus. References: (1) K. Vánky. European Smut Fungi. Gustav Fischer Verlag, Stuttgart, Germany, 1994. (2) K. Vánky and D. Berner. Mycotaxon 85:307, 2003.
# coding: utf-8
"""Unit tests for pronto.synonym: SynonymType and Synonym construction."""
from __future__ import absolute_import

### DEPS
import six
import unittest
import io
import sys
import contextlib
import os
import shutil
import gzip
import os.path as op
import warnings
import textwrap

from . import utils
import pronto.synonym


class TestProntoSynonymType(unittest.TestCase):
    """Covers SynonymType creation from OBO headers and from the constructor.

    NOTE(review): unlike TestProntoSynonym below, this class does not clear
    pronto.synonym.SynonymType._instances in a tearDown -- confirm whether
    leftover registrations can leak into other test classes.
    """

    def assertOk(self, synonymtype, scope):
        # Shared assertions: every test in this class builds the same
        # UK_SPELLING type, varying only the scope.
        self.assertEqual(synonymtype.desc, 'British spelling')
        self.assertEqual(synonymtype.name, 'UK_SPELLING')
        self.assertEqual(synonymtype.scope, scope)
        # Newly created types register themselves in the class registry.
        self.assertIn(synonymtype.name, pronto.synonym.SynonymType._instances)

    def test_new_synonymtype_from_obo_with_scope(self):
        synonymtype = pronto.synonym.SynonymType.from_obo(
            'UK_SPELLING "British spelling" EXACT'
        )
        self.assertOk(synonymtype, "EXACT")

    def test_new_synonymtype_from_obo_without_scope(self):
        # Omitting the scope in the OBO header leaves scope as None.
        synonymtype = pronto.synonym.SynonymType.from_obo(
            'UK_SPELLING "British spelling"'
        )
        self.assertOk(synonymtype, None)

    def test_new_synonymtype_with_scope(self):
        synonymtype = pronto.synonym.SynonymType(
            'UK_SPELLING', 'British spelling', 'EXACT',
        )
        self.assertOk(synonymtype, "EXACT")

    def test_new_synonymtype_without_scope(self):
        synonymtype = pronto.synonym.SynonymType(
            'UK_SPELLING', 'British spelling',
        )
        self.assertOk(synonymtype, None)

    def test_fail_wrong_scope(self):
        # An unknown scope must be rejected with the exact message below.
        with self.assertRaises(ValueError) as ctx:
            pronto.synonym.SynonymType(
                'UK_SPELLING', 'British spelling', 'UNEXISTING_SCOPE',
            )
        self.assertEqual(str(ctx.exception), "scope must be 'NARROW'"
                         ", 'BROAD', 'EXACT', 'RELATED' or None")

    def test_obo_with_scope(self):
        # Round trip: the .obo property reproduces the header verbatim.
        obo_header = 'UK_SPELLING "British spelling" EXACT'
        synonymtype = pronto.synonym.SynonymType.from_obo(obo_header)
        self.assertEqual(synonymtype.obo, "synonymtypedef: {}".format(obo_header))

    def test_obo_without_scope(self):
        obo_header = 'UK_SPELLING "British spelling"'
        synonymtype = pronto.synonym.SynonymType.from_obo(obo_header)
        self.assertEqual(synonymtype.obo, "synonymtypedef: {}".format(obo_header))


class TestProntoSynonym(unittest.TestCase):
    """Covers Synonym creation and how its scope interacts with SynonymType."""

    def tearDown(self):
        # Isolate tests by clearing the global SynonymType registry, since
        # several tests register a MARKETING_SLOGAN type.
        pronto.synonym.SynonymType._instances.clear()

    def assertOk(self, synonym, scope, synonymtype=None):
        # Shared assertions: every test builds the same description/xrefs,
        # varying only the scope and (optionally) the synonym type.
        self.assertEqual(synonym.desc, "The other white meat")
        self.assertEqual(synonym.scope, scope)
        self.assertEqual(synonym.xref, ['MEAT:00324', 'BACONBASE:03021'])
        if synonymtype is not None:
            self.assertEqual(synonym.syn_type, synonymtype)

    def test_new_synonym_from_obo_with_scope_with_syntype(self):
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan')
        synonym = pronto.synonym.Synonym.from_obo(
            '"The other white meat" EXACT MARKETING_SLOGAN [MEAT:00324, BACONBASE:03021]'
        )
        self.assertOk(synonym, "EXACT", synonymtype)

    def test_new_synonym_from_obo_without_scope_with_syntype(self):
        # With no explicit scope the synonym defaults to RELATED.
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan')
        synonym = pronto.synonym.Synonym.from_obo(
            '"The other white meat" MARKETING_SLOGAN [MEAT:00324, BACONBASE:03021]'
        )
        self.assertOk(synonym, "RELATED", synonymtype)

    def test_new_synonym_with_scope_with_syntype(self):
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan')
        synonym = pronto.synonym.Synonym(
            "The other white meat", "BROAD", "MARKETING_SLOGAN",
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "BROAD", synonymtype)

    def test_new_synonym_without_scope_with_syntype(self):
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan')
        synonym = pronto.synonym.Synonym(
            "The other white meat", None, "MARKETING_SLOGAN",
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "RELATED", synonymtype)

    def test_new_synonym_from_obo_with_scope_without_syntype(self):
        synonym = pronto.synonym.Synonym.from_obo(
            '"The other white meat" EXACT [MEAT:00324, BACONBASE:03021]'
        )
        self.assertOk(synonym, "EXACT")

    def test_new_synonym_from_obo_without_scope_without_syntype(self):
        synonym = pronto.synonym.Synonym.from_obo(
            '"The other white meat" [MEAT:00324, BACONBASE:03021]'
        )
        self.assertOk(synonym, "RELATED")

    def test_new_synonym_with_scope_without_syntype(self):
        synonym = pronto.synonym.Synonym(
            "The other white meat", "BROAD", None,
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "BROAD")

    def test_new_synonym_without_scope_without_syntype(self):
        synonym = pronto.synonym.Synonym(
            "The other white meat", None, None,
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "RELATED")

    def test_new_synonym_inherit_scope_from_syn_type(self):
        # A synonym with no explicit scope inherits the scope of its type.
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan', "EXACT")
        synonym = pronto.synonym.Synonym(
            "The other white meat", None, "MARKETING_SLOGAN",
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "EXACT", synonymtype)

    def test_new_synonym_force_scope_from_syn_type(self):
        # The type's scope overrides an explicit (different) synonym scope.
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan', "EXACT")
        synonym = pronto.synonym.Synonym(
            "The other white meat", "BROAD", "MARKETING_SLOGAN",
            ["MEAT:00324", "BACONBASE:03021"],
        )
        self.assertOk(synonym, "EXACT", synonymtype)

    def test_fail_wrong_scope(self):
        # An invalid scope value is rejected with ValueError.
        synonymtype = pronto.synonym.SynonymType("MARKETING_SLOGAN", 'marketing slogan')
        with self.assertRaises(ValueError) as ctx:
            synonym = pronto.synonym.Synonym(
                "The other white meat", "WRONG", "MARKETING_SLOGAN",
                ["MEAT:00324", "BACONBASE:03021"],
            )

    def test_fail_undefined_syn_type(self):
        # Referencing a synonym type that was never registered fails.
        with self.assertRaises(ValueError) as ctx:
            synonym = pronto.synonym.Synonym(
                "The other white meat", "BROAD", "WRONG_TYPE",
                ["MEAT:00324", "BACONBASE:03021"],
            )
<reponame>rileymiller/clara-riley-get-married<gh_stars>0 export const maxXS = 500 export const maxSM = 750 export const maxMD = 1023 export const bpMaxXS = `@media (max-width: ${maxXS}px)` export const bpMaxSM = `@media (max-width: ${maxSM}px)` export const bpMaxMD = `@media (max-width: ${maxMD}px)`
def restore(self, store_name: str, key: Optional[Union[List[str], str]] = None, overwrite_exist: bool = True, include_history: bool = False) -> bool:
    """Load a previously persisted store from the database into self._store.

    Two layouts are supported. If a companion "<store>_<named_query_collection>"
    collection exists, the store is treated as a set of named-query sub-graphs:
    each named query's edges (and then the attributes of the nodes those edges
    reference) are fetched and rebuilt into an AMFGraph. Otherwise, if a plain
    collection named store_name exists, each document is restored as a simple
    key/value entry.

    :param store_name: name of the store to restore.
    :param key: optional key or list of keys to restrict the restore to;
        None restores everything.
    :param overwrite_exist: if True, replace any in-memory copy of the store.
    :param include_history: if True, also restore expired edges; otherwise
        only edges whose expiry timestamp is unset are fetched.
    :return: always True (even when neither collection exists -- NOTE(review):
        confirm whether a "nothing restored" outcome should be reported).
    """
    # Companion collection that records which named queries this store has.
    store_named_query = '{}_{}'.format(store_name, self._config['named_query_collection'])

    # if the named query table exists then must be a graph
    if self._db.has_collection(store_named_query):

        if key is None:
            # No filter: fetch every named-query entry for this store.
            n_q_collections = self._db.execute_query('get_named_query',
                                                     parameters={'named_query_col': {'value': store_named_query, 'type': 'collection'}})
        else:
            # Normalise a single key to a list before filtering.
            if isinstance(key, list):
                keys = key
            else:
                keys = [key]
            n_q_collections = self._db.execute_query('get_filtered_named_query',
                                                     parameters={'named_query_col': {'value': store_named_query, 'type': 'collection'},
                                                                 'filter_keys': {'value': keys, 'type': 'attribute'}})

        # Maps named_query -> {node_collection -> set of node keys} so node
        # attributes can be fetched in bulk after the edges are known.
        node_keys = {}

        for nqe in n_q_collections:

            # add a new graph if it doesnt exist
            # NOTE(review): on the first iteration with overwrite_exist True
            # the whole store entry is replaced (then the flag is cleared so
            # later named queries are merged in) -- confirm this is intended.
            if overwrite_exist or store_name not in self._store:
                self._store[store_name] = {nqe['named_query']: {'value': AMFGraph(), 'persisted': True, 'updated': False}}
                overwrite_exist = False
            elif nqe['named_query'] not in self._store[store_name]:
                self._store[store_name][nqe['named_query']] = {'value': AMFGraph(), 'persisted': True, 'updated': False}

            if nqe['named_query'] not in node_keys:
                node_keys[nqe['named_query']] = {}

            edges_to_restore = []

            # Sub-graph identifier under which this named query's edges were
            # persisted.
            sub_graph = '{}_{}'.format(store_name, nqe['named_query'])

            if include_history:
                # Fetch every edge, including expired ones.
                edges = self._db.execute_query('get_docs_via_sub_graph',
                                               parameters={'collection': {'value': nqe['collection'], 'type': 'collection'},
                                                           'sub_graph': {'value': sub_graph, 'type': 'attribute'}})
            else:
                # Fetch only current edges (expiry timestamp unset).
                edges = self._db.execute_query('get_docs_via_sub_graph_expiry_ts',
                                               parameters={'collection': {'value': nqe['collection'], 'type': 'collection'},
                                                           'sub_graph': {'value': sub_graph, 'type': 'attribute'},
                                                           'expiry_ts': {'value': None, 'type': 'attribute'}})

            for e in edges:
                edges_to_restore.append(e)

                # Record the source node's key, grouped by its collection.
                # Spaces in string uids are replaced with underscores --
                # presumably to match the stored document keys; confirm
                # against the persist path.
                if e['_source_type'] not in node_keys[nqe['named_query']]:
                    node_keys[nqe['named_query']][e['_source_type']] = set()

                if isinstance(e['_source_uid'], str) and ' ' in e['_source_uid']:
                    node_keys[nqe['named_query']][e['_source_type']].add(e['_source_uid'].replace(' ', '_'))
                else:
                    node_keys[nqe['named_query']][e['_source_type']].add(e['_source_uid'])

                # Same bookkeeping for the target node.
                if e['_target_type'] not in node_keys[nqe['named_query']]:
                    node_keys[nqe['named_query']][e['_target_type']] = set()

                if isinstance(e['_target_uid'], str) and ' ' in e['_target_uid']:
                    node_keys[nqe['named_query']][e['_target_type']].add(e['_target_uid'].replace(' ', '_'))
                else:
                    node_keys[nqe['named_query']][e['_target_type']].add(e['_target_uid'])

            # Rebuild this named query's edges in the in-memory graph.
            self._store[store_name][nqe['named_query']]['value'].restore_edges(edges=edges_to_restore)

        # get nodes attributes for each named query
        for named_query in node_keys:
            for node_collection in node_keys[named_query]:
                nodes_to_restore = list(self._db.execute_query('get_docs_via_key',
                                                               parameters={'collection': {'value': node_collection, 'type': 'collection'},
                                                                           'filter_keys': {'value': list(node_keys[named_query][node_collection]), 'type': 'attribute'}}))
                self._store[store_name][named_query]['value'].restore_nodes(nodes=nodes_to_restore)

    # no named query collection exists so must be simple key value pair
    elif self._db.has_collection(store_name):

        if key is None:
            docs = self._db.execute_query('get_docs_in_collection',
                                          parameters={'collection': {'value': store_name, 'type': 'collection'}})
        else:
            # Normalise a single key to a list before filtering.
            if isinstance(key, list):
                keys = key
            else:
                keys = [key]
            docs = self._db.execute_query('get_docs_via_key',
                                          parameters={'collection': {'value': store_name, 'type': 'collection'},
                                                      'filter_keys': {'value': keys, 'type': 'attribute'}})

        for doc in docs:
            if store_name not in self._store:
                self._store[store_name] = {}
            self._store[store_name][doc['_key']] = {'value': doc['_value'], 'persisted': True, 'updated': False}

    return True
The Government's budget deficit has blown out by $637 million, mainly because of a fall in tax revenue from most sectors. The Treasury today released the financial statements for the seven months to the end of January, showing the operating balance before gains and losses was $1.1 billion - more than twice the $426m deficit forecast in the December update. Acting chief government accountant Fergus Welsh said the lower tax take - $876m below forecast - was partly offset by lower core expenses and higher returns from Crown entities. "At this stage, it is difficult to determine how much of the lower than forecast tax is temporary versus permanent, but we expect this to become clearer over the next few months," he said. "Timing issues are likely to see some of the current variance narrow by year end." Factors contributing to timing issues included mismatching of GST refunds and receipts relating to export products and the Canterbury rebuild. Tax revenue from some large corporate taxpayers was not yet "visible" to Inland Revenue because of tax pooling schemes. Strong sharemarkets saw gains on financial instruments of $2.8b, $1.3b ahead of forecast. As a result, the operating balance surplus was $629m higher than forecast at $3.4b. Net debt was $631m higher than forecast at $59.9b, equivalent to 27.7 per cent of gross domestic product. Finance Minister Bill English said the Government remained on track to return to surplus next year, but he acknowledged that revenue was still some way below forecast. "This makes it even more important for the Government to continue carefully managing its spending as we target a return to surplus next year," he said. "While we have spending under control, government revenue can move around and is therefore more difficult to forecast." He said a return to surplus was "a challenging goal", and the need for discipline remained. 
"The extent to which tax revenue is likely to remain below forecast will become clearer as officials work through forecasts for the Budget," he said. Labour finance spokesman David Parker said the past three months of financial statements showed the books were worse than expected, raising questions about National's management of the economy. "If the economy is recovering, the tax take should be increasing and the deficit reducing. The opposite is happening," he said. "Serious questions need to be asked of National's economic credibility." In November, December and January, National's books were worse than predicted, he said. At the same time, National MPs "have been hyping the economy up to the rafters". In November and December, the Treasury said there were timing issues. "But the third time, even Treasury admits it doesn't know why the books are even more in the red," Parker said.
<filename>bin/alfs/simple-test/simple-test.c /* * Copyright (C) 1997 Massachusetts Institute of Technology * * This software is being provided by the copyright holders under the * following license. By obtaining, using and/or copying this software, * you agree that you have read, understood, and will comply with the * following terms and conditions: * * Permission to use, copy, modify, distribute, and sell this software * and its documentation for any purpose and without fee or royalty is * hereby granted, provided that the full text of this NOTICE appears on * ALL copies of the software and documentation or portions thereof, * including modifications, that you make. * * THIS SOFTWARE IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO * REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, * BUT NOT LIMITATION, COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR * WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR * THAT THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE ANY * THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. COPYRIGHT * HOLDERS WILL BEAR NO LIABILITY FOR ANY USE OF THIS SOFTWARE OR * DOCUMENTATION. * * The name and trademarks of copyright holders may NOT be used in * advertising or publicity pertaining to the software without specific, * written prior permission. Title to copyright in this software and any * associated documentation will at all times remain with copyright * holders. See the file AUTHORS which should have accompanied this software * for a list of all copyright holders. * * This file may be derived from previously copyrighted software. This * copyright applies only to those changes made by the copyright * holders listed in the AUTHORS file. The rest of this file is covered by * the copyright notices, if any, listed below. 
*/ #include "alfs/alfs.h" #include <assert.h> #include <stdio.h> #include <memory.h> #include <string.h> #include <stdlib.h> #define MAX_FDS 10 char space0[2048]; char spaceA[4096]; char spaceB[4096]; char spaceC[4096]; char spaceD[4096]; char spaceE[4096]; char spaceF[4096]; char spaceG[4096]; char spaceH[4096]; void printRet (ret) int ret; { if (ret >= 0) { printf ("OK\n"); return; } switch (ret) { case ALFS_ELENGTH: printf ("ALFS_ELENGTH\n"); break; case ALFS_ENOENT: printf ("ALFS_ENOENT\n"); break; case ALFS_EACCESS: printf ("ALFS_EACCESS\n"); break; case ALFS_EPERM: printf ("ALFS_EPERM\n"); break; case ALFS_EINUSE: printf ("ALFS_EINUSE\n"); break; case ALFS_ENFILE: printf ("ALFS_ENFILE\n"); break; case ALFS_EISDIR: printf ("ALFS_EISDIR\n"); break; case ALFS_EBADF: printf ("ALFS_EBADF\n"); break; case ALFS_EINVAL: printf ("ALFS_EINVAL\n"); break; case ALFS_EEXIST: printf ("ALFS_EEXIST\n"); break; case ALFS_ENOTEMPTY: printf ("ALFS_ENOTEMPTY\n"); break; default: printf ("unknown error return val: %d\n", ret); break; } } int main (int argc, char **argv) { int fds[MAX_FDS]; char line[4096]; int ret; uint devno, superblkno; if (argc != 2) { printf ("Usage: %s <devname>\n", argv[0]); exit (0); } devno = atoi(argv[1]); superblkno = alfs_initFS (devno, 4194304, alfs_makeCap()); alfs_mountFS (devno, superblkno); printf("Initialized successfully\n"); for (ret = 0; ret < 511; ret++) { line[ret] = 'Z'; } line[511] = 0; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("big name alfs_open: %d ", ret); printRet(ret); /* alfs_listDirectory(NULL); */ line[255] = 0; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("ok name alfs_open: %x ", ret); printRet(ret); /* alfs_listDirectory(NULL); */ ret = alfs_open("file1", OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open: %x ", ret); printRet(ret); fds[0] = ret; /* alfs_listDirectory(NULL); ret = alfs_open("file1", OPT_RDWR | OPT_CREAT, 0777); printf("repeated alfs_open: %d ", ret); printRet(ret); */ /* 
alfs_listDirectory(NULL); */ sprintf(line, "A"); ret = alfs_write(fds[0], line, 1); printf("alfs_write: "); printRet(ret); ret = alfs_lseek(fds[0], 0, 0); printf("alfs_lseek: "); printRet(ret); sprintf(line, "B"); ret = alfs_read(fds[0], line, 1); printf("alfs_read: %c ", line[0]); printRet(ret); ret = alfs_close(fds[0]); printf("alfs_close: "); printRet(ret); /* alfs_listDirectory(NULL); printf("allocedBlocks %d\n", allocedBlocks); */ ret = alfs_unlink("file1"); printf("alfs_unlink: "); printRet(ret); /* printf("allocedBlocks %d\n", allocedBlocks); alfs_listDirectory(NULL); */ ret = alfs_open("file1", OPT_RDONLY, 0); printf("broken alfs_open: "); printRet(ret); for (ret = 0; ret < 256; ret++) { line[ret] = 'Z'; } line[255] = 0; ret = alfs_unlink(line); printf("alfs_unlink: "); printRet(ret); alfs_listDirectory(NULL); for (ret=0; ret<246; ret++) { line[ret] = 'A'; } line[246] = '1'; line[247] = 0; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #1: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #1: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '2'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #2: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #2: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '3'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #3: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #3: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '4'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #4: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #4: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '5'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #5: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #5: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '6'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 
0777); printf("alfs_open #6: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #6: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '7'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #7: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #7: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '8'; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #8: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #8: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '3'; ret = alfs_unlink(line); printf("alfs_unlink #3: "); printRet(ret); alfs_listDirectory(NULL); line[246] = '8'; /* ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open #8: %d ", ret); printRet(ret); ret = alfs_close(ret); printf("alfs_close #8: "); printRet(ret); alfs_listDirectory(NULL); */ ret = alfs_unlink(line); printf("alfs_unlink #8: "); printRet(ret); alfs_listDirectory(NULL); for (ret = 0; ret < 511; ret++) { line[ret] = 'Z'; } line[248] = 0; ret = alfs_open(line, OPT_RDWR | OPT_CREAT, 0777); printf("ok name alfs_open: %d ", ret); printRet(ret); alfs_listDirectory(NULL); for (ret=0; ret<2048; ret++) { space0[ret] = (char) 0; } for (ret=0; ret<4096; ret++) { spaceA[ret] = 'A'; spaceB[ret] = 'B'; spaceC[ret] = 'C'; spaceD[ret] = 'D'; spaceE[ret] = 'E'; spaceF[ret] = 'F'; spaceG[ret] = 'G'; spaceH[ret] = 'H'; } ret = alfs_open("file2", OPT_RDWR | OPT_CREAT, 0777); printf("alfs_open: %d ", ret); printRet(ret); fds[0] = ret; ret = alfs_write(fds[0], spaceA, 4096); printf("alfs_write spaceA: "); printRet(ret); ret = alfs_write(fds[0], spaceB, 4096); printf("alfs_write spaceB: "); printRet(ret); ret = alfs_write(fds[0], spaceC, 4096); printf("alfs_write spaceC: "); printRet(ret); ret = alfs_write(fds[0], spaceD, 4096); printf("alfs_write space[A-D]: "); printRet(ret); ret = alfs_lseek(fds[0], 8192, 0); printf("alfs_lseek: "); printRet(ret); ret = alfs_read(fds[0], line, 4096); 
printf("alfs_read spaceC: "); printRet(ret); printf("compare to written values: %d\n", bcmp(spaceC, line, 4096)); alfs_listDirectory(NULL); /* printf("allocedBlocks %d\n", allocedBlocks); */ ret = alfs_unlink("file2"); printf("alfs_unlink file2: "); printRet(ret); /* printf("allocedBlocks %d\n", allocedBlocks); */ #if 1 ret = alfs_mkdir ("dir1", 0777); printf ("alfs_mkdir dir1: "); printRet(ret); ret = alfs_unlink ("dir1/d"); printf ("unlink non-existent file: "); printRet(ret); ret = alfs_mkdir ("dir1/dir2", 0777); printf ("alfs_mkdir dir1/dir2: "); printRet (ret); ret = alfs_rmdir ("dir1"); printf ("alfs_rmdir nonempty directory: "); printRet (ret); ret = alfs_unlink ("dir1/dir2"); printf ("alfs_unlink dir1/dir2: "); printRet (ret); ret = alfs_rmdir ("dir1"); printf ("alfs_rmdir dir1: "); printRet (ret); #endif alfs_unmountFS (); {extern int alfs_alloced; printf ("done: alfs_alloced (%d)\n", alfs_alloced); } exit (0); }
/**
 * Example RMTest/Cucumber runner class that drives a parameterized
 * WebDriver-backed scenario (glue code lives in the nested {@link Steps}).
 *
 * @author Jeremy Comte
 */
@RunWith(WebDriverRunner.class)
@WebDriverRunnerOptions(reuseDriver = true, parallelize = @Parallelize)
@Parameterized.UseParametersRunnerFactory(ParameterizedCucumberRunnerFactory.class)
@CucumberOptions(glue = "se.redmind.rmtest.cucumber", plugin = "pretty")
public class GoogleExample {

    /** Cucumber step definitions; the runner injects the driver wrapper. */
    public static class Steps {

        private final Logger logger = LoggerFactory.getLogger(this.getClass());
        // Wrapper around the WebDriver instance under test — presumably one
        // instance per parameterized driver configuration (TODO confirm).
        private final WebDriverWrapper<?> driverWrapper;

        public Steps(WebDriverWrapper<?> driverWrapper) {
            this.driverWrapper = driverWrapper;
        }

        @Given("^that we send a rocket named \"([^\"]*)\" to the moon$")
        public void that_we_send_a_rocket_named_to_the_moon(String name) {
            // Use SLF4J parameterized logging instead of string concatenation:
            // the message is only assembled when INFO is enabled.
            logger.info("roger ... the {} rocket has landed and we have a {}", name, driverWrapper.getDriver());
        }
    }
}
/*
 * Copyright (c) 2020 Temporal Technologies, Inc. All Rights Reserved
 *
 * Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Modifications copyright (C) 2017 Uber Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not
 * use this file except in compliance with the License. A copy of the License is
 * located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package io.temporal.samples.hello;

import io.temporal.activity.ActivityInterface;
import io.temporal.activity.ActivityOptions;
import io.temporal.client.WorkflowClient;
import io.temporal.client.WorkflowOptions;
import io.temporal.serviceclient.WorkflowServiceStubs;
import io.temporal.worker.Worker;
import io.temporal.worker.WorkerFactory;
import io.temporal.workflow.Async;
import io.temporal.workflow.Promise;
import io.temporal.workflow.Workflow;
import io.temporal.workflow.WorkflowInterface;
import io.temporal.workflow.WorkflowMethod;
import java.time.Duration;

/**
 * Sample Temporal workflow that demonstrates asynchronous activity invocation.
 *
 * <p>To execute this example a locally running Temporal service instance is required. You can
 * follow instructions on how to set up your Temporal service here:
 * https://github.com/temporalio/temporal/blob/master/README.md#download-and-start-temporal-server-locally
 */
public class HelloAsync {

  // Define the task queue name
  static final String TASK_QUEUE = "HelloAsyncActivityTaskQueue";

  // Define our workflow unique id
  static final String WORKFLOW_ID = "HelloAsyncActivityWorkflow";

  /**
   * Define the Workflow Interface. It must contain one method annotated with @WorkflowMethod.
   *
   * <p>Workflow code includes core processing logic. It shouldn't contain any heavyweight
   * computations, non-deterministic code, network calls, database operations, etc. All those things
   * should be handled by Activities.
   *
   * @see io.temporal.workflow.WorkflowInterface
   * @see io.temporal.workflow.WorkflowMethod
   */
  @WorkflowInterface
  public interface GreetingWorkflow {

    /**
     * This method is executed when the workflow is started. The workflow completes when the
     * workflow method finishes execution.
     */
    @WorkflowMethod
    String getGreeting(String name);
  }

  /**
   * Define the Activity Interface. Activities are building blocks of any temporal workflow and
   * contain any business logic that could perform long running computation, network calls, etc.
   *
   * <p>Annotating activity methods with @ActivityMethod is optional
   *
   * @see io.temporal.activity.ActivityInterface
   * @see io.temporal.activity.ActivityMethod
   */
  @ActivityInterface
  public interface GreetingActivities {

    // Define your activity method which can be called during workflow execution
    String composeGreeting(String greeting, String name);
  }

  // Define the workflow implementation which implements our getGreeting workflow method.
  public static class GreetingWorkflowImpl implements GreetingWorkflow {

    /**
     * Define the GreetingActivities stub. Activity stubs are proxies for activity invocations that
     * are executed outside of the workflow thread on the activity worker, that can be on a
     * different host. Temporal is going to dispatch the activity results back to the workflow and
     * unblock the stub as soon as activity is completed on the activity worker.
     *
     * <p>In the {@link ActivityOptions} definition the "setStartToCloseTimeout" option sets the
     * maximum time of a single Activity execution attempt. For this example it is set to 10
     * seconds.
     */
    private final GreetingActivities activities =
        Workflow.newActivityStub(
            GreetingActivities.class,
            ActivityOptions.newBuilder().setStartToCloseTimeout(Duration.ofSeconds(10)).build());

    @Override
    public String getGreeting(String name) {
      /*
       * This is our workflow method. We invoke the composeGreeting method two times using
       * {@link io.temporal.workflow.Async#function(Func)}.
       * The results of each async activity method invocation returns us a
       * {@link io.temporal.workflow.Promise} which is similar to a Java {@link java.util.concurrent.Future}
       */
      Promise<String> hello = Async.function(activities::composeGreeting, "Hello", name);
      Promise<String> bye = Async.function(activities::composeGreeting, "Bye", name);

      // After calling the two activity methods async, we block until we receive their results
      return hello.get() + "\n" + bye.get();
    }
  }

  /** Simple activity implementation, that concatenates two strings. */
  static class GreetingActivitiesImpl implements GreetingActivities {

    @Override
    public String composeGreeting(String greeting, String name) {
      return greeting + " " + name + "!";
    }
  }

  /**
   * With our Workflow and Activities defined, we can now start execution. The main method starts
   * the worker and then the workflow.
   */
  public static void main(String[] args) {

    // Define the workflow service.
    WorkflowServiceStubs service = WorkflowServiceStubs.newInstance();

    /*
     * Define the workflow client. It is a Temporal service client used to start, signal, and query
     * workflows
     */
    WorkflowClient client = WorkflowClient.newInstance(service);

    /*
     * Define the workflow factory. It is used to create workflow workers for a specific task queue.
     */
    WorkerFactory factory = WorkerFactory.newInstance(client);

    /*
     * Define the workflow worker. Workflow workers listen to a defined task queue and process
     * workflows and activities.
     */
    Worker worker = factory.newWorker(TASK_QUEUE);

    /*
     * Register our workflow implementation with the worker.
     * Workflow implementations must be known to the worker at runtime in
     * order to dispatch workflow tasks.
     */
    worker.registerWorkflowImplementationTypes(GreetingWorkflowImpl.class);

    /*
     * Register our workflow activity implementation with the worker. Since workflow activities are
     * stateless and thread-safe, we need to register a shared instance.
     */
    worker.registerActivitiesImplementations(new GreetingActivitiesImpl());

    /*
     * Start all the workers registered for a specific task queue.
     * The started workers then start polling for workflows and activities.
     */
    factory.start();

    // Create the workflow client stub. It is used to start our workflow execution.
    GreetingWorkflow workflow =
        client.newWorkflowStub(
            GreetingWorkflow.class,
            WorkflowOptions.newBuilder()
                .setWorkflowId(WORKFLOW_ID)
                .setTaskQueue(TASK_QUEUE)
                .build());

    // Execute our workflow and wait for it to complete.
    String greeting = workflow.getGreeting("World");

    // Display workflow execution results
    System.out.println(greeting);
    System.exit(0);
  }
}
def decode_password(self, encoded_pass):
    """Reverse the simple nibble obfuscation applied to a stored password.

    The first character of ``encoded_pass`` is a marker and is discarded;
    the remainder is consumed two characters at a time, each pair encoding
    one byte of the clear text (high nibble first, then low nibble).

    Raises ValueError for characters outside the obfuscation alphabet and
    IndexError if the payload has odd length (same as the original code).
    """
    # One character per nibble value 0..15, in encoder order.
    obfuscation_table = ['A', 'E', 'I', 'M', 'Q', 'U', 'Y', ']',
                         'a', 'e', 'i', 'm', 'q', 'u', 'y', '}']
    payload = encoded_pass[1:]
    pairs = [payload[pos:pos + 2] for pos in range(0, len(payload), 2)]
    decoded_chars = []
    for pair in pairs:
        high = obfuscation_table.index(pair[0])
        low = obfuscation_table.index(pair[1])
        decoded_chars.append(chr(high * 16 + low))
    return "".join(decoded_chars)
<gh_stars>0 import React, { Component } from 'react'; class Presence extends Component { render() { return ( <div className="dashboard-bg-gray dashboard" id="dashboard-presence"> </div> ); } } export default Presence;
/** * * @author Petr Kuzel * @version */ class Util { /** Forbid creating new Util */ private Util() { } static String[] addString(String[] array, String val) { if (array == null) { return new String[] {val}; } else { String[] n = new String[array.length + 1]; System.arraycopy(array, 0, n, 0, array.length); n[array.length] = val; return n; } } static int indexOf(Object[] where, Object what) { if (where == null) return -1; for (int i = 0; i<where.length; i++) { if (where[i].equals(what)) return i; } return -1; } static int indexOf(String[] where, String what, boolean caseInsensitiv) { boolean isEqual; for (int i = 0; where != null && i < where.length; i++) { if (caseInsensitiv) isEqual = where[i].equalsIgnoreCase (what); else isEqual = where[i].equals(what); if (isEqual) return i; } return -1; } static boolean contains(Object[] where, Object what) { return indexOf(where, what) != -1; } static boolean contains(String[] where, String what, boolean caseInsensitiv) { return indexOf(where, what, caseInsensitiv) != -1; } static void writeUTF(DataOutput os, String s) throws IOException { if (s == null) { s = "\u0000"; } os.writeUTF(s); } static String readUTF(DataInput in) throws IOException { String s = in.readUTF(); if ("\u0000".equals(s)) { return null; } return s; } static void writeStrings(DataOutput out, String[] arr) throws IOException { if (arr == null) { out.writeInt(-1); return; } out.writeInt(arr.length); for (String m : arr) { writeUTF(out, m); } } static String[] readStrings(DataInput in) throws IOException { final int len = in.readInt(); if (len == -1) { return null; } final String[] arr = new String[len]; for (int i = 0; i < arr.length; i++) { arr[i] = readUTF(in); } return arr; } static void writeBytes(DataOutput out, byte[] arr) throws IOException { if (arr == null) { out.writeInt(-1); } else { out.writeInt(arr.length); out.write(arr); } } static byte[] readBytes(DataInput is) throws IOException { int len = is.readInt(); if (len == -1) { return null; } 
else { byte[] arr = new byte[len]; is.readFully(arr); return arr; } } }
/**
 * Constructs and returns the {@link KnowledgeBaseSearchCriteria} derived from
 * the given node. If the node (or an ancestor) carries
 * {@link MicroDataNodeAttribute} values, those override the "name" and "id"
 * taken from the {@link Node} parameter.
 *
 * @param node
 *            The {@link Node} parameter.
 * @return The {@link KnowledgeBaseSearchCriteria} with the node attributes.
 */
public KnowledgeBaseSearchCriteria constructSearchCriteria(Node node) {
    // Itemtype may be declared on an ancestor element; walk up to find it.
    String itemTypeValue = getFirstParentNodeWithMicroDataAttribute(node, MicroDataNodeAttribute.ITEMTYPE);
    KnowledgeBaseSearchCriteria searchCriteria = new KnowledgeBaseSearchCriteria();
    if (itemTypeValue != null && !itemTypeValue.isEmpty()) {
        // Split the itemtype URL into schema name and item type name.
        String schemaName = SchemaUtils.getSchemaName(itemTypeValue);
        String itemTypeName = SchemaUtils.getItemTypeName(itemTypeValue);
        searchCriteria.setItemType(itemTypeName);
        searchCriteria.setSchema(schemaName);
    }
    // Fall back to the default schema when none could be derived above.
    if (searchCriteria.getSchema() == null || searchCriteria.getSchema().isEmpty()) {
        searchCriteria.setSchema(DEFAULT_SCHEMA);
    }
    // Map the form-value mode to the tri-state "valid" flag
    // (VALID -> true, INVALID -> false, anything else -> null/unspecified).
    if (formValueType.equals(FormValueType.VALID)) {
        searchCriteria.setValid(true);
    } else if (formValueType.equals(FormValueType.INVALID)) {
        searchCriteria.setValid(false);
    } else {
        searchCriteria.setValid(null);
    }
    Node nodeAttr;
    // NOTE(review): WHITEBOX_TEST looks like a mode flag declared elsewhere
    // in this class — confirm its intent against the field definition.
    if (WHITEBOX_TEST) {
        // Inspect each configured microdata attribute present on this node.
        for (int i = 0; i < microDataNodeAttributes.length; i++) {
            nodeAttr = node.getAttributes().getNamedItem(microDataNodeAttributes[i].getValue());
            if (nodeAttr != null && nodeAttr.getNodeValue() != null && !nodeAttr.getNodeValue().isEmpty()) {
                String value = nodeAttr.getNodeValue();
                if (MicroDataNodeAttribute.ITEMPROP.equals(microDataNodeAttributes[i])) {
                    searchCriteria.setItemProp(value);
                } else if (MicroDataNodeAttribute.ITEMSCOPE.equals(microDataNodeAttributes[i])) {
                    // intentionally ignored
                } else if (MicroDataNodeAttribute.ITEMID.equals(microDataNodeAttributes[i])) {
                    // intentionally ignored
                } else if (MicroDataNodeAttribute.ITEMREF.equals(microDataNodeAttributes[i])) {
                    // intentionally ignored
                } else if (MicroDataNodeAttribute.ITEMTYPE.equals(microDataNodeAttributes[i])) {
                    // An itemtype on the node itself overrides the ancestor-derived one.
                    String schemaName = SchemaUtils.getSchemaName(value);
                    String itemTypeName = SchemaUtils.getItemTypeName(value);
                    searchCriteria.setItemType(itemTypeName);
                    searchCriteria.setSchema(schemaName);
                }
            }
        }
    } else {
        searchCriteria.setItemType(null);
        // Prefer the element's "id" attribute; fall back to "name".
        nodeAttr = node.getAttributes().getNamedItem(HtmlElementAttribute.ID.getValue());
        String nodeValue = null;
        if (nodeAttr != null && nodeAttr.getNodeValue() != null && !nodeAttr.getNodeValue().isEmpty()) {
            nodeValue = nodeAttr.getNodeValue();
        } else {
            nodeAttr = node.getAttributes().getNamedItem(HtmlElementAttribute.NAME.getValue());
            if (nodeAttr != null && nodeAttr.getNodeValue() != null && !nodeAttr.getNodeValue().isEmpty()) {
                nodeValue = nodeAttr.getNodeValue();
            }
        }
        // Optionally strip JSF auto-generated parent id/name prefixes.
        // NOTE(review): flag name "AJUST..." is spelled as declared elsewhere.
        if (AJUST_JSF_AUTO_GENERATED_ID) {
            nodeValue = JSFUtils.removeAutoGeneratedParentIdOrName(nodeValue);
        }
        searchCriteria.setItemProp(nodeValue);
    }
    LOG.trace("searchCriteria = {} for {} ", searchCriteria, node.getAttributes().getNamedItem(
            HtmlElementAttribute.ID.getValue()));
    return searchCriteria;
}
Add to calendar The New York Knickerbockers will open the team’s 70th season of NBA basketball on Wednesday, Oct. 28, facing their Eastern Conference rival Milwaukee Bucks, on the road, at 8:00 p.m. (EST). The team’s home-opener at Madison Square Garden will be the next night, Thursday, Oct. 29 versus the Atlanta Hawks at 8:00 p.m. New York’s regular season home schedule is highlighted by one visit from the NBA Champion Golden State Warriors on Sunday, Jan. 31 and two visits by the Eastern Conference Champion Cleveland Cavaliers on Friday, Nov. 13 and Saturday, Mar. 26. Holiday games at MSG include the annual Martin Luther King, Jr. Day game on Monday, Jan. 18 versus the Philadelphia 76ers at 1:00 p.m. The team will play a total of five afternoon games at home. Prior to the 82-game regular season, New York will participate in six preseason games, with the schedule to be released at a later date. MSG Network, the award-winning television home of the New York Knickerbockers, will telecast 78 regular season games, and all six preseason games. For Knicks ticket information, visit nyknicks.com, call (877) NYK-HOOP or email [email protected]. 2015-16 New York Knicks Regular Season Schedule Oct. Wednesday.. 28.. at Milwaukee....... 8:00 p.m.