content
stringlengths
10
4.9M
/**
 * Computes Fibonacci numbers (1-based: fibonacci(1) == fibonacci(2) == 1)
 * and prints the 10th one.
 */
public class Fibonacci {

    /**
     * Returns the index-th Fibonacci number.
     *
     * Iterative implementation: the original doubly-recursive version
     * recomputed every subproblem and ran in exponential time; this runs
     * in O(index) with O(1) space while returning identical values,
     * including the original base case (any index <= 2 yields 1).
     *
     * @param index 1-based position in the Fibonacci sequence
     * @return the Fibonacci number at that position
     */
    static int fibonacci(int index) {
        if (index <= 2) {
            return 1;
        }
        int previous = 1;
        int current = 1;
        for (int i = 3; i <= index; i++) {
            int next = previous + current;
            previous = current;
            current = next;
        }
        return current;
    }

    public static void main(String[] args) {
        int fibN = fibonacci(10);
        System.out.println(fibN);
    }
}
def deserialize(file_handle):
    """Load and return the first pickled object from an open binary file handle.

    SECURITY NOTE: ``pickle.load`` can execute arbitrary code embedded in the
    stream — only call this on data from a trusted source, never on
    user-supplied or network-received bytes.
    """
    loaded_object = pickle.load(file_handle)
    return loaded_object
Transnational Corporations and Human Rights: Strengthening Current Tools to Promote Accountability Abstract Transnational Corporations (TNCs) have not yet been recognised as subjects of international law, despite their dominant presence in global relations. They have no legal standing in international courts. Moreover, home states appear to be unwilling to punish the extraterritorial misconduct of their corporations. Thus, victims of corporate wrongdoing are left to contend with the very limited access and redress of domestic remedies in host countries. Against this backdrop, this article addresses two issues: (a) what international instrument can best be utilised to regulate TNCs' conduct with respect to human rights and (b) what framework may be used to promote a better accountability mechanism to hold TNCs liable for international human rights violations. It argues that in order to provide greater access to remedies for victims, there has to be an enforceable standard of human rights responsibility for TNCs, one that is internationally applicable. In order to have this chosen standard applied by the broad international community, it proposes the incorporation of the OECD Guidelines into Bilateral Investment Treaties.
def _convert_to_unicode(string):
    """Convert a hyphen-separated string of hex codepoints (e.g. "1f1fa-1f1f8")
    into a list of the corresponding unicode characters.

    Codepoints listed in BLACKLIST_UNICODE are skipped.

    :param string: hyphen-joined hexadecimal codepoint string.
    :return: list of single-character strings, one per kept codepoint.
    """
    codepoints = []
    for character in string.split('-'):
        if character in BLACKLIST_UNICODE:
            # BUG FIX: the original used a bare `next`, which is just a
            # reference to the builtin (a no-op expression) and did NOT
            # skip the blacklisted codepoint; `continue` does.
            continue
        # chr(int(x, 16)) is the Python-3 equivalent of the old
        # Python-2-only r'\U{0:0>8}'.format(x).decode('unicode-escape').
        codepoints.append(chr(int(character, 16)))
    return codepoints
/** * This is the root page for all OCaml preferences. */ public class RootPreferencePage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage { public RootPreferencePage() { super(FieldEditorPreferencePage.GRID); this.setPreferenceStore(OcamlPlugin.getInstance().getPreferenceStore()); this.setDescription("OcaIDE general preferences"); } @Override public void createFieldEditors() { this.addField(new BooleanFieldEditor( PreferenceConstants.P_DISABLE_UNICODE_CHARS, "Disable Unicode characters (check this if your system " + "doesn't display Unicode characters correctly)", this .getFieldEditorParent())); this.addField(new BooleanFieldEditor( PreferenceConstants.P_SHOW_TYPES_IN_OUTLINE, "Show the types in the outline", this .getFieldEditorParent())); this.addField(new BooleanFieldEditor( PreferenceConstants.P_SHOW_TYPES_IN_STATUS_BAR, "Show the types in the editor's status bar", this .getFieldEditorParent())); this.addField(new BooleanFieldEditor( PreferenceConstants.P_SHOW_TYPES_IN_POPUPS, "Show the types in popups when hovering over the editor", this .getFieldEditorParent())); } @Override public boolean performOk() { boolean result = super.performOk(); Misc.bNoUnicode = OcamlPlugin.getInstance().getPreferenceStore().getBoolean(PreferenceConstants.P_DISABLE_UNICODE_CHARS); return result; } public void init(IWorkbench workbench) { } }
/** * @brief Check candidates that are too close to each other, save the potential candidates * (i.e. biggest/smallest contour) and remove the rest */ static void _filterTooCloseCandidates(const vector< vector< Point2f > > &candidatesIn, vector< vector< vector< Point2f > > > &candidatesSetOut, const vector< vector< Point > > &contoursIn, vector< vector< vector< Point > > > &contoursSetOut, double minMarkerDistanceRate, bool detectInvertedMarker) { CV_Assert(minMarkerDistanceRate >= 0); vector<int> candGroup; candGroup.resize(candidatesIn.size(), -1); vector< vector<unsigned int> > groupedCandidates; for(unsigned int i = 0; i < candidatesIn.size(); i++) { for(unsigned int j = i + 1; j < candidatesIn.size(); j++) { int minimumPerimeter = min((int)contoursIn[i].size(), (int)contoursIn[j].size() ); for(int fc = 0; fc < 4; fc++) { double distSq = 0; for(int c = 0; c < 4; c++) { int modC = (c + fc) % 4; distSq += (candidatesIn[i][modC].x - candidatesIn[j][c].x) * (candidatesIn[i][modC].x - candidatesIn[j][c].x) + (candidatesIn[i][modC].y - candidatesIn[j][c].y) * (candidatesIn[i][modC].y - candidatesIn[j][c].y); } distSq /= 4.; double minMarkerDistancePixels = double(minimumPerimeter) * minMarkerDistanceRate; if(distSq < minMarkerDistancePixels * minMarkerDistancePixels) { if(candGroup[i]<0 && candGroup[j]<0){ candGroup[i] = candGroup[j] = (int)groupedCandidates.size(); vector<unsigned int> grouped; grouped.push_back(i); grouped.push_back(j); groupedCandidates.push_back( grouped ); } else if(candGroup[i] > -1 && candGroup[j] == -1){ int group = candGroup[i]; candGroup[j] = group; groupedCandidates[group].push_back( j ); } else if(candGroup[j] > -1 && candGroup[i] == -1){ int group = candGroup[j]; candGroup[i] = group; groupedCandidates[group].push_back( i ); } } } } } candidatesSetOut.clear(); contoursSetOut.clear(); vector< vector< Point2f > > biggerCandidates; vector< vector< Point > > biggerContours; vector< vector< Point2f > > smallerCandidates; vector< vector< Point 
> > smallerContours; for(unsigned int i = 0; i < groupedCandidates.size(); i++) { unsigned int smallerIdx = groupedCandidates[i][0]; unsigned int biggerIdx = smallerIdx; double smallerArea = contourArea(candidatesIn[smallerIdx]); double biggerArea = smallerArea; for(unsigned int j = 1; j < groupedCandidates[i].size(); j++) { unsigned int currIdx = groupedCandidates[i][j]; double currArea = contourArea(candidatesIn[currIdx]); if(currArea >= biggerArea) { biggerIdx = currIdx; biggerArea = currArea; } if(currArea < smallerArea && detectInvertedMarker) { smallerIdx = currIdx; smallerArea = currArea; } } biggerCandidates.push_back(candidatesIn[biggerIdx]); biggerContours.push_back(contoursIn[biggerIdx]); if(detectInvertedMarker) { smallerCandidates.push_back(alignContourOrder(candidatesIn[biggerIdx][0], candidatesIn[smallerIdx])); smallerContours.push_back(contoursIn[smallerIdx]); } } candidatesSetOut.push_back(biggerCandidates); contoursSetOut.push_back(biggerContours); candidatesSetOut.push_back(smallerCandidates); contoursSetOut.push_back(smallerContours); }
def upgrade_settings(self, setting_values, variable_revision_number, module_name):
    """Upgrade a saved settings list from an older module revision.

    Inserts default values for settings that were introduced in later
    revisions so that pipelines saved with old module versions keep
    loading unchanged.

    :param setting_values: flat list of stored setting strings.
    :param variable_revision_number: revision the settings were saved with.
    :param module_name: name of the module that saved the settings (unused).
    :return: tuple of (upgraded setting_values, current revision number).
    """
    if variable_revision_number == 1:
        # v1 -> v2: added worm measurement and flipping settings.
        # (These two notes were bare expressions — syntax errors — in the
        # original because the comment markers were lost; restored here.)
        setting_values = (
            setting_values[:FIXED_SETTINGS_COUNT_V1]
            + ["No", "4", "No", "None"]
            + setting_values[FIXED_SETTINGS_COUNT_V1:]
        )
        variable_revision_number = 2
    if variable_revision_number == 2:
        # v2 -> v3: added horizontal worm measurements.
        setting_values = (
            setting_values[:IDX_FLIP_WORMS_V2]
            + ["1"]
            + setting_values[IDX_FLIP_WORMS_V2:]
        )
        variable_revision_number = 3
    return setting_values, variable_revision_number
# Ported from Python 2 (print statement, raw_input, eval-based input()) to
# Python 3; integer semantics preserved with // for the triangular numbers.
#
# Reads an integer k and a binary string; splitting the string on '1' gives
# the lengths of the maximal runs of '0's (list c).
# k < 1: counts all substrings made of '0's only: sum of len*(len+1)/2.
# k >= 1: counts substrings containing exactly k ones, pairing the zero-runs
# that are k apart: (c[i]+1)*(c[i+k]+1).
k = int(input())
c = [len(s) for s in input().split('1')]
print(sum([i * (i + 1) // 2 for i in c]
          if k < 1 else
          [(c[i] + 1) * (c[i + k] + 1) for i in range(len(c) - k)]))
/*******************************************************************************
 * This file is part of the "Enduro2D"
 * For conditions of distribution and use, see copyright notice in LICENSE.md
 * Copyright (C) 2018-2020, by <NAME> (<EMAIL>)
 ******************************************************************************/

#pragma once

#include "node.hpp"

namespace e2d::nodes::impl
{
    // Invokes f(args...) and coerces the result to bool.
    // If f is invocable with a bool result, that value is forwarded
    // (false means "stop iteration" for the visitors below); otherwise
    // f is invoked for its side effects and true ("continue") is returned.
    template < typename F, typename... Args >
    bool invoke_with_force_bool(F&& f, Args&&... args) {
        if constexpr ( std::is_invocable_r_v<bool, F, Args...> ) {
            return std::invoke(std::forward<F>(f), std::forward<Args>(args)...);
        } else {
            std::invoke(std::forward<F>(f), std::forward<Args>(args)...);
            return true;
        }
    }
}

namespace e2d::nodes
{
    // Visits the children of `root` (plus `root` itself when
    // opts.include_root() is set), calling `f` on each node visited.
    // Returns false (stopping early) as soon as `f` returns false.
    // opts.recursive() descends into grandchildren; opts.reversed()
    // walks siblings last-to-first and visits the root after them.
    // A null root is trivially successful.
    template < typename Node, typename F >
    bool for_each_child(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        if ( !root ) {
            return true;
        }
        if ( opts.reversed() ) {
            // Reversed: descendants first (deepest via recursion), root last.
            for ( auto child = root->last_child(); child; child = child->prev_sibling() ) {
                if ( opts.recursive() && !for_each_child(child, f, options(opts).include_root(false)) ) {
                    return false;
                }
                if ( !impl::invoke_with_force_bool(f, child) ) {
                    return false;
                }
            }
            if ( opts.include_root() && !impl::invoke_with_force_bool(f, root) ) {
                return false;
            }
        } else {
            // Forward: root first, then children first-to-last.
            if ( opts.include_root() && !impl::invoke_with_force_bool(f, root) ) {
                return false;
            }
            for ( auto child = root->first_child(); child; child = child->next_sibling() ) {
                if ( !impl::invoke_with_force_bool(f, child) ) {
                    return false;
                }
                if ( opts.recursive() && !for_each_child(child, f, options(opts).include_root(false)) ) {
                    return false;
                }
            }
        }
        return true;
    }

    // Visits the parent chain of `root`, mirroring for_each_child:
    // forward order is root (optional), parent, then ancestors when
    // opts.recursive(); reversed order visits ancestors first.
    template < typename Node, typename F >
    bool for_each_parent(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        if ( !root ) {
            return true;
        }
        if ( opts.reversed() ) {
            if ( root->has_parent() ) {
                if ( opts.recursive() && !for_each_parent(root->parent(), f, options(opts).include_root(false)) ) {
                    return false;
                }
                if ( !impl::invoke_with_force_bool(f, root->parent()) ) {
                    return false;
                }
            }
            if ( opts.include_root() && !impl::invoke_with_force_bool(f, root) ) {
                return false;
            }
        } else {
            if ( opts.include_root() && !impl::invoke_with_force_bool(f, root) ) {
                return false;
            }
            if ( root->has_parent() ) {
                if ( !impl::invoke_with_force_bool(f, root->parent()) ) {
                    return false;
                }
                if ( opts.recursive() && !for_each_parent(root->parent(), f, options(opts).include_root(false)) ) {
                    return false;
                }
            }
        }
        return true;
    }
}

namespace e2d::nodes
{
    // Writes every visited parent of `root` to the output iterator `iter`
    // and returns how many nodes were written.
    template < typename Node, typename Iter >
    std::size_t extract_parents(
        const intrusive_ptr<Node>& root,
        Iter iter,
        const options& opts)
    {
        std::size_t count{0u};
        for_each_parent(root, [&count, &iter](const auto& parent){
            ++count;
            // `iter++ = x` is valid for output iterators such as
            // std::back_insert_iterator, where ++ is a no-op and
            // assignment performs the append.
            iter++ = parent;
        }, opts);
        return count;
    }

    // Same as extract_parents, but over the children of `root`.
    template < typename Node, typename Iter >
    std::size_t extract_children(
        const intrusive_ptr<Node>& root,
        Iter iter,
        const options& opts)
    {
        std::size_t count{0u};
        for_each_child(root, [&count, &iter](const auto& child){
            ++count;
            iter++ = child;
        }, opts);
        return count;
    }
}

namespace e2d::nodes
{
    // Snapshots the visited parents into a scratch buffer, then calls `f`
    // on each snapshotted node, stopping early when `f` returns false.
    // NOTE(review): the snapshot presumably exists so `f` may mutate the
    // hierarchy without disturbing the traversal — confirm before relying.
    template < typename Node, typename F >
    bool for_extracted_parents(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        //TODO(BlackMat): replace it to frame allocator
        // thread_local scratch vector; E2D_DEFER truncates it back to its
        // size on entry at scope exit, which keeps reentrant calls correct.
        static thread_local vector<intrusive_ptr<Node>> parents;
        const std::size_t begin_index = parents.size();
        E2D_DEFER([begin_index](){
            parents.erase(
                parents.begin() + begin_index,
                parents.end());
        });
        extract_parents(
            root,
            std::back_inserter(parents),
            opts);
        const std::size_t end_index = parents.size();
        for ( std::size_t i = begin_index; i < end_index; ++i ) {
            if ( !impl::invoke_with_force_bool(f, parents[i]) ) {
                return false;
            }
        }
        return true;
    }

    // Same snapshot-then-visit pattern as for_extracted_parents,
    // but over the children of `root`.
    template < typename Node, typename F >
    bool for_extracted_children(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        //TODO(BlackMat): replace it to frame allocator
        static thread_local vector<intrusive_ptr<Node>> children;
        const std::size_t begin_index = children.size();
        E2D_DEFER([begin_index](){
            children.erase(
                children.begin() + begin_index,
                children.end());
        });
        extract_children(
            root,
            std::back_inserter(children),
            opts);
        const std::size_t end_index = children.size();
        for ( std::size_t i = begin_index; i < end_index; ++i ) {
            if ( !impl::invoke_with_force_bool(f, children[i]) ) {
                return false;
            }
        }
        return true;
    }
}

namespace e2d::nodes
{
    // Writes the Component instance owned by each visited parent (when
    // present) to `iter`; parents without the component are skipped.
    // Returns the number of components written.
    template < typename Component, typename Node, typename Iter >
    std::size_t extract_components_from_parents(
        const intrusive_ptr<Node>& root,
        Iter iter,
        const options& opts)
    {
        std::size_t count{0u};
        for_each_parent(root, [&count, &iter](const auto& parent){
            if ( auto component = parent->owner().template component<Component>() ) {
                ++count;
                iter++ = component;
            }
        }, opts);
        return count;
    }

    // Same as extract_components_from_parents, but over the children.
    template < typename Component, typename Node, typename Iter >
    std::size_t extract_components_from_children(
        const intrusive_ptr<Node>& root,
        Iter iter,
        const options& opts)
    {
        std::size_t count{0u};
        for_each_child(root, [&count, &iter](const auto& child){
            if ( auto component = child->owner().template component<Component>() ) {
                ++count;
                iter++ = component;
            }
        }, opts);
        return count;
    }
}

namespace e2d::nodes
{
    // Snapshots the parents' Component instances into a scratch buffer,
    // then calls `f` on each, stopping early when `f` returns false.
    template < typename Component, typename Node, typename F >
    bool for_extracted_components_from_parents(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        //TODO(BlackMat): replace it to frame allocator
        static thread_local vector<gcomponent<Component>> components;
        const std::size_t begin_index = components.size();
        E2D_DEFER([begin_index](){
            components.erase(
                components.begin() + begin_index,
                components.end());
        });
        extract_components_from_parents<Component>(
            root,
            std::back_inserter(components),
            opts);
        const std::size_t end_index = components.size();
        for ( std::size_t i = begin_index; i < end_index; ++i ) {
            if ( !impl::invoke_with_force_bool(f, components[i]) ) {
                return false;
            }
        }
        return true;
    }

    // Same snapshot-then-visit pattern, but over the children's components.
    template < typename Component, typename Node, typename F >
    bool for_extracted_components_from_children(
        const intrusive_ptr<Node>& root,
        F&& f,
        const options& opts)
    {
        //TODO(BlackMat): replace it to frame allocator
        static thread_local vector<gcomponent<Component>> components;
        const std::size_t begin_index = components.size();
        E2D_DEFER([begin_index](){
            components.erase(
                components.begin() + begin_index,
                components.end());
        });
        extract_components_from_children<Component>(
            root,
            std::back_inserter(components),
            opts);
        const std::size_t end_index = components.size();
        for ( std::size_t i = begin_index; i < end_index; ++i ) {
            if ( !impl::invoke_with_force_bool(f, components[i]) ) {
                return false;
            }
        }
        return true;
    }
}

namespace e2d::nodes
{
    // Returns the first Component found while walking the parent chain,
    // or an empty gcomponent when none of the visited nodes owns one.
    // The visitor returns false on the first hit to stop the traversal.
    template < typename Component, typename Node >
    gcomponent<Component> find_component_from_parents(
        const intrusive_ptr<Node>& root,
        const options& opts)
    {
        gcomponent<Component> component;
        for_each_parent(root, [&component](const auto& child){
            if ( auto child_component = child->owner().template component<Component>() ) {
                component = child_component;
                return false;
            }
            return true;
        }, opts);
        return component;
    }

    // Same as find_component_from_parents, but searching the children.
    template < typename Component, typename Node >
    gcomponent<Component> find_component_from_children(
        const intrusive_ptr<Node>& root,
        const options& opts)
    {
        gcomponent<Component> component;
        for_each_child(root, [&component](const auto& child){
            if ( auto child_component = child->owner().template component<Component>() ) {
                component = child_component;
                return false;
            }
            return true;
        }, opts);
        return component;
    }
}
/**
 * ErrorCalculation: accumulates squared differences between actual and
 * ideal network outputs and reports the root mean square (RMS) error over
 * everything accumulated since the last reset.
 *
 * @author Jeff Heaton
 * @version 2.1
 */
public class ErrorCalculation {

    // Running sum of squared deltas across all updateError calls.
    private double globalError;

    // Number of ideal values folded into globalError so far.
    private int setSize;

    // Last RMS value computed, exposed globally. //mzbik
    public static double errorNN;

    /**
     * Returns the root mean square error for the accumulated training set,
     * and mirrors it into the static errorNN field.
     *
     * @return The current error for the neural network.
     */
    public double calculateRMS() {
        final double rms = Math.sqrt(this.globalError / (this.setSize));
        errorNN = rms;
        return rms;
    }

    /**
     * Reset the error accumulation to zero.
     */
    public void reset() {
        this.setSize = 0;
        this.globalError = 0;
    }

    /**
     * Folds one actual/ideal pair of output vectors into the running error.
     *
     * @param actual The values the network produced.
     * @param ideal  The expected values.
     */
    public void updateError(final double actual[], final double ideal[]) {
        for (int index = 0; index < actual.length; index++) {
            final double difference = ideal[index] - actual[index];
            this.globalError += difference * difference;
        }
        this.setSize += ideal.length;
    }
}
import { Component, Vue, Inject } from 'vue-property-decorator'; import appconst from './appconst' export default class AbpBase extends Vue { L(value: string, source?: string, ...argus: string[]): string { if (source) { return window.abp.localization.localize(value, source, argus); } else { return window.abp.localization.localize(value, appconst.localization.defaultLocalizationSourceName, argus); } } hasPermission(permissionName: string) { return window.abp.auth.hasPermission(permissionName); } hasAnyOfPermissions(...argus: string[]) { return window.abp.auth.hasAnyOfPermissions(...argus); } hasAllOfPermissions(...argus: string[]) { return window.abp.auth.hasAllOfPermissions(...argus); } }
The Carolina RailHawks and Miami FC met just two months ago, but the squads that took the WakeMed Soccer Park pitch Friday evening were far different from the sides that slogged to a scoreless draw in mid-May. Miami ran out five new starters, including recent acquisitions Gabriel Farfán, Michael Lahoud, Kwadwo Poku and Jonny Steele. Meanwhile, the hometown Hawks sported another goalkeeper in Brian Sylvestre, new USL loanee Mickey Daly off the bench, and a couple of new forwards in mid-season pickup Matt Fondy and Omar Bravo, the Chivas de Guadalajara star making his official RailHawks debut. While Friday’s result was the same, the circuitous route the teams took to arrive at the 3-3 draw was equally divergent. The match saw three equalizers, two lead changes and a debut goal for Bravo. The teal and white visitors struck first. A simple cross from Jonathan Borrajo in the 7th minute flew past a cast of RailHawks before finding Miami forward Jamie Chavez charging into the goalmouth. Chavez’s chip cleared Sylvestre before clanging off the underside of the crossbar and across the goal line for a 1-0 lead. Carolina evened matters in the 30th minute. A free kick off the left wing by Nazmi Albadawi pinged around the area before Fondy got a second touch on the Under Armour orb, nudging it across the goal line past prone Miami defender Rhett Bernstein, knotting the score at 1-1. Injuries forced Carolina into two first-half substitutions. In the 25th minute, Matt Watson left with an injured hamstring, giving way to Daly, a center back from USL side Bethlehem Steel who just arrived in Cary this week. Daly slid in at center back and James Marcelin moved up to his natural midfield position. In the 38th minute, Miami drew a foul that appeared to temporarily petrify the home defenders.
Miami’s Richie Ryan pounced, chipping his free kick to Kwadwo Poku creeping past the somnolent RailHawks backline. Poku maneuvered around Sylvestre’s unsuccessful challenge, then converted the open goal to reclaim a 2-1 advantage. With halftime approaching, team captain and leading assist man Nazmi Albadawi also left the match with a groin injury, with Austin da Luz subbing in to assume the armband. Just as Carolina appeared adrift early in the second stanza, it suddenly conjured an equalizer in the 61st, when a cross from Tiyi Shipalane found Brian Shriver’s leaping head. His redirection cleared goalkeeper Daniel Vega’s reach to even the match at 2-2. It was Carolina’s turn to grab its first lead in the 79th minute, when Shipalane centered to Bravo, who slotted his shot past Vega for a 3-2 score, triggering delirium in the grandstands. “My teammate went to the end line and then crossed it back,” Bravo said. “I only thought about shooting on goal. We had already come back from behind, and I just wanted to score the winning goal.” With 6,060 fans still in full throat, Carolina's storybook ending lasted a mere minute. Second-half sub Dario Cvitanich delivered a harmless, looping cross into the area that once again found Chavez charging past the RailHawks’ back line. Chavez settled the sphere and calmly converted to account for the 3-3 final margin. For all the bluster and Bravo, the match ultimately revealed two teams tied for surrendering the most goals (22 each) in the NASL this year. Sylvestre faced four shots on target and saved only one. His counterpart Vega faced eight shots on frame and made five saves. “I don’t think it was one of [Sylvestre’s] better performances,” said RailHawks manager Colin Clarke. “He’d be the first to admit that. But goal-wise, it wasn’t down to him. We’ve got to do better defensively picking up in the box. 
There are a couple of other [players] who let runners run free in our box, and that’s not good enough.” Nevertheless, the post-game featured the odd spectacle of a home team exuding satisfaction over its comeback, and a road side ruing its lost leads. “We didn’t do a good job with one-on-one situations in our box tonight, and it cost us,” Clarke said. “We always feel like we’re going to score goals with the additions of Fondy and now Omar. I was happy in a lot of ways, with our response and character from being 1-0 down and then 2-0 down, and getting back into it. We’ve got to learn from those moments when we fall asleep on free kicks, and straight after we score we have to be a little smarter.” “I think we deserved to win,” said Miami manager Alessandro Nesta. “Every ball is our box became a problem for nothing. But the team is much better, and the level is going up. We had many chances to finish the game, but we didn’t score.” Nesta said the solution to his season-long defensive woes is simpler. “Our defensive problem is mental,” he said. “In the most important moments of the game when there’s more pressure, we need to have more confidence. We need personality.” Nesta attributed the different performance of his team from two months ago to, well, different players. “We changed six players—this a different team,” Nesta said. “This is our first year, and our first team was so-so for me. We have a very good owner who is spending good money to fix the roster. For me, we can compete to win the league. But we have to start to win.” Meanwhile, the RailHawks’ frustrating home draw stood in stark contrast to the festive post-match atmosphere, with star-gazers clamoring for an autograph or just a sidelong smile from Bravo. “It’s important to get to know your teammates,” Bravo said. “As the games come along, it’s going to start feeling better.” The RailHawks (6-3-5, 21 pts.) next embark on a three-match road trip beginning Wednesday at the Ottawa Fury. 
Carolina returns to Cary Aug. 13 to host Puerto Rico FC. BOX SCORE LINEUPS CAR: Sylvestre, Beckie, Marcelin, Mensing, Moses, Watson (Daly, 25’), Albadawi (da Luz, 43’), Shipalane (Orlando, 87’), Shriver, Fondy, Bravo MIA: Vega, Farfán, Trafford (Adailton, 67’), Bernstein, Borrajo, Steele (B. Smith, 88’), Ryan, Lahoud, Poku, Chavez, Campos (Cvitanich, 67’) GOALS CAR: Fondy, 30’; Shriver, 61’ (Shipalane); Bravo, 79’ (Shipalane) MIA: Chavez, 7’ (Borrajo); Poku, 38’ (Ryan); Chavez, 80’ (Cvitanich) CAUTIONS CAR: Marcelin, 58’; Beckie, 82’; Mensing, 84’ MIA: Trafford, 65’; Poku, 90’ EJECTIONS CAR: --- MIA: --- ATTENDANCE: 6,060
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
    int_or_none,
    determine_protocol,
    try_get,
    unescapeHTML,
)


class DailyMailIE(InfoExtractor):
    """Extractor for Daily Mail (dailymail.co.uk) video pages and embeds."""

    # Matches both full video pages (/video/<category>/video-<id>) and the
    # bare embed pages (/embed/video/<id>).
    _VALID_URL = r'https?://(?:www\.)?dailymail\.co\.uk/(?:video/[^/]+/video-|embed/video/)(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://www.dailymail.co.uk/video/tvshowbiz/video-1295863/The-Mountain-appears-sparkling-water-ad-Heavy-Bubbles.html',
        'md5': 'f6129624562251f628296c3a9ffde124',
        'info_dict': {
            'id': '1295863',
            'ext': 'mp4',
            'title': 'The Mountain appears in sparkling water ad for \'Heavy Bubbles\'',
            'description': 'md5:a93d74b6da172dd5dc4d973e0b766a84',
        }
    }, {
        'url': 'http://www.dailymail.co.uk/embed/video/1295863.html',
        'only_matching': True,
    }]

    @staticmethod
    def _extract_urls(webpage):
        # Finds Daily Mail embed iframes inside third-party pages.
        return re.findall(
            r'<iframe\b[^>]+\bsrc=["\'](?P<url>(?:https?:)?//(?:www\.)?dailymail\.co\.uk/embed/video/\d+\.html)',
            webpage)

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The player configuration is embedded as JSON in a data-opts
        # attribute on the page.
        video_data = self._parse_json(self._search_regex(
            r"data-opts='({.+?})'", webpage, 'video data'), video_id)
        title = unescapeHTML(video_data['title'])
        # Prefer the sources URL from the player config (two known layouts);
        # fall back to the API endpoint derived from the video id.
        sources_url = (try_get(
            video_data,
            (lambda x: x['plugins']['sources']['url'],
             lambda x: x['sources']['url']),
            compat_str) or 'http://www.dailymail.co.uk/api/player/%s/video-sources.json' % video_id)
        video_sources = self._download_json(sources_url, video_id)
        body = video_sources.get('body')
        if body:
            # Some responses wrap the payload in a 'body' envelope.
            video_sources = body
        formats = []
        for rendition in video_sources['renditions']:
            rendition_url = rendition.get('url')
            if not rendition_url:
                continue
            tbr = int_or_none(rendition.get('encodingRate'), 1000)
            container = rendition.get('videoContainer')
            # M2TS-container renditions are delivered over HLS.
            is_hls = container == 'M2TS'
            protocol = 'm3u8_native' if is_hls else determine_protocol({'url': rendition_url})
            formats.append({
                'format_id': ('hls' if is_hls else protocol) + ('-%d' % tbr if tbr else ''),
                'url': rendition_url,
                'width': int_or_none(rendition.get('frameWidth')),
                'height': int_or_none(rendition.get('frameHeight')),
                'tbr': tbr,
                'vcodec': rendition.get('videoCodec'),
                'container': container,
                'protocol': protocol,
                'ext': 'mp4' if is_hls else None,
            })
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': unescapeHTML(video_data.get('descr')),
            'thumbnail': video_data.get('poster') or video_data.get('thumbnail'),
            'formats': formats,
        }
// lexText is the master lex routine. The lexer is started in this state. func lexText(l *Lexer) stateFn { for { if strings.HasPrefix(l.input[l.pos:], "/*") { return lexBlockComment } r := l.next() if r == eof { break } if unicode.IsSpace(r) { l.ignore() return lexText } if l.column == 1 && r == '%' { l.backup() return lexDirective } if unicode.IsLetter(r) { l.backup() return lexIdent } if unicode.IsNumber(r) || r == '-' { l.backup() return lexNumber } if t, isToken := l.oneRuneToken(r); isToken == true { l.emit(t) } } return nil }
A stabilized finite element scheme for the Navier-Stokes equations on quadrilateral anisotropic meshes It is well known that the classical local projection method as well as residual-based stabilization techniques, as for instance streamline upwind Petrov-Galerkin (SUPG), are optimal on isotropic meshes. Here we extend the local projection stabilization for the Navier-Stokes system to anisotropic quadrilateral meshes in two spatial dimensions. We describe the new method and prove an a priori error estimate. This method leads on anisotropic meshes to qualitatively better convergence behavior than other isotropic stabilization methods. The capability of the method is illustrated by means of two numerical test problems. Mathematics Subject Classification. 35Q30, 65N30, 76D05.
//@source https://gist.github.com/benorama/93373c3c1c3574732d6cc1b4754aab9f /** * Based on Vertx EventBus Client (https://github.com/vert-x3/vertx-bus-bower) * Requires SockJS Client */ import {EventEmitter, Injectable} from "@angular/core"; import * as SockJS from 'sockjs-client'; @Injectable() export class EventBusService { static initialized: boolean = false; static MAX_EVENT_QUEUE_SIZE: number = 100; static STATE_CONNECTING: number = 0; static STATE_OPEN: number = 1; static STATE_CLOSING: number = 2; static STATE_CLOSED: number = 3; static TYPE_PUBLISH: string = 'publish'; static TYPE_SEND: string = 'send'; static TYPE_REGISTER: string = 'register'; static TYPE_REGISTER_HANDLER: string = 'registerHandler'; static TYPE_UNREGISTER: string = 'unregister'; static TYPE_UNREGISTER_HANDLER: string = 'unregisterHandler'; public close: EventEmitter<any> = new EventEmitter<any>(); public open: EventEmitter<any> = new EventEmitter<any>(); private defaultHeaders: any; private eventQueue: QueuedEvent[]; private handlers: any = {}; private replyHandlers: any = {}; private sockJS; private state: number; constructor() { if (EventBusService.initialized) { throw new Error('Only one vertx eventBus can exist per application.'); } EventBusService.initialized = true; } get connected(): boolean { return this.state === EventBusService.STATE_OPEN; } connect(url: string, defaultHeaders: any = null, options: any = {}): void { let pingInterval = options.vertxbus_ping_interval || 5000; let pingTimerID; this.defaultHeaders = defaultHeaders; this.eventQueue = []; this.handlers = {}; this.replyHandlers = {}; this.sockJS = new SockJS(url, null, options); this.state = EventBusService.STATE_CONNECTING; let sendPing = () => { this.sockJS.send(JSON.stringify({type: 'ping'})); }; this.sockJS.onopen = () => { this.state = EventBusService.STATE_OPEN; // Send the first ping then send a ping every pingInterval milliseconds sendPing(); this.flushEventQueue(); pingTimerID = setInterval(sendPing, 
pingInterval); this.open.emit(null); }; this.sockJS.onclose = (e) => { this.state = EventBusService.STATE_CLOSED; if (pingTimerID) clearInterval(pingTimerID); this.close.emit(null); }; this.sockJS.onmessage = (e) => { let json = JSON.parse(e.data); // define a reply function on the message itself if (json.replyAddress) { Object.defineProperty(json, 'reply', { value: function (message, headers, callback) { this.send(json.replyAddress, message, headers, callback); } }); } if (this.handlers[json.address]) { // iterate all registered handlers let handlers = this.handlers[json.address]; for (let i = 0; i < handlers.length; i++) { if (json.type === 'err') { handlers[i]({ failureCode: json.failureCode, failureType: json.failureType, message: json.message }); } else { handlers[i](null, json); } } } else if (this.replyHandlers[json.address]) { // Might be a reply message let handler = this.replyHandlers[json.address]; delete this.replyHandlers[json.address]; if (json.type === 'err') { handler({failureCode: json.failureCode, failureType: json.failureType, message: json.message}); } else { handler(null, json); } } else { if (json.type === 'err') { try { console.error(json); } catch (e) { // dev tools are disabled so we cannot use console on IE } } else { try { console.warn('No handler found for message: ', json); } catch (e) { // dev tools are disabled so we cannot use console on IE } } } }; } disconnect() { if (this.sockJS) { this.state = EventBusService.STATE_CLOSING; this.sockJS.close(); } } /** * Publish a message * * @param {String} address * @param {Object} body * @param {Object} [headers] */ publish(address: string, body: any, headers?: any) { if (this.connected) { let message: any = { address: address, body: body, headers: mergeHeaders(this.defaultHeaders, headers), type: EventBusService.TYPE_PUBLISH }; this.sockJS.send(JSON.stringify(message)); } else { this.addEventToQueue({address, body, type: EventBusService.TYPE_PUBLISH}); } }; /** * Send a message * * @param 
{String} address * @param {Object} body * @param {Function} [replyHandler] * @param {Object} [headers] */ send<T>(address: string, body: any, replyHandler?: Function, headers?: any): void { if (this.connected) { let message: any = { address: address, body: body, headers: mergeHeaders(this.defaultHeaders, headers), type: EventBusService.TYPE_SEND }; if (replyHandler) { let replyAddress = makeUUID(); message.replyAddress = replyAddress; this.replyHandlers[replyAddress] = replyHandler; } this.sockJS.send(JSON.stringify(message)); } else { this.addEventToQueue({address, handler: replyHandler, body, type: EventBusService.TYPE_SEND}); } }; /*sendWithTimeout<T>(address: string, message: any, timeout: number, replyHandler?: Function): EventBus { return this.eventBus.sendWithTimeout(address, message, replyHandler); }; setDefaultReplyTimeout(millis: number): EventBus { return this.eventBus.setDefaultReplyTimeout(millis); };*/ /** * * @param address * @param headers */ register<T>(address: string, headers?: any): void { if (this.connected) { let envelope: any = { address: address, headers: mergeHeaders(this.defaultHeaders, headers), type: EventBusService.TYPE_REGISTER }; this.sockJS.send(JSON.stringify(envelope)); } } /** * Register a new handler * * @param {String} address * @param {Function} handler * @param {Object} [headers] */ registerHandler<T>(address: string, handler: Function, headers?: any): void { if (this.connected) { // ensure it is an array if (!this.handlers[address]) { this.handlers[address] = []; // First handler for this address so we should register the connection this.register(address, headers); } this.handlers[address].push(handler); } else { this.addEventToQueue({address, handler, type: EventBusService.TYPE_REGISTER_HANDLER}); } }; /** * * @param address * @param headers */ unregister<T>(address: string, headers?: any): void { if (this.connected) { let envelope: any = { address: address, headers: mergeHeaders(this.defaultHeaders, headers), type: 
EventBusService.TYPE_UNREGISTER }; this.sockJS.send(JSON.stringify(envelope)); } delete this.handlers[address]; } /** * Unregister a handler * * @param {String} address * @param {Function} handler * @param {Object} [headers] */ unregisterHandler<T>(address: string, handler: Function, headers?: any): void { if (this.connected) { let handlers = this.handlers[address]; if (handlers) { let idx = handlers.indexOf(handler); if (idx != -1) { handlers.splice(idx, 1); if (handlers.length === 0) { // No more local handlers so we should unregister the connection this.unregister(address, headers); } } } } else { this.addEventToQueue({address, handler, type: EventBusService.TYPE_UNREGISTER}); } }; // PRIVATE private addEventToQueue(event) { if (!this.eventQueue) { return; } this.eventQueue.push(event); if (this.eventQueue.length > EventBusService.MAX_EVENT_QUEUE_SIZE) { // Remove oldest events from the queue this.eventQueue.splice(0, this.eventQueue.length - EventBusService.MAX_EVENT_QUEUE_SIZE); } } private flushEventQueue() { if (!this.connected) { return; } while (this.eventQueue.length > 0) { let event: QueuedEvent = this.eventQueue.shift(); switch (event.type) { case EventBusService.TYPE_PUBLISH: this.publish(event.address, event.body); break; case EventBusService.TYPE_REGISTER_HANDLER: this.registerHandler(event.address, event.handler); break; case EventBusService.TYPE_UNREGISTER_HANDLER: this.unregisterHandler(event.address, event.handler); break; } } } } interface QueuedEvent { address: string; body: any; // Only for PUBLISH/SEND events handler: Function; // Only for REGISTER/SEND events headers: any; type: string; } function makeUUID() { return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (a, b) { return b = Math.random() * 16, (a == 'y' ? 
b & 3 | 8 : b | 0).toString(16); }); } function mergeHeaders(defaultHeaders, headers) { if (defaultHeaders) { if (!headers) { return defaultHeaders; } for (let headerName in defaultHeaders) { if (defaultHeaders.hasOwnProperty(headerName)) { // user can overwrite the default headers if (typeof headers[headerName] === 'undefined') { headers[headerName] = defaultHeaders[headerName]; } } } } // headers are required to be a object return headers || {}; }
/**
 * Scale the given BufferedImage to width and height that are powers of two.
 * Return the new scaled BufferedImage; if the source dimensions are already
 * powers of two, the source image itself is returned unchanged.
 *
 * @param bsrc source image (never modified)
 * @return {@code bsrc} itself, or a new TYPE_INT_ARGB image of
 *         power-of-two dimensions containing the scaled pixels
 */
public static BufferedImage convertToPowerOf2(BufferedImage bsrc) {
    int newW = GLApp.getPowerOfTwoBiggerThan(bsrc.getWidth());
    int newH = GLApp.getPowerOfTwoBiggerThan(bsrc.getHeight());
    if (newW == bsrc.getWidth() && newH == bsrc.getHeight()) {
        // Already power-of-two sized; no scaling needed.
        return bsrc;
    } else {
        AffineTransform at = AffineTransform.getScaleInstance(
                (double) newW / bsrc.getWidth(), (double) newH / bsrc.getHeight());
        BufferedImage bdest = new BufferedImage(newW, newH, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = bdest.createGraphics();
        try {
            g.drawRenderedImage(bsrc, at);
        } finally {
            // FIX: release the native resources held by the Graphics2D;
            // previously the context was never disposed (resource leak).
            g.dispose();
        }
        return bdest;
    }
}
// sjoin calls the String method of all parameters and joins them with a comma as a separator. func sjoin(s ...Stringer) string { ss := make([]string, len(s)) for i := range s { ss[i] = s[i].String() } return strings.Join(ss, ",") }
Prostitution is oft cited as the oldest of professions, but is it possible that it might just be the most noble as well? Today, we view prostitutes, typically, as women who engage in sexual activity for payment. However, back in the Tang Dynasty (618-907), a modern man would be astounded by the brothels of the day and the women living in them; modern interpretations simply fail to grasp the complexity of the “prostitute” of yore. Our sexual time traveler would find an artistic trade that transcends the simple and tawdry exchange of sex for money. For a long time in ancient China, prostitution was completely legal. As scholar Lin Yutang (林语堂) wrote: “One can never overstate the important roles Chinese prostitutes played in romantic relationships, literature, music, and politics.” The contradiction between the modern and the ancient concepts of prostitution in part comes from the origin of the word itself. The Chinese character for prostitute, 妓 (jì), is not so much to do with sex but instead “a female performer”. These women did not just offer sex but rather the pleasure of their company through music, singing, dancing, and even poetry. In ancient China, noble ladies did not need to be intelligent or talented to be respectable, and ancient China, for all its delicate charms, could be hard on women. A proverb first seen in the Ming Dynasty (1368-1644) book The Elders Thus Say (《安得长者言》), is frequently quoted describing the ideal woman: “A woman is virtuous as long as she is ignorant.” The Chinese woman is supposed to be obedient to her husband, dutiful to her children, mind her domestic affairs and be virtuously ignorant on all other matters. As wives and concubines were expected to abide by social codes, Chinese men were in need of intellectual counterparts of the opposite sex. 
Marriages were matters of social hierarchy, leaving endless scholars and aristocrats with marriages that lacked both the affection and communication that can be found on a deeper, more spiritual plane. Prostitutes were exceptions to the rule. Unlike the girls brought up in ordinary families who were deprived of education, prostitutes were taught to become—not merely entertaining performers—but the mental equals to aristocrats, scholars, government officials, and all manner of high society. As the Dutch sinologist Robert van Gulik observed in his 1961 book Sexual Life in Ancient China, when Chinese men courted prostitutes they were more looking for a friends with benefits type scenario, sometimes not even requiring sex at all. By enjoying the company of these skilled, entertaining, and intelligent women, they could escape from their sexual obligations to their wives and concubines, as well as the dull atmosphere of their homes. Flipping through The Complete Poetry of the Tang (《全唐诗》)—one of the most colossal compilations of Chinese poetry—reveals the influence of prostitutes upon Tang Dynasty culture. Of the 49,000 poems, over 4,000 are related to prostitutes and 136 were written by prostitutes themselves. Prostitution flourishing in the Tang Dynasty was probably due to the founding of a new governmental administration called jiaofang (教坊)—literally meaning “The School”, but “conservatory” may be more exact—a high-end finishing school for girls. They trained in music and dancing as well as literature, calligraphy, and a host of other highbrow entertainments such as chess and literary drinking games. The jiaofang system lasted several centuries, at least until the middle of the Qing Dynasty (1616-1911). The prostitutes trained in jiaofang were called “official prostitutes” and provided entertainment for officials and scholars alike. 
In Chang’an, these registered prostitutes usually needed to have at least one excellent quality to establish their fame; dancing, singing, and literary talent were all highly revered skills. Rather than sex, brothels’ main income came from the holding of feasts. The madams running the brothels didn’t encourage the prostitutes to have sex with their guests as this would decrease their value, and, of course, the fear of pregnancy was ever-present. Sexual relationships usually happened with the prostitute’s consent, and she usually maintained only one sexual relationship at one time. If a man wanted to pursue a sexual relationship with a prostitute, he had to be careful; if it came to light that the prostitute had a major patron of high rank, things could get ugly. The key to a prostitute’s popularity was usually not her body but her mind. In The Notebook of a Drunken Man《醉翁谈录》), a book by Song Dynasty (960-1127) writer Luo Ye (罗烨), the writer gives a faithful account of Chang’an’s biggest brothel, Ping Kang Li, and described in detail several of its more famous prostitutes and their respective characters. Interestingly, most of the prostitutes were not known for their beauty—some of them were even quite plainly described as being average looking. But, their intellect and poetry made them desirable. Not everyone could afford the pleasures of these prostitutes; according to the writings of Song scholar Sun Qi (孙棨), the residence of a first-class Tang prostitute contained spacious halls, yards with artificial hills and ponds, and exquisitely decorated furniture. In his masterful essay “Prostitutes and Concubines” (《妓女与姬妾》 ), Lin Yutang wrote: “To approach those women was not as easy as it seemed. The men usually needed to spend months and even years in pursuit, squandering thousands of silver goblets.” Red light districts were a veritable who’s who of high-society, nothing like the seedy backstreets of today. 
Aside from the upper-classes, young scholars made up the backbone of a brothel’s clientele. The Imperial Exam was held in Chang’an every three years, during which time young (and old) examinees flocked to the capital. The exam was a way for the government to screen officials, and passing it gave you the chance to acquire a comfortable administrative position as well as heightened social status. It was an unwritten rule that those who got the degree would throw lively parties in brothels. The Chronicles of the Tianbao Era (《天宝遗事》), a historic book on the Tang Dynasty, recorded a night in the Ping Kang Li brothel: “There you can find all the elite young men in town, and it is packed with scholars who have just excelled in the imperial exam, roaming around with their name cards.” Poetry in the Tang Dynasty bore the same influence as the top hits in today’s music charts. A famous scholar’s poem could make or break a prostitute’s fame; A poet named Cui Ya (崔涯) was such a prostitute critic. “Every poem that he wrote about a brothel would immediately spread in the streets and alleyways in town. If it was in praise, then the prostitute’s gate would be lined with carriages and horses; if it was negative, then the prostitute would be so panicked that she couldn’t eat or drink,” wrote Tang Dynasty scholar Fan Shu (范摅) concerning Cui Ya’s authority. In short, Cui Ya knew his prostitutes. The poets’ relationships were, in many ways, symbiotic. The prostitutes served as the perfect muse for the poets’ writings, and the poetry allowed both to find fame. With that, the intimate relationship between poet and prostitute blossomed. Like Cui Ya, Liu Yong (柳永), a Song Dynasty (960-1279) poet, spent his entire lifetime writing poems for prostitutes; unfortunately for Liu, his fame as a poet was so great that it backfired, crushing his hopes of becoming an official. 
When the well-known young man took the Imperial Exam and passed all the tests, the emperor rejected him, saying: “What do you need feats and fame for? You should just fill your cup and softly sing.” As a result Liu gave up all hope of becoming a politically-accomplished man and spent all his time and talent writing odes to the prostitutes with which he was so enamored. Liu intimately befriended the finest prostitutes of his era, finally finding himself impoverished and living off the financial aid of others. He died penniless but not friendless. Dozens of his prostitute “friends” funded his funeral. According to the Ming novelist Feng Menglong (冯梦龙), on the day of his funeral, “The whole city of Chang’an was dressed in white as his funeral procession was followed by all the prostitutes in town. The ground quivered with their mourning voices… For years to come, on every Tomb Sweeping Day, famous prostitutes would visit his grave and hold ceremonies. Those who didn’t attend the occasion would be too ashamed to appear for the spring excursion.” If Liu and Cui were Lennon and McCartney, then Bai Juyi (白居易) was unquestionably Elvis. Bai was a poet in the Tang Dynasty who also achieved fame for his friendships with prostitutes, with over 100 prostitutes mentioned in his poetry. His best known poem records his encounter with a prostitute on a boat. The poem, aside from its unerring literary virtue, reveals the typical life of a prostitute. On a misty, cold autumn night, Bai was attracted by the delicate sound of a four-stringed lute known as a pipa while he was feasting by the river of an obscure town, knowing it must have been played by a prostitute from the capital city. He sought out the girl, and she told her story. She was the best student of the pipa masters in Chang’an, and in her younger days she was the belle of society, heralded near and far. “The moneyed youths vied for the chance to present me brocades, and I receive numerous silks every time I finished a song.
Combs mounted with gems were shattered beating to the rhythm, and many a time, spilled wine stained my scarlet skirt.” But the carnival couldn’t last forever, and, like all great beauties, she became old. Like many of the prostitutes of the time, she married a merchant. Her husband was seldom home, and she ended up playing her pipa alone on a boat. Bai sighed, “We are both roaming souls in this world. Now that we’ve met, there is no need for us to know each other.” The connection between prostitutes and poetry didn’t stop at appearances in the poems of males fawning at their feet; many were gifted poets themselves. The most famous was Xue Tao (薛涛), a government official’s daughter who was educated in poetry and painting as a young girl, and by the age of 15 she was already widely-known for her poetic talent. Unfortunately, her father died when he brought his daughter to Sichuan, leaving Xue Tao in financial hardship. She registered herself in the jiaofang as an official prostitute and, undoubtedly, made the best of the career. She hosted celebrities—from revered scholars to high officials—and exchanged poems with almost all the important poets of her time. Even her residence became a tourist spot. If a man of decent social rank went to Chengdu without visiting Xue Tao, he would be embarrassed to say he had been to Chengdu at all. For most of her life she had been financially supported by Wei Gao (韦皋), a military general and the governor of Sichuan Province. On his death, Wei Gao left her a significant fortune, and she resigned herself to a quiet life by Washing Flower Brook near Chengdu, eventually dying at the ripe, old age of 73. Today there is still a park in Chengdu containing a pavilion where Xue is reputed to have contemplatively looked over the river. Xue’s contemporary, Yu Xuanji (鱼玄机), was another legendary poet, but had an altogether different tale to tell.
Like Xue, Yu was known as a genius poet in her youth and wrote of her envy for men in a poem, “I resent this skirt that hides my poetry, and in vain I envy men with degrees.” She was married to the scholar Li Yi (李亿), as his concubine. However, Li couldn’t handle his first wife’s ferocious jealousy, so, to protect Yu, he sent his mistress to a Taoist temple to meet with her in secret. During the Tang Dynasty, a Taoist temple could have very irreligious connotations. When Emperor Xuanzong (唐宣宗) visited a Taoist temple, he was astounded and enraged by the “nuns” wearing heavy make-up and dressed in bright colors who were obviously not living the chaste and secluded life that was expected of them. In the Tang Dynasty, 21 princesses became Taoist nuns, and they were known for their extravagant way of life in the temples, with no abstention from wine, partying, or men. For women who felt displaced as wives or concubines, Taoist temples were a haven, and with Yu Xuanji’s talents and libertine ways, she remained there. Li never returned. She had many sexual affairs, and her love poems were seldom addressed to the same man. However, she was later accused of whipping her maid to death—accusations that are likely false—and was executed at the age of 22. Her poems are still alive, displaying her liberal, individualistic nature and, ultimately, she received far greater critical acclaim than her contemporary Xue Tao. Today, academics hold her up as an early feminist icon. By the Ming Dynasty, prostitutes’ social statuses had marginally changed. They could attend scholars’ meetings and even called themselves “brother” in their correspondence. Liu Rushi (柳如是) was a prostitute who lived during the transition of the Ming and Qing dynasties—one of the most independent figures in the history of Chinese women. When she wanted to make the acquaintance of Qian Qianyi (钱谦益), a famous scholar, she eschewed traditional gender roles and simply bought a boat to travel to see him on her own.
She dressed herself in men’s clothes, and according to her biographer, Shen Qiu (沈虬), “carried an air so elegant and frank that she might as well have been a hermit.” Qian fell in love with her when he read her writings and eventually became her husband. Liu was also a fervent patriot who refused to surrender to the reign of the Qing government and involved herself in anti-Qing actions for most of her life. The Qing Dynasty brought with it, however, the death of the jiaofang, and as modernization progressed, prostitutes with noble talents became increasingly rare. Thus, perceptions began to change: it became common to make fun of prostitutes’ illiteracy and their clients’ vanity. The business of prostitution became greedier, and sex as a commodity took the place of the fine and honorable ancient Chinese prostitute. By the Qing Dynasty and right through to today, the noble art of prostitution has been in decline—making them little more than objects of men’s sexual desires. They are no longer seen as having great minds, being supreme dancers, pitch-perfect singers, or enchantresses who both write and are revered in the finest poetry. For all the barbarism of those bygone days, perhaps the tale of the ancient Chinese prostitute is one that modern man can take to heart.
/** * An immutable, tabular grid of locations that supplies row and column data as * {@link LocationList}s. * * <p>Internally, the grid is backed by a Guava {@link ArrayTable}. * * @author Peter Powers */ public final class LocationGrid implements Iterable<Location> { private final ArrayTable<Integer, Integer, Location> grid; // starts are inclusive, ends are exclusive private final int rowStart; private final int rowWidth; private final int rowEnd; private final int columnStart; private final int columnWidth; private final int columnEnd; // true if entire grid is being used private final boolean master; private LocationGrid( ArrayTable<Integer, Integer, Location> grid, int rowStart, int rowWidth, int columnStart, int columnWidth) { this.grid = grid; this.rowStart = rowStart; this.rowWidth = rowWidth; this.rowEnd = rowStart + rowWidth; this.columnStart = columnStart; this.columnWidth = columnWidth; this.columnEnd = columnStart + columnWidth; this.master = rowStart == 0 && rowWidth == grid.rowKeyList().size() && columnStart == 0 && columnWidth == grid.columnKeyList().size(); } /** * Return the number of {@code Location}s in this grid. */ public int size() { return rowWidth * columnWidth; } /** * Return the number of rows in this grid. */ public int rows() { return rowWidth; } /** * Return the row at index. * @param index of the row to retrieve * @throws IndexOutOfBoundsException if {@code index < 0 || index >= rows()} */ public LocationList row(int index) { return new Row(rowStart + checkElementIndex(index, rows())); } /** * Return the first row. */ public LocationList firstRow() { return row(0); } /** * Return the last row. */ public LocationList lastRow() { return row(rows() - 1); } /** * Return the number of columns in this grid. */ public int columns() { return columnWidth; } /** * Return the column at index. 
* @param index of the column to retrieve * @throws IndexOutOfBoundsException if * {@code index < 0 || index >= columns()} */ public LocationList column(int index) { return new Column(columnStart + checkElementIndex(index, columns())); } /** * Return the first column. */ public LocationList firstColumn() { return column(0); } /** * Return the last column. */ public LocationList lastColumn() { return column(columns() - 1); } /** * Return a new grid that is a window into this one. The specified window * dimensions must be less than or equal to the dimensions of this grid. * * @param rowStart first row of window * @param rowWidth number of rows in the window * @param columnStart first column of window * @param columnWidth */ public LocationGrid window(int rowStart, int rowWidth, int columnStart, int columnWidth) { checkElementIndex(rowStart, this.rowWidth); checkPositionIndex(rowStart + rowWidth, this.rowWidth); checkElementIndex(columnStart, this.columnWidth); checkPositionIndex(columnStart + columnWidth, this.columnWidth); return new LocationGrid( this.grid, this.rowStart + rowStart, rowWidth, this.columnStart + columnStart, columnWidth); } /** * Return the parent grid. Method returns itself unless it was created using * one or more calls to {@link #window(int, int, int, int)}. In this case, a * grid equivalent to this grid's greatest ancestor is returned. */ public LocationGrid parent() { return master ? this : new LocationGrid( grid, 0, grid.rowKeyList().size(), 0, grid.columnKeyList().size()); } /** * Lazily compute the bounds of the {@code Location}s in this grid. Method * delegates to {@link Locations#bounds(Iterable)}. 
*/ public Bounds bounds() { return Locations.bounds(this); } @Override public String toString() { StringBuilder sb = new StringBuilder("LocationGrid [") .append(rowWidth).append(" x ") .append(columnWidth).append("]") .append(" window=").append(!master); if (!master) { sb.append(" [parent ") .append(grid.rowKeyList().size()).append("r x ") .append(grid.columnKeyList().size()).append("c]"); } sb.append(NEWLINE); LocationList firstRow = firstRow(); int lastColumnIndex = rowWidth - 1; int lastRowIndex = columnWidth - 1; appendCorner(sb, 0, 0, firstRow.first()); appendCorner(sb, 0, lastColumnIndex, firstRow.last()); LocationList lastRow = lastRow(); appendCorner(sb, lastRowIndex, 0, lastRow.first()); appendCorner(sb, lastRowIndex, lastColumnIndex, lastRow.last()); if (size() < 1024) { sb.append("Locations:").append(NEWLINE); for (int i = 0; i < rows(); i++) { for (int j = 0; j < columns(); j++) { appendLocation(sb, i, j, grid.at(i, j)); } sb.append(NEWLINE); } } return sb.toString(); } private static void appendCorner(StringBuilder builder, int row, int column, Location loc) { builder.append("Corner: "); appendLocation(builder, row, column, loc); } private static void appendLocation(StringBuilder builder, int row, int column, Location loc) { builder.append(padStart(Integer.toString(row), 5, ' ')) .append(padStart(Integer.toString(column), 5, ' ')) .append(" ").append(loc) .append(NEWLINE); } @Override public Iterator<Location> iterator() { return new Iterator<Location>() { private int rowIndex = rowStart; private int columnIndex = columnStart; @Override public boolean hasNext() { return columnIndex < columnEnd && rowIndex < rowEnd; } @Override public Location next() { Location loc = grid.at(rowIndex, columnIndex++); if (columnIndex == columnEnd) { rowIndex++; } return loc; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } private class Row extends LocationList { private final int rowIndex; private Row(int rowIndex) { this.rowIndex = 
rowIndex; } @Override public int size() { return grid.columnKeyList().size(); } @Override public Location get(int index) { return grid.at(rowIndex, index); } @Override public Iterator<Location> iterator() { return new Iterator<Location>() { private int columnIndex; @Override public boolean hasNext() { return columnIndex < columnEnd; } @Override public Location next() { return grid.at(rowIndex, columnIndex++); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } } private class Column extends LocationList { private final int columnIndex; private Column(int columnIndex) { this.columnIndex = columnIndex; } @Override public int size() { return grid.rowKeyList().size(); } @Override public Location get(int index) { return grid.at(index, columnIndex); } @Override public Iterator<Location> iterator() { return new Iterator<Location>() { private int rowIndex; @Override public boolean hasNext() { return rowIndex < rowEnd; } @Override public Location next() { return grid.at(rowIndex++, columnIndex); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } } /** * Return a new builder. * * @param rows expected number of rows * @param columns expected number of columns */ public static Builder builder(int rows, int columns) { return new Builder(rows, columns); } /** * A single-use builder of {@code LocationGrid}s. Use * {@link LocationGrid#builder(int, int)} to create new builder instances. */ public static class Builder { private final ArrayTable<Integer, Integer, Location> grid; private boolean built = false; private Builder(int rows, int columns) { grid = ArrayTable.create( ContiguousSet.create(Range.closedOpen(0, rows), DiscreteDomain.integers()), ContiguousSet.create(Range.closedOpen(0, columns), DiscreteDomain.integers())); }; /** * Set the Location at the specified {@code row} and {@code column} indices. 
* * @param row index of location to set * @param column index of location to set * @param loc to set * @return this {@code Builder} */ public Builder set(int row, int column, Location loc) { grid.set(row, column, loc); return this; } /** * Fill a row with the specified {@code Location}s. * * @param index of row to fill * @param locs to fill row with * @return this {@code Builder} */ public Builder fillRow(int index, LocationList locs) { checkArgument(locs.size() == grid.columnKeyList().size()); int column = 0; for (Location loc : locs) { grid.set(index, column++, loc); } return this; } /** * Fill a row with the specified {@code Location}s. * * @param index of column to fill * @param locs to fill column with * @return this {@code Builder} */ public Builder fillColumn(int index, LocationList locs) { checkArgument(locs.size() == grid.rowKeyList().size()); int row = 0; for (Location loc : locs) { grid.set(row++, index, loc); } return this; } /** * Return a newly created {@code LocationGrid}. 
*/ public LocationGrid build() { checkState(!grid.containsValue(null), "Some Locations have not been set"); checkState(!built, "This builder has already been used"); return new LocationGrid( grid, 0, grid.rowKeyList().size(), 0, grid.columnKeyList().size()); } } // TODO clean public static void main(String[] args) { Set<Integer> strikeIndices = ContiguousSet.create( Range.closedOpen(0, 9), DiscreteDomain.integers()); Set<Integer> dipIndices = ContiguousSet.create( Range.closedOpen(0, 4), DiscreteDomain.integers()); ArrayTable<Integer, Integer, Location> t = ArrayTable.create(dipIndices, strikeIndices); for (int dipIndex : dipIndices) { for (int strikeIndex : strikeIndices) { Location loc = Location.create( 34.0 + 0.2 * strikeIndex, -117.4 + 0.1 * dipIndex); t.set(dipIndex, strikeIndex, loc); } } LocationGrid grid2 = new LocationGrid(t, 0, dipIndices.size(), 0, strikeIndices.size()); System.out.println(grid2); int rows = 4; int cols = 9; Builder b = builder(4, 9); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { Location loc = Location.create( 34.0 + 0.2 * j, -117.4 + 0.1 * i); b.set(i, j, loc); } } System.out.println(b.grid); LocationGrid grid = b.build(); System.out.println(grid.grid.rowKeyList()); System.out.println(grid.grid.columnKeyList()); System.out.println(grid); // System.out.println(grid.lastColumn()); // Iterator<Location> it = grid.firstRow().iterator(); // System.out.println(it); // TODO test itertator.remove() } }
#ifndef __RECYCLER_TESTS_FOO_HPP__
#define __RECYCLER_TESTS_FOO_HPP__

#include <cstddef>  // size_t
#include <cstdint>
#include <cstring>
#include <memory>   // FIX: std::shared_ptr was used without including <memory>

namespace recycler {

// Trivial test payload used by the recycler tests: a fixed-size,
// zero-initialized byte buffer with a no-op reset() hook.
template<size_t SIZE = 512>
class Foo {
public:
    Foo() = default;

    // Reset hook invoked by the recycler; the memset is deliberately
    // commented out so reset() stays cheap in benchmarks.
    void reset() {
        //std::memset(dummyData, 0, SIZE);
    };

    // Scratch buffer; value-initialized to all zeros.
    uint8_t dummyData[SIZE] = {};
};

// Convenience alias for a shared, default-sized Foo.
typedef std::shared_ptr<Foo<>> SharedFoo;

}

#endif
#include <sys/epoll.h> #include <signal.h> #include "try-common.h" #include "try-epoll.h" /************************ INTERFACE ************************/ void epoll_loop(int server_socket) { struct epoll_event events[MAXIMUM_NUMBER_OF_CONNECTIONS]; int epfd; int timeout; sigset_t sigmask; sigemptyset(&sigmask); sigaddset(&sigmask, SIGINT); timeout = TIMEOUT; epfd = _request_epoll_fd(); _register_epoll_socket(epfd, server_socket); while (true) { int number_of_events; number_of_events = epoll_wait(epfd, events, sizeof events, timeout); switch (number_of_events) { case ERROR: throw("Error on epoll"); case 0: die("Timeout on epoll\n"); } _handle_epoll_requests(epfd, server_socket, events, number_of_events); } close(epfd); } int _request_epoll_fd() { int epfd; if ((epfd = epoll_create1(EPOLL_CLOEXEC)) == ERROR) { throw("Error requesting epoll master file descriptor"); } return epfd; } void _register_epoll_socket(int epfd, int socket_fd) { struct epoll_event event; event.events = EPOLLIN; event.data.fd = socket_fd; if (epoll_ctl(epfd, EPOLL_CTL_ADD, socket_fd, &event) == ERROR) { throw("Error adding socket to epoll instance"); } } void _remove_epoll_socket(int epfd, int socket_fd) { if (epoll_ctl(epfd, EPOLL_CTL_DEL, socket_fd, NULL) == ERROR) { throw("Error removing socket from epoll instance"); } if (close(socket_fd) == ERROR) { throw("Error closing socket descriptor"); } printf("Death to client %d\n", socket_fd); } void _accept_epoll_connections(int epfd, int server_socket) { int client_socket; if ((client_socket = accept(server_socket, NULL, NULL)) == ERROR) { throw("Error accepting connection on server side"); } _register_epoll_socket(epfd, client_socket); printf("New connection: %d\n", client_socket); } void _handle_epoll_requests(int epfd, int server_socket, struct epoll_event* events, size_t number_of_events) { char buffer[MESSAGE_SIZE]; assert(number_of_events > 0); for (size_t index = 0; index < number_of_events; ++index) { int fd = events[index].data.fd; 
assert(events[index].events & EPOLLIN); if (fd == server_socket) { _accept_epoll_connections(epfd, server_socket); } else { int amount_read; if ((amount_read = read(fd, buffer, MESSAGE_SIZE)) == 0) { _remove_epoll_socket(epfd, fd); } else if (amount_read < MESSAGE_SIZE) { throw("Error reading on server side"); } else { if (write(fd, buffer, MESSAGE_SIZE) < MESSAGE_SIZE) { throw("Error writing on server side"); } } } } }
<reponame>bengmathew/tpaw
import {ChartContext} from '../ChartContext'

// Callback a component uses to hand a GSAP tween/timeline to the chart so the
// chart can track (and later cancel) running animations. Returns its argument
// unchanged so calls can be chained inline.
export type ChartRegisterAnimation = <
  T extends gsap.core.Tween | gsap.core.Timeline
>(
  x: T
) => T

// Contract implemented by every drawable chart layer.
export interface ChartComponent<Data> {
  // Render this component using the chart's current context.
  draw: (ctx: ChartContext<Data>) => void
  // Optional cleanup hook invoked when the component is removed.
  destroy?: () => void
  // Optional reaction to chart lifecycle changes; `change` identifies the
  // trigger: initial mount, pointer movement, data/state change, or resize.
  update?: (
    change: 'init' | 'pointer' | 'state' | 'sizing',
    ctx: ChartContext<Data>,
    registerAnimation: ChartRegisterAnimation
  ) => void
}
<gh_stars>0
use super::*;

/// Instruction set used by the test machinery; each variant exercises a
/// different message-passing shape (unit, tuple, struct, callback, ...).
#[derive(Debug, Clone)]
pub enum TestMessage {
    // Test is a unit-like instruction with no parameters
    Test,
    // TestData has a single parameter, as a tuple
    TestData(usize),
    // TestStruct is an example of passing a structure
    TestStruct(TestStruct),
    // TestCallback illustrates passing a sender and a structure to be sent back to the sender
    TestCallback(TestMessageSender, TestStruct),
    // AddSender can be implemented to push a sender onto a list of senders
    AddSender(TestMessageSender),
    // AddSenders can be implemented to push a vec of senders onto a list of senders
    AddSenders(Vec<TestMessageSender>),
    // RemoveAllSeners can be implemented to clear list of senders
    RemoveAllSenders,
    // Notify, is setup for a notification via TestData, where usize is a message count
    Notify(TestMessageSender, usize),
    // ForwardingMultiplier provides a parameter to the forwarder
    ForwardingMultiplier(usize),
    // Random message sending, illustrates that a variant struct can be used as well as a tuple
    ChaosMonkey {
        // A counter which is either incremented or decremented
        counter: u32,
        // The maximum value of the counter
        max: u32,
        // the type of mutation applied to the counter
        mutation: ChaosMonkeyMutation,
    },
}

// Generate these from MachineImpl
pub type TestMessageSender = smol::channel::Sender<TestMessage>;
pub type TestMessageReceiver = smol::channel::Receiver<TestMessage>;

impl MachineImpl for TestMessage {
    type Adapter = MachineBuilderTestMessage;
    type InstructionSet = TestMessage;
}

pub struct MachineAdapterTestMessage {}

// Sender bundled with the executor it sends on.
#[derive(Debug, Clone)]
pub struct MachineSenderTestMessage {
    sender: smol::channel::Sender<TestMessage>,
    executor: std::sync::Arc<smol::Executor<'static>>,
}

impl TestMessage {
    // Step the message one state forward; only ChaosMonkey mutates, every
    // other variant is returned unchanged.
    pub fn advance(self) -> Self {
        match self {
            Self::ChaosMonkey { counter, max, mutation } => Self::advance_chaos_monkey(counter, max, mutation),
            _ => self,
        }
    }

    // return true if advancing will mutate, false if advance has no effect
    // (i.e. the ChaosMonkey has not yet reached its terminal state of
    // counter == 0 while decrementing; non-ChaosMonkey variants never advance)
    pub fn can_advance(&self) -> bool {
        match self {
            Self::ChaosMonkey { counter, mutation, .. } => *counter != 0 || mutation != &ChaosMonkeyMutation::Decrement,
            _ => false,
        }
    }

    // Advance the chaos monkey variant by increment the counter until it reaches its maximum value, then decrement it.
    // Once the counter reaches 0, no further advancement is performed.
    const fn advance_chaos_monkey(counter: u32, max: u32, mutation: ChaosMonkeyMutation) -> Self {
        match counter {
            // counter == 0: start climbing, or stay terminal if decrementing
            0 => match mutation {
                ChaosMonkeyMutation::Increment => Self::ChaosMonkey {
                    counter: counter + 1,
                    max,
                    mutation,
                },
                ChaosMonkeyMutation::Decrement => Self::ChaosMonkey { counter, max, mutation },
            },
            // counter at/above max: flip to decrementing, or keep descending
            c if c >= max => match mutation {
                ChaosMonkeyMutation::Increment => Self::ChaosMonkey {
                    counter,
                    max,
                    mutation: ChaosMonkeyMutation::Decrement,
                },
                ChaosMonkeyMutation::Decrement => Self::ChaosMonkey {
                    counter: counter - 1,
                    max,
                    mutation,
                },
            },
            // strictly between 0 and max: continue in the current direction
            _ => match mutation {
                ChaosMonkeyMutation::Increment => Self::ChaosMonkey {
                    counter: counter + 1,
                    max,
                    mutation,
                },
                ChaosMonkeyMutation::Decrement => Self::ChaosMonkey {
                    counter: counter - 1,
                    max,
                    mutation,
                },
            },
        }
    }
}

// Direction of the ChaosMonkey counter mutation.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ChaosMonkeyMutation {
    Increment,
    Decrement,
}

// Payload struct passed around by TestStruct / TestCallback variants.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct TestStruct {
    pub from_id: usize,
    pub received_by: usize,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_chaos_monkey_variant() {
        let v = TestMessage::ChaosMonkey {
            counter: 0,
            max: 1,
            mutation: ChaosMonkeyMutation::Increment,
        };
        assert_eq!(true, v.can_advance());
        if let TestMessage::ChaosMonkey { counter, max, mutation } = v {
            assert_eq!(counter, 0);
            assert_eq!(max, 1);
            assert_eq!(mutation, ChaosMonkeyMutation::Increment);
        } else {
            assert_eq!(true, false)
        }
    }

    #[test]
    fn test_advance() {
        let v = TestMessage::ChaosMonkey {
            counter: 0,
            max: 1,
            mutation: ChaosMonkeyMutation::Increment,
        };
        let v = v.advance();
        if let TestMessage::ChaosMonkey { counter, max, mutation } = v {
            assert_eq!(counter, 1);
            assert_eq!(max, 1);
            assert_eq!(mutation, ChaosMonkeyMutation::Increment);
        } else {
            assert_eq!(true, false)
        }
        assert_eq!(true, v.can_advance());
    }

    #[test]
    fn test_advance_ends() {
        let v = TestMessage::ChaosMonkey {
            counter: 0,
            max: 1,
            mutation: ChaosMonkeyMutation::Increment,
        };
        let v = v.advance();
        let v = v.advance();
        assert_eq!(true, v.can_advance());
        let v = v.advance();
        if let TestMessage::ChaosMonkey { counter, max, mutation } = v {
            assert_eq!(counter, 0);
            assert_eq!(max, 1);
            assert_eq!(mutation, ChaosMonkeyMutation::Decrement);
        } else {
            assert_eq!(true, false)
        }
        assert_eq!(false, v.can_advance());
        let v = v.advance();
        if let TestMessage::ChaosMonkey { counter, max, mutation } = v {
            assert_eq!(counter, 0);
            assert_eq!(max, 1);
            assert_eq!(mutation, ChaosMonkeyMutation::Decrement);
        } else {
            assert_eq!(true, false)
        }
    }
}
<gh_stars>0
package se.definewild.mediadist.db;

import javax.annotation.Generated;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;
import se.definewild.mediadist.db.Mediaitems;
import se.definewild.mediadist.db.Subscriptions;

/**
 * JPA static metamodel for the {@code Media} entity, generated by EclipseLink.
 * Do not edit by hand; regenerate via the annotation processor instead.
 */
@Generated(value="EclipseLink-2.5.2.v20140319-rNA", date="2015-05-18T15:05:05")
@StaticMetamodel(Media.class)
public class Media_ {

    // One-to-many: media items belonging to this media.
    public static volatile ListAttribute<Media, Mediaitems> mediaitemsList;
    // Display name of the media.
    public static volatile SingularAttribute<Media, String> name;
    // One-to-many: subscriptions referencing this media.
    public static volatile ListAttribute<Media, Subscriptions> subscriptionsList;
    // Primary key.
    public static volatile SingularAttribute<Media, Integer> id;

}
def delete(self):
    """Delete this object.

    When polymorphic behaviour is enabled, delegate to the non-polymorphic
    queryset's delete; when it is disabled, fall back to the default
    implementation. NOTE(review): semantics of ``polymorphic_disabled`` and
    ``non_polymorphic()`` come from the surrounding project — confirm there.
    """
    if self.polymorphic_disabled:
        return super().delete()
    return self.non_polymorphic().delete()
// FindFree finds free (inactive) farm for occupation func FindFree(db *sqlx.DB, serviceName string) (*Instance, error) { farm := &Instance{} tx, err := db.Begin() if err != nil { return nil, err } row := tx.QueryRow(` SELECT id, name, mailer_state, subscriber_state, password, portal_username, portal_password FROM farms WHERE ` + serviceName + `_state = 'inactive' LIMIT 1 FOR UPDATE `) if err != nil { tx.Rollback() return nil, err } err = row.Scan( &(farm.ID), &(farm.Name), &(farm.MailerState), &(farm.SubscriberState), &(farm.Password), &(farm.PortalUsername), &(farm.PortalPassword), ) if err == sql.ErrNoRows { tx.Rollback() return nil, errors.New("No free farms") } else { if err != nil { tx.Rollback() return nil, err } } _, err = tx.Exec(`UPDATE farms SET `+serviceName+`_state = 'active' WHERE id = $1`, farm.ID) if err != nil { tx.Rollback() return nil, err } tx.Commit() if serviceName == "mailer" { farm.MailerState = StateActive } else { farm.SubscriberState = StateActive } return farm, nil }
<filename>core/src/services/rendezvous.ts
import * as k8s from "@pulumi/kubernetes";

import config, { ServiceConfig } from "../config";
import { provider } from "../cluster/provider";

// Creates the Kubernetes Deployment + Service pair that runs the rendezvous
// layer (API + collector containers) for one configured service.
export function createRendezvousService(serviceConfig: ServiceConfig) {
  const metadata = { name: `concurrentai-${serviceConfig.id}-rendezvous` };
  const appLabels = { run: `concurrentai-${serviceConfig.id}-rendezvous` };

  const deployment = new k8s.apps.v1.Deployment(
    `concurrentai-${serviceConfig.id}-rendezvous-deployment`,
    {
      metadata: metadata,
      spec: {
        selector: { matchLabels: appLabels },
        replicas: 1,
        template: {
          metadata: { labels: appLabels },
          spec: {
            containers: [
              {
                // Rendezvous API container; serves on port 9000 (the Service
                // below targets this port).
                name: "api",
                ports: [{ containerPort: 9000 }],
                image: `concurrentai/concurrentai-core-rendezvous-api:latest`,
                imagePullPolicy: "Always",
                env: [
                  {
                    name: "ORGANIZATION_ID",
                    value: config.concurrentai.organizationId,
                  },
                  {
                    name: "SERVICE_ID",
                    value: serviceConfig.id,
                  },
                  {
                    name: "PULSAR_URL",
                    value: config.pulsar.url,
                  },
                ],
                // Shared with the collector container via the emptyDir below.
                volumeMounts: [
                  {
                    name: "rendezvous-sockets",
                    mountPath: "/sockets",
                  },
                ],
              },
              {
                // Collector sidecar; consumes from Pulsar for the live model.
                name: "collector",
                image: `concurrentai/concurrentai-core-rendezvous-collector:latest`,
                imagePullPolicy: "Always",
                env: [
                  {
                    name: "ORGANIZATION_ID",
                    value: config.concurrentai.organizationId,
                  },
                  {
                    name: "SERVICE_ID",
                    value: serviceConfig.id,
                  },
                  {
                    // NOTE(review): if no model is marked live this resolves
                    // to undefined and the env var is unset — confirm the
                    // collector tolerates a missing ACTIVE_MODEL_ID.
                    name: "ACTIVE_MODEL_ID",
                    value: serviceConfig.models.find((model) => model.live)?.id,
                  },
                  {
                    name: "PULSAR_URL",
                    value: config.pulsar.url,
                  },
                ],
                volumeMounts: [
                  {
                    name: "rendezvous-sockets",
                    mountPath: "/sockets",
                  },
                ],
              },
            ],
            // Ephemeral volume both containers mount at /sockets.
            volumes: [
              {
                name: "rendezvous-sockets",
                emptyDir: {},
              },
            ],
          },
        },
      },
    },
    {
      provider,
    }
  );

  // ClusterIP service exposing the API container's port 9000 as port 80.
  const service = new k8s.core.v1.Service(
    `concurrentai-${serviceConfig.id}-rendezvous-service`,
    {
      metadata: metadata,
      spec: {
        ports: [{ port: 80, targetPort: 9000 }],
        selector: appLabels,
      },
    },
    {
      provider,
    }
  );

  return {
    deployment,
    service,
  };
}
from collections import deque

# Repeatedly replace the two smallest values with their average.
# Because the average of the two smallest values is <= every remaining value,
# pushing it back on the LEFT keeps the deque sorted, so the two leftmost
# elements are always the current two smallest.
N = int(input())  # declared count; the actual values come from the next line
values = sorted(map(int, input().split()))
queue = deque(values)

# `while len(queue) > 1` (instead of the old `while True` + break) also
# handles a single-element input without popping an empty deque, and the
# accumulator is no longer named `sum`, which shadowed the builtin.
while len(queue) > 1:
    first = queue.popleft()
    second = queue.popleft()
    queue.appendleft((first + second) / 2)

print(queue[0])
package com.fib.upp.entity;

import java.math.BigDecimal;

import lombok.Data;

/**
 * Batch processing record: identifies a batch run by type and tracks its
 * processing status, end time and accumulated transaction amount.
 * Getters/setters/equals/hashCode/toString are generated by Lombok's
 * {@code @Data}.
 *
 * @author fangyh
 * @version 1.0
 * @since 1.0
 * @date 2021-02-24
 */
@Data
public class BatchProcess {
	// Type/category of the batch run.
	private String batchType;
	// Current processing status of the batch.
	private String processStatus;
	// Timestamp (as text) when the batch finished.
	private String endDateTime;
	// Sum of transaction amounts in this batch.
	private BigDecimal transactionSum;
}
/** * Created by LinShunkang on 2018/8/22 */ public class JvmMetricsScheduler implements Scheduler { private JvmClassMetricsProcessor classMetricsProcessor; private JvmGCMetricsProcessor gcMetricsProcessor; private JvmMemoryMetricsProcessor memoryMetricsProcessor; private JvmBufferPoolMetricsProcessor bufferPoolMetricsProcessor; private JvmThreadMetricsProcessor threadMetricsProcessor; public JvmMetricsScheduler(JvmClassMetricsProcessor classMetricsProcessor, JvmGCMetricsProcessor gcMetricsProcessor, JvmMemoryMetricsProcessor memoryMetricsProcessor, JvmBufferPoolMetricsProcessor bufferPoolMetricsProcessor, JvmThreadMetricsProcessor threadMetricsProcessor) { this.classMetricsProcessor = classMetricsProcessor; this.gcMetricsProcessor = gcMetricsProcessor; this.memoryMetricsProcessor = memoryMetricsProcessor; this.bufferPoolMetricsProcessor = bufferPoolMetricsProcessor; this.threadMetricsProcessor = threadMetricsProcessor; } @Override public void run(long lastTimeSliceStartTime, long millTimeSlice) { long stopMillis = lastTimeSliceStartTime + millTimeSlice; processJVMClassMetrics(lastTimeSliceStartTime, lastTimeSliceStartTime, stopMillis); processJVMGCMetrics(lastTimeSliceStartTime, lastTimeSliceStartTime, stopMillis); processJVMMemoryMetrics(lastTimeSliceStartTime, lastTimeSliceStartTime, stopMillis); processJVMBufferPoolMetrics(lastTimeSliceStartTime, lastTimeSliceStartTime, stopMillis); processJVMThreadMetrics(lastTimeSliceStartTime, lastTimeSliceStartTime, stopMillis); } private void processJVMClassMetrics(long processId, long startMillis, long stopMillis) { JvmClassMetrics classMetrics = new JvmClassMetrics(ManagementFactory.getClassLoadingMXBean()); classMetricsProcessor.beforeProcess(processId, startMillis, stopMillis); try { classMetricsProcessor.process(classMetrics, processId, startMillis, stopMillis); } finally { classMetricsProcessor.afterProcess(processId, startMillis, stopMillis); } } private void processJVMGCMetrics(long processId, long startMillis, 
long stopMillis) { gcMetricsProcessor.beforeProcess(processId, startMillis, stopMillis); try { List<GarbageCollectorMXBean> garbageCollectorMxBeans = ManagementFactory.getGarbageCollectorMXBeans(); for (GarbageCollectorMXBean bean : garbageCollectorMxBeans) { JvmGCMetrics gcMetrics = new JvmGCMetrics(bean); gcMetricsProcessor.process(gcMetrics, processId, startMillis, stopMillis); } } finally { gcMetricsProcessor.afterProcess(processId, startMillis, stopMillis); } } private void processJVMMemoryMetrics(long processId, long startMillis, long stopMillis) { MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); MemoryUsage nonHeapMem = memoryMXBean.getNonHeapMemoryUsage(); MemoryUsage heapMem = memoryMXBean.getHeapMemoryUsage(); memoryMetricsProcessor.beforeProcess(processId, startMillis, stopMillis); try { memoryMetricsProcessor.process(new JvmMemoryMetrics(nonHeapMem, heapMem), processId, startMillis, stopMillis); } finally { memoryMetricsProcessor.afterProcess(processId, startMillis, stopMillis); } } private void processJVMBufferPoolMetrics(long processId, long startMillis, long stopMillis) { bufferPoolMetricsProcessor.beforeProcess(processId, startMillis, stopMillis); try { List<BufferPoolMXBean> pools = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class); for (BufferPoolMXBean mxBean : pools) { bufferPoolMetricsProcessor.process(new JvmBufferPoolMetrics(mxBean), processId, startMillis, stopMillis); } } finally { bufferPoolMetricsProcessor.afterProcess(processId, startMillis, stopMillis); } } private void processJVMThreadMetrics(long processId, long startMillis, long stopMillis) { ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); threadMetricsProcessor.beforeProcess(processId, startMillis, stopMillis); try { threadMetricsProcessor.process(new JvmThreadMetrics(threadMXBean), processId, startMillis, stopMillis); } finally { threadMetricsProcessor.afterProcess(processId, startMillis, stopMillis); } } }
<filename>src/MiniGen/Page/index.ts
// Barrel file: re-exports Page as this directory's public surface.
import Page from './Page';

export { Page };
<filename>src/members/members.service.ts<gh_stars>0
import { Injectable, NotFoundException } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Band } from 'src/bands/entities/band.entity';
import { Instrument } from 'src/instruments/entities/instrument.entity';
import { Repository } from 'typeorm';
import { CreateMemberDto } from './dto/create-member.dto';
import { UpdateMemberDto } from './dto/update-member.dto';
import { Member } from './entities/member.entity';

// CRUD service for band members, backed by TypeORM repositories.
@Injectable()
export class MembersService {
  constructor(
    @InjectRepository(Band)
    private readonly bandRepo: Repository<Band>,
    @InjectRepository(Member)
    private readonly memberRepo: Repository<Member>,
    @InjectRepository(Instrument)
    private readonly instrumentRepo: Repository<Instrument>
  ) {}

  // Creates a member after validating that the referenced band and instrument
  // exist; throws NotFoundException (-> HTTP 404) if either lookup fails.
  // The new member starts with exactly one instrument.
  async create(createMemberDto: CreateMemberDto) {
    const band = await this.bandRepo.findOne({ id: createMemberDto.bandId });

    if (!band) {
      throw new NotFoundException(`Band with id ${createMemberDto.bandId} was not found.`);
    }

    const instrument = await this.instrumentRepo.findOne({ id: createMemberDto.instrumentId })

    if (!instrument) {
      throw new NotFoundException(`Instrument with id ${createMemberDto.instrumentId} was not found.`);
    }

    const member = this.memberRepo.create(createMemberDto);
    member.band = band;
    member.instruments = [];
    member.instruments.push(instrument);

    return this.memberRepo.save(member);
  }

  // NOTE(review): the methods below are unimplemented CLI-scaffold stubs that
  // return placeholder strings; they need real implementations.
  findAll() {
    return `This action returns all members`;
  }

  findOne(id: number) {
    return `This action returns a #${id} member`;
  }

  update(id: number, updateMemberDto: UpdateMemberDto) {
    return `This action updates a #${id} member`;
  }

  remove(id: number) {
    return `This action removes a #${id} member`;
  }
}
#include <stdio.h>
#include <stdlib.h>

/*
 * Greedy choice over n rounds: each round supplies a pair (a, g).
 * While the running balance can still absorb `a` without exceeding 500,
 * pick "A" and add a; otherwise pick "G" and subtract g.
 * Output is one letter per round, followed by a newline.
 */
int main() {
    int rounds;
    scanf("%d", &rounds);

    int balance = 0;
    for (int round = 0; round < rounds; ++round) {
        int a, g;
        scanf("%d %d", &a, &g);

        if (balance + a <= 500) {
            putchar('A');
            balance += a;
        } else {
            putchar('G');
            balance -= g;
        }
    }
    putchar('\n');

    return 0;
}
/** * NumberTheory - summation / products etc... * * @author <b>Mariusz Gromada</b><br> * <a href="mailto:[email protected]">[email protected]</a><br> * <a href="http://mathspace.pl" target="_blank">MathSpace.pl</a><br> * <a href="http://mathparser.org" target="_blank">MathParser.org - mXparser project page</a><br> * <a href="http://github.com/mariuszgromada/MathParser.org-mXparser" target="_blank">mXparser on GitHub</a><br> * <a href="http://mxparser.sourceforge.net" target="_blank">mXparser on SourceForge</a><br> * <a href="http://bitbucket.org/mariuszgromada/mxparser" target="_blank">mXparser on Bitbucket</a><br> * <a href="http://mxparser.codeplex.com" target="_blank">mXparser on CodePlex</a><br> * <a href="http://janetsudoku.mariuszgromada.org" target="_blank">Janet Sudoku - project web page</a><br> * <a href="http://github.com/mariuszgromada/Janet-Sudoku" target="_blank">Janet Sudoku on GitHub</a><br> * <a href="http://janetsudoku.codeplex.com" target="_blank">Janet Sudoku on CodePlex</a><br> * <a href="http://sourceforge.net/projects/janetsudoku" target="_blank">Janet Sudoku on SourceForge</a><br> * <a href="http://bitbucket.org/mariuszgromada/janet-sudoku" target="_blank">Janet Sudoku on BitBucket</a><br> * * @version 3.0.0 */ public final class NumberTheory { /** * Minimum function. * * @param a the a function parameter * @param b the b function parameter * * @return if a,b &lt;&gt; Double.NaN returns Math.min(a, b), * otherwise returns Double.NaN. */ public static final double min(double a, double b) { if (Double.isNaN(a) || Double.isNaN(b)) return Double.NaN; return Math.min(a, b); } /** * Minimum function. * * @param numbers the a function parameter * * @return if each number form numbers &lt;&gt; Double.NaN returns the smallest number, * otherwise returns Double.NaN. */ public static final double min(double... 
numbers) { double min = Double.POSITIVE_INFINITY; for (double number : numbers) { if (Double.isNaN(number)) return Double.NaN; if (number < min) min = number; } return min; } /** * Maximum function. * * @param a the a function parameter * @param b the b function parameter * * @return if a,b &lt;&gt; Double.NaN returns Math.max(a, b), * otherwise returns Double.NaN. */ public static final double max(double a, double b) { if (Double.isNaN(a) || Double.isNaN(b)) return Double.NaN; return Math.max(a, b); } /** * Maximum function. * * @param numbers the a function parameter * * @return if each number form numbers &lt;&gt; Double.NaN returns the highest number, * otherwise returns Double.NaN. */ public static final double max(double... numbers) { double max = Double.NEGATIVE_INFINITY; for (double number : numbers) { if (Double.isNaN(number)) return Double.NaN; if (number > max) max = number; } return max; } /** * Greatest common divisor (GCD) * * @param a the a function parameter * @param b the b function parameter * @return GCD(a,b) */ public static final double gcd(int a, int b) { a = Math.abs(a); b = Math.abs(b); if (a == 0) return b; while (b != 0) if (a > b) a -= b; else b -= a; return a; } /** * Greatest common divisor (GCD) * * @param a the a function parameter * @param b the b function parameter * * @return if a, b &lt;&gt; Double.NaN returns gcd( (int)Math.round(a),(int)Math.round(b) ), * otherwise returns Double.NaN. */ public static final double gcd(double a, double b) { if ( Double.isNaN(a) || Double.isNaN(a) ) return Double.NaN; return gcd( (int)Math.round(a),(int)Math.round(b) ); } /** * Greatest common divisor (GCD) * * @param numbers the numbers * * @return GCD(a_1,...,a_n) a_1,...,a_n in numbers */ public static final double gcd(int... 
numbers) { if (numbers.length == 1) return numbers[0]; if (numbers.length == 2) return gcd( numbers[0], numbers[1] ); for (int i = 1; i < numbers.length; i++) numbers[i] = (int)gcd( numbers[i-1], numbers[i] ); return numbers[numbers.length-1]; } /** * Greatest common divisor (GCD) * * @param numbers the numbers * * @return if each number form numbers &lt;&gt; Double.NaN returns * GCD(a_1,...,a_n) a_1,...,a_n in numbers, * otherwise returns Double.NaN. */ public static final double gcd(double... numbers) { int[] intNumbers = new int[numbers.length]; for(int i = 0; i < numbers.length; i++) { double n = numbers[i]; if ( Double.isNaN(n) ) return Double.NaN; intNumbers[i] = (int)Math.round(n); } return gcd(intNumbers); } /** * Latest common multiply (LCM) * * @param a the a function parameter * @param b the b function parameter * * @return LCM(a,b) */ public static final double lcm(int a, int b) { if ( (a == 0) || (b == 0) ) return 0; return Math.abs(a*b) / gcd(a, b); } /** * Latest common multiply (LCM) * * @param a the a function parameter * @param b the b function parameter * * @return if a, b &lt;&gt; Double.NaN returns lcm( (int)Math.round(a), (int)Math.round(b) ), * otherwise returns Double.NaN. */ public static final double lcm(double a, double b) { if ( Double.isNaN(a) || Double.isNaN(a) ) return Double.NaN; return lcm( (int)Math.round(a), (int)Math.round(b) ); } /** * Latest common multiply (LCM) * * @param numbers the numbers * * @return LCM(a_1,...,a_n) a_1,...,a_n in numbers */ public static final double lcm(int... 
numbers) { if (numbers.length == 1) return numbers[0]; if (numbers.length == 2) return lcm( numbers[0], numbers[1] ); for (int i = 1; i < numbers.length; i++) numbers[i] = (int)lcm( numbers[i-1], numbers[i] ); return numbers[numbers.length-1]; } /** * Latest common multiply (LCM) * * @param numbers the numbers * * @return if each number form numbers &lt;&gt; Double.NaN returns * LCM(a_1,...,a_n) a_1,...,a_n in numbers, * otherwise returns Double.NaN. */ public static final double lcm(double... numbers) { int[] intNumbers = new int[numbers.length]; for(int i = 0; i < numbers.length; i++) { double n = numbers[i]; if ( Double.isNaN(n) ) return Double.NaN; intNumbers[i] = (int)Math.round(n); if (intNumbers[i] == 0) return 0; } return lcm(intNumbers); } /** * Adding numbers. * * @param numbers the numbers * * @return if each number from numbers &lt;&gt; Double.NaN returns * sum(a_1,...,a_n) a_1,...,a_n in numbers, * otherwise returns Double.NaN. */ public static final double sum(double... numbers) { if (numbers.length == 0) return Double.NaN; if (numbers.length == 1) return numbers[0]; double sum = 0; for (double xi : numbers) { if ( Double.isNaN(xi) ) return Double.NaN; sum+=xi; } return sum; } /** * Numbers multiplication. * * @param numbers the numbers * * @return if each number from numbers &lt;&gt; Double.NaN returns * prod(a_1,...,a_n) a_1,...,a_n in numbers, * otherwise returns Double.NaN. */ public static final double prod(double... numbers) { if (numbers.length == 0) return Double.NaN; if (numbers.length == 1) return numbers[0]; double prod = 1; for (double xi : numbers) { if ( Double.isNaN(xi) ) return Double.NaN; prod*=xi; } return prod; } /** * Prime test * * @param n The number to be tested. 
* * @return true if number is prime, otherwise false */ public static final boolean primeTest(long n) { /* * 2 is a prime :-) */ if (n == 2) return true; /* * Even number is not a prime */ if (n % 2 == 0) return false; /* * Everything <= 1 is not a prime */ if (n <= 1) return false; /* * Will be searching for divisors till sqrt(n) */ long top = (long)Math.sqrt(n); /* * Supporting variable indicating odd end of primes cache */ long primesCacheOddEnd = 3; /* * If prime cache exist */ if (mXparser.primesCache != null) if ( mXparser.primesCache.cacheStatus == PrimesCache.CACHING_FINISHED ) { /* * If prime cache is ready and number we are querying * is in cache the cache answer will be returned */ if ( n <= mXparser.primesCache.maxNumInCache ) return mXparser.primesCache.isPrime[(int)n]; else { /* * If number is bigger than maximum stored in cache * the we are querying each prime in cache * and checking if it is a divisor of n */ long topCache = Math.min(top, mXparser.primesCache.maxNumInCache); long i; for (i = 3; i <= topCache; i+=2) { if (mXparser.primesCache.isPrime[(int)i] == true) if (n % i == 0) return false; } /* * If no prime divisor of n in primes cache * we are seting the odd end of prime cache */ primesCacheOddEnd = i; } } /* * Finally we are checking any odd number that * still left and is below sqrt(n) agains being * divisor of n */ for (long i = primesCacheOddEnd; i <= top; i+=2) if (n % i == 0) return false; return true; } /** * Prime test * * @param n The number to be tested. 
* * @return true if number is prime, otherwise false */ public static final double primeTest(double n) { if ( Double.isNaN(n) ) return Double.NaN; boolean isPrime = primeTest((long)n); if (isPrime == true) return 1; else return 0; } /** * Prime counting function * * @param n number * * @return Number of primes below or equal x */ public static final long primeCount(long n) { if (n <= 1) return 0; if (n == 2) return 1; long numberOfPrimes = 1; for (long i = 3; i <= n; i++) if( primeTest(i) == true) numberOfPrimes++; return numberOfPrimes; } /** * Prime counting function * * @param n number * * @return Number of primes below or equal x */ public static final double primeCount(double n) { return primeCount((long)n); } /** * Summation operator (SIGMA FROM i = a, to b, f(i) by delta * * @param f the expression * @param index the name of index argument * @param from FROM index = form * @param to TO index = to * @param delta BY delta * * @return summation operation (for empty summation operations returns 0). 
*/ public static final double sigmaSummation(Expression f, Argument index, double from, double to, double delta) { double result = 0; if ( (Double.isNaN(delta) ) || (Double.isNaN(from) ) || (Double.isNaN(to) ) || (delta == 0) ) return Double.NaN; if ( (to >= from) && (delta > 0) ) { double i; for (i = from; i < to; i+=delta) result += mXparser.getFunctionValue(f, index, i); if ( delta - (i - to) > 0.5 * delta) result += mXparser.getFunctionValue(f, index, to); } else if ( (to <= from) && (delta < 0) ) { double i; for (i = from; i > to; i+=delta) result += mXparser.getFunctionValue(f, index, i); if ( delta - (to - i) > 0.5 * delta) result += mXparser.getFunctionValue(f, index, to); } else if (from == to) result += mXparser.getFunctionValue(f, index, from); return result; } /** * Product operator * * @param f the expression * @param index the name of index argument * @param from FROM index = form * @param to TO index = to * @param delta BY delta * * @return product operation (for empty product operations returns 1). * * @see Expression * @see Argument */ public static final double piProduct(Expression f, Argument index, double from, double to, double delta) { if ( (Double.isNaN(delta) ) || (Double.isNaN(from) ) || (Double.isNaN(to) ) || (delta == 0) ) return Double.NaN; double result = 1; if ( (to >= from) && (delta > 0) ) { double i; for (i = from; i < to; i+=delta) result *= mXparser.getFunctionValue(f, index, i); if ( delta - (i - to) > 0.5 * delta) result *= mXparser.getFunctionValue(f, index, to); } else if ( (to <= from) && (delta < 0) ) { double i; for (i = from; i > to; i+=delta) result *= mXparser.getFunctionValue(f, index, i); if ( delta - (to - i) > 0.5 * delta) result *= mXparser.getFunctionValue(f, index, to); } else if (from == to) result *= mXparser.getFunctionValue(f, index, from); return result; } /** * Minimum value - iterative operator. 
* * @param f the expression * @param index the name of index argument * @param from FROM index = form * @param to TO index = to * @param delta BY delta * * @return product operation (for empty product operations returns 1). * * @see Expression * @see Argument */ public static final double min(Expression f, Argument index, double from, double to, double delta) { if ( (Double.isNaN(delta) ) || (Double.isNaN(from) ) || (Double.isNaN(to) ) || (delta == 0) ) return Double.NaN; double min = Double.POSITIVE_INFINITY; double v; if ( (to >= from) && (delta > 0) ) { for (double i = from; i < to; i+=delta) { v = mXparser.getFunctionValue(f, index, i); if (v < min) min = v; } v = mXparser.getFunctionValue(f, index, to); if (v < min) min = v; } else if ( (to <= from) && (delta < 0) ) { for (double i = from; i > to; i+=delta) { v = mXparser.getFunctionValue(f, index, i); if (v < min) min = v; } v = mXparser.getFunctionValue(f, index, to); if (v < min) min = v; } else if (from == to) min = mXparser.getFunctionValue(f, index, from); return min; } /** * Maximum value - iterative operator. * * @param f the expression * @param index the name of index argument * @param from FROM index = form * @param to TO index = to * @param delta BY delta * * @return product operation (for empty product operations returns 1). 
* * @see Expression * @see Argument */ public static final double max(Expression f, Argument index, double from, double to, double delta) { if ( (Double.isNaN(delta) ) || (Double.isNaN(from) ) || (Double.isNaN(to) ) || (delta == 0) ) return Double.NaN; double max = Double.NEGATIVE_INFINITY; double v; if ( (to >= from) && (delta > 0) ) { for (double i = from; i < to; i+=delta) { v = mXparser.getFunctionValue(f, index, i); if (v > max) max = v; } v = mXparser.getFunctionValue(f, index, to); if (v > max) max = v; } else if ( (to <= from) && (delta < 0) ) { for (double i = from; i > to; i+=delta) { v = mXparser.getFunctionValue(f, index, i); if (v > max) max = v; } v = mXparser.getFunctionValue(f, index, to); if (v > max) max = v; } else if (from == to) max = mXparser.getFunctionValue(f, index, from); return max; } }
/**
 * Unit tests for {@link LettucePoolingConnectionProvider}.
 *
 * @author Mark Paluch
 */
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
class LettucePoolingConnectionProviderUnitTests {

	@Mock LettuceConnectionProvider connectionProviderMock;
	@Mock StatefulRedisConnection<byte[], byte[]> connectionMock;
	@Mock RedisAsyncCommands<byte[], byte[]> commandsMock;

	// Pooling configuration under test; defaults suffice for these cases.
	private LettucePoolingClientConfiguration config = LettucePoolingClientConfiguration.defaultConfiguration();

	@BeforeEach
	void before() {
		// Every pool checkout yields the mocked connection with async commands.
		when(connectionMock.async()).thenReturn(commandsMock);
		when(connectionProviderMock.getConnection(any())).thenReturn(connectionMock);
	}

	@Test // DATAREDIS-988
	void shouldReturnConnectionOnRelease() {

		LettucePoolingConnectionProvider provider = new LettucePoolingConnectionProvider(connectionProviderMock, config);

		provider.release(provider.getConnection(StatefulRedisConnection.class));

		// No transaction is active, so release must not touch the command API.
		verifyNoInteractions(commandsMock);
	}

	@Test // DATAREDIS-988
	void shouldDiscardTransactionOnReleaseOnActiveTransaction() {

		LettucePoolingConnectionProvider provider = new LettucePoolingConnectionProvider(connectionProviderMock, config);
		when(connectionMock.isMulti()).thenReturn(true);

		provider.release(provider.getConnection(StatefulRedisConnection.class));

		// An open MULTI must be discarded before the connection re-enters the pool.
		verify(commandsMock).discard();
	}
}
<commit_msg>Make retry.Do panic if it's started with interval less then 1/10 second <commit_before>package retry import ( "time" ) // Func is the function to retry returning true if it's successfully completed type Func = func() bool // Do retries provided function "attempts" times with provided interval and returning true if it's successfully completed func Do(attempts int, interval time.Duration, f Func) bool { for attempt := 0; ; attempt++ { if f() { return true } if attempt > attempts { break } time.Sleep(interval) } return false } <commit_after>package retry import ( "fmt" "time" ) // Func is the function to retry returning true if it's successfully completed type Func = func() bool // Do retries provided function "attempts" times with provided interval and returning true if it's successfully completed func Do(attempts int, interval time.Duration, f Func) bool { if interval < 1*time.Second/10 { panic(fmt.Sprintf("retry.Do used with interval less then 1/10 second, it seems dangerous: %s", interval)) } for attempt := 0; ; attempt++ { if f() { return true } if attempt > attempts { break } time.Sleep(interval) } return false }
<filename>src/Feature.rs // This file is part of caniuse-serde. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT. No part of predicator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2017 The developers of caniuse-serde. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT. /// A feature is a HTML, CSS or like feature that agents may not have support for. #[derive(Debug, Clone)] pub struct Feature<'a> { feature_name: &'a FeatureName, feature_detail: &'a FeatureDetail, } impl<'a> Feature<'a> { /// The name of this feature. #[inline(always)] pub fn feature_name(&self) -> &'a FeatureName { self.feature_name } /// The title of this feature. #[inline(always)] pub fn title(&self) -> &'a str { &self.feature_detail.title } /// The description of this feature. #[inline(always)] pub fn description(&self) -> &'a str { &self.feature_detail.description } /// The URL at which the specification of this feature can be found. #[inline(always)] pub fn specification_url(&self) -> &'a Url { &self.feature_detail.specification_url } /// The status of this feature. #[inline(always)] pub fn status(&self) -> &'a Status { &self.feature_detail.status } /// Links to additional documents detailing this feature or aspects of it. #[inline(always)] pub fn links(&self) -> &'a [Link] { &self.feature_detail.links[..] } /// Any bugs with this feature. Rarely used in the caniuse.com database. #[inline(always)] pub fn bugs(&self) -> &'a [Bug] { &self.feature_detail.bugs[..] } /// The caniuse.com database's categorisations of this feature. #[inline(always)] pub fn categories(&self) -> &'a [Category] { &self.feature_detail.categories[..] 
} /// The caniuse.com database's notes on this feature. #[inline(always)] pub fn general_notes(&self) -> &'a str { &self.feature_detail.notes } /// implementations; returns None if agent_name has no known usages. #[inline(always)] pub fn implementations_by_agents(&'a self, agent_name: &AgentName, lower_bound: Bound<&Version>, upper_bound: Bound<&Version>) -> Option<SupportRangeIterator<'a>> { match self.feature_detail.implementations_by_agents.get(agent_name) { None => None, Some(entry) => Some ( SupportRangeIterator { feature: self, range: entry.range((lower_bound, upper_bound)), } ), } } /// implementation; returns None if agent_name has no known usages. /// returns Some(None) if agent_name exists but not for the version. /// returns Some(Some(support) if agent_name exists and the version has known support #[inline(always)] pub fn implementation(&'a self, agent_name: &AgentName, version: &Version) -> Option<Option<Support<'a>>> { match self.feature_detail.implementations_by_agents.get(agent_name) { None => None, Some(entry) => { match entry.get(version) { None => Some(None), Some(support_detail) => Some(Some(Support { support_detail, feature: self, })) } } } } /// The supported usage of this feature; those agents where the feature is SupportMaturity::SupportedByDefault. #[inline(always)] pub fn supported_by_default_usage(&self) -> UsagePercentage { self.feature_detail.supported_by_default_usage } /// The supported usage of this feature; those agents where the feature is SupportMaturity::AlmostSupported. #[inline(always)] pub fn almost_supported_usage(&self) -> UsagePercentage { self.feature_detail.almost_supported_usage } /// The parent feature this one belongs to use; not widely used by the caniuse.com's database. #[inline(always)] pub fn parent_feature_if_any(&self) -> Option<&'a FeatureName> { self.feature_detail.parent.as_ref() } /// A list of keywords to make it easier to search for this feature. 
#[inline(always)] pub fn keywords(&self) -> &'a [String] { &self.feature_detail.keywords[..] } /// For Opera & Opera Mobile, assumes blink (but not for Opera Mini) /// For Opera Mini and Unknown browsers, returns an empty slice #[inline(always)] pub fn feature_identifiers(&self, agentName: &AgentName) -> &'a [String] { const NoNoneIdentifiers: [String; 0] = []; use self::AgentName::*; match *agentName { MicrosoftInternetExplorer => self.internet_explorer_feature_identifiers(), MicrosoftEdge => self.internet_explorer_feature_identifiers(), MozillaFirefox => self.firefox_feature_identifiers(), GoogleChrome => self.blink_feature_identifiers(), AppleSafari => self.webkit_feature_identifiers(), Opera => &self.feature_detail.blink_feature_identifiers[..], AppleSafariIOs => self.webkit_feature_identifiers(), OperaMini => &NoNoneIdentifiers[..], GoogleAndroidBrowserAndWebComponent => self.webkit_feature_identifiers(), Blackberry => self.webkit_feature_identifiers(), OperaMobile => self.blink_feature_identifiers(), GoogleChromeAndroid => self.blink_feature_identifiers(), MozillaFirefoxAndroid => self.firefox_feature_identifiers(), MicrosoftInternetExplorerMobile => self.internet_explorer_feature_identifiers(), UcBrowserAndroid => self.webkit_feature_identifiers(), SamsungBrowserAndroid => self.webkit_feature_identifiers(), QqBrowserAndroid => self.webkit_feature_identifiers(), BaiduBrowserAndroid => self.webkit_feature_identifiers(), Unknown(_) => &NoNoneIdentifiers[..], _ => &NoNoneIdentifiers[..], } } /// Identifiers to related MSDN sections. #[inline(always)] pub fn internet_explorer_feature_identifiers(&self) -> &'a [String] { &self.feature_detail.internet_explorer_feature_identifiers[..] } /// Identifiers to related blink (Google Chrome) bugs #[inline(always)] pub fn blink_feature_identifiers(&self) -> &'a [String] { &self.feature_detail.blink_feature_identifiers[..] 
} /// Identifiers to related Mozilla Firefox bugs #[inline(always)] pub fn firefox_feature_identifiers(&self) -> &'a [String] { &self.feature_detail.firefox_feature_identifiers[..] } /// Identifiers to related WebKit bugs #[inline(always)] pub fn webkit_feature_identifiers(&self) -> &'a [String] { &self.feature_detail.webkit_feature_identifiers[..] } /// Should any prefix be in uppercase? /// Extremely rarely used by the caniuse.com database. #[inline(always)] pub fn upper_case_prefix(&self) -> bool { self.feature_detail.upper_case_prefix } /// Effectively, is this feature in 'draft' form? /// Extremely rarely used, if at all, by the caniuse.com database, and only for extremely recent features. #[inline(always)] pub fn this_feature_is_not_yet_complete_or_accurate(&self) -> bool { !self.feature_detail.shown } }
package main import ( "fmt" "github.com/fetchrobotics/rosgo/ros" "log" "os" ) func main() { node, err := ros.NewNode("/test_param", os.Args) if err != nil { fmt.Println(err) os.Exit(-1) } defer node.Shutdown() if hasParam, err := node.HasParam("/rosdistro"); err != nil { log.Fatalf("HasParam failed: %v", err) } else { if !hasParam { log.Fatal("HasParam() failed.") } } if foundKey, err := node.SearchParam("rosdistro"); err != nil { log.Fatalf("SearchParam failed: %v", err) } else { if foundKey != "/rosdistro" { log.Fatal("SearchParam() failed.") } } if param, err := node.GetParam("/rosdistro"); err != nil { log.Fatalf("GetParam: %v", err) } else { if value, ok := param.(string); !ok { log.Fatal("GetParam() failed.") } else { if value != "kinetic\n" { log.Fatalf("Expected 'kinetic\\n' but '%s'", value) } } } if err := node.SetParam("/test_param", 42); err != nil { log.Fatalf("SetParam failed: %v", err) } if param, err := node.GetParam("/test_param"); err != nil { log.Fatalf("GetParam failed: %v", err) } else { if value, ok := param.(int32); ok { if value != 42 { log.Fatalf("Expected 42 but %d", value) } } else { log.Fatal("GetParam('/test_param') failed.") } } if err := node.DeleteParam("/test_param"); err != nil { log.Fatalf("DeleteParam failed: %v", err) } log.Print("Success") }
from cosmosis.datablock import names, option_section from time_delay_likelihood import TimeDelayLikelihood, B1608, RXJ1131, HE0435 import numpy as np def setup(options): lens_name = options.get_string(option_section, "lens_name", "B1608") if lens_name.upper() == "B1608": like = [B1608()] elif lens_name.upper() == "RXJ1131": like = [RXJ1131()] elif lens_name.upper() == "HE0435": like = [HE0435()] elif lens_name == "mock": try: fname = options[option_section, "filename"] lambdad = options[option_section, "lambdaD"] like = TimeDelayLikelihood.load_catalog( fname, lambdad) # rtns array of instances of class except: raise ValueError("Error in reading mocks") return like def execute(block, config): data_class = config z_m = block[names.distances, "z"] d_m = block[names.distances, "d_m"] omega_k = block[names.cosmological_parameters, "omega_k"] H0 = block[names.cosmological_parameters, "hubble"] def comovingDistance(z): return np.interp(z, z_m, d_m) if len(data_class) > 1: like = 0 for d in data_class: like = like + d.likelihood(comovingDistance, omega_k, H0) like_name = d.name + "_LIKE" else: data_class = data_class[0] like = data_class.likelihood(comovingDistance, omega_k, H0) like_name = data_class.name + "_LIKE" if np.isnan(like): like = -np.inf block[names.likelihoods, like_name] = like return 0
def saveCSV(data, path=None, fileName=None): if fileName is None: now = datetime.now() fileName = datetime.timestamp(now) path = makeDir(path) with open(path + '/' + str(fileName) + '.txt', 'w') as csvFile: csvWriter = csv.writer(csvFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) if isinstance(data[0], (list, np.ndarray)): for ind in range(len(data)): csvWriter.writerow([data[ind][ind2] for ind2 in range(len(data[ind]))]) elif isinstance(data[0], (float, int, np.complex128)): csvWriter.writerow([data[ind2] for ind2 in range(len(data))]) return path
An 'Ever Closer Union' of National Policy? The Convergence of National Environmental Policy in the European Union This article examines how far the EU has succeeded in encouraging the content, structure and style of national environmental policies to converge. Using fresh empirical evidence collected from 10 national environmental policies, it presents a very mixed pattern of change, with some elements of national policy converging more rapidly (and through different causal pathways) than others. These findings are viewed through the lens of popular theories of structural convergence, integration and Europeanization respectively, to reach a fuller understanding of the pathways and outcomes of change in an enlarging EU. Copyright © 2005 John Wiley & Sons, Ltd and ERP Environment.
def progress(self, batchId): base = "[{}/{} ({:.0f}%)]" if hasattr(self.dataLoader, "numberOfSamples"): current = batchId * self.dataLoader.batch_size total = self.dataLoader.numberOfSamples else: current = batchId total = self.epochLength return base.format(current, total, 100.0 * current/total)
def generate_tosca(self, package, package_set, tosca_meta_version="1.0", csar_version="1.0"): tosca = {"TOSCA-Meta-Version": tosca_meta_version, "CSAR-Version": csar_version, "Created-By": package_set.maintainer, "Entry-Definitions": package.descriptor_file["filename"]} return [tosca]
<filename>model-camt-types/src/generated/java/com/prowidesoftware/swift/model/mx/dic/PaymentReturnCriteria3.java package com.prowidesoftware.swift.model.mx.dic; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; /** * Defines the criteria used to report on the payment. * * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "PaymentReturnCriteria3", propOrder = { "msgIdInd", "reqdExctnDtInd", "instrInd", "instrStsRtrCrit", "instdAmtInd", "cdtDbtInd", "intrBkSttlmAmtInd", "prtyInd", "prcgVldtyTmInd", "purpInd", "instrCpyInd", "pmtMTInd", "pmtTpInd", "txIdInd", "intrBkSttlmDtInd", "endToEndIdInd", "pmtMtdInd", "dbtrInd", "dbtrAgtInd", "instgRmbrsmntAgtInd", "instdRmbrsmntAgtInd", "intrmyInd", "cdtrAgtInd", "cdtrInd" }) public class PaymentReturnCriteria3 { @XmlElement(name = "MsgIdInd") protected Boolean msgIdInd; @XmlElement(name = "ReqdExctnDtInd") protected Boolean reqdExctnDtInd; @XmlElement(name = "InstrInd") protected Boolean instrInd; @XmlElement(name = "InstrStsRtrCrit") protected InstructionStatusReturnCriteria instrStsRtrCrit; @XmlElement(name = "InstdAmtInd") protected Boolean instdAmtInd; @XmlElement(name = "CdtDbtInd") protected Boolean cdtDbtInd; @XmlElement(name = "IntrBkSttlmAmtInd") protected Boolean intrBkSttlmAmtInd; @XmlElement(name = "PrtyInd") protected Boolean prtyInd; @XmlElement(name = "PrcgVldtyTmInd") protected Boolean prcgVldtyTmInd; @XmlElement(name = "PurpInd") protected Boolean purpInd; @XmlElement(name = "InstrCpyInd") protected Boolean instrCpyInd; @XmlElement(name = "PmtMTInd") protected Boolean pmtMTInd; @XmlElement(name = "PmtTpInd") protected Boolean pmtTpInd; 
@XmlElement(name = "TxIdInd") protected Boolean txIdInd; @XmlElement(name = "IntrBkSttlmDtInd") protected Boolean intrBkSttlmDtInd; @XmlElement(name = "EndToEndIdInd") protected Boolean endToEndIdInd; @XmlElement(name = "PmtMtdInd") protected Boolean pmtMtdInd; @XmlElement(name = "DbtrInd") protected Boolean dbtrInd; @XmlElement(name = "DbtrAgtInd") protected Boolean dbtrAgtInd; @XmlElement(name = "InstgRmbrsmntAgtInd") protected Boolean instgRmbrsmntAgtInd; @XmlElement(name = "InstdRmbrsmntAgtInd") protected Boolean instdRmbrsmntAgtInd; @XmlElement(name = "IntrmyInd") protected Boolean intrmyInd; @XmlElement(name = "CdtrAgtInd") protected Boolean cdtrAgtInd; @XmlElement(name = "CdtrInd") protected Boolean cdtrInd; /** * Gets the value of the msgIdInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isMsgIdInd() { return msgIdInd; } /** * Sets the value of the msgIdInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setMsgIdInd(Boolean value) { this.msgIdInd = value; return this; } /** * Gets the value of the reqdExctnDtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isReqdExctnDtInd() { return reqdExctnDtInd; } /** * Sets the value of the reqdExctnDtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setReqdExctnDtInd(Boolean value) { this.reqdExctnDtInd = value; return this; } /** * Gets the value of the instrInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInstrInd() { return instrInd; } /** * Sets the value of the instrInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setInstrInd(Boolean value) { this.instrInd = value; return this; } /** * Gets the value of the instrStsRtrCrit property. 
* * @return * possible object is * {@link InstructionStatusReturnCriteria } * */ public InstructionStatusReturnCriteria getInstrStsRtrCrit() { return instrStsRtrCrit; } /** * Sets the value of the instrStsRtrCrit property. * * @param value * allowed object is * {@link InstructionStatusReturnCriteria } * */ public PaymentReturnCriteria3 setInstrStsRtrCrit(InstructionStatusReturnCriteria value) { this.instrStsRtrCrit = value; return this; } /** * Gets the value of the instdAmtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInstdAmtInd() { return instdAmtInd; } /** * Sets the value of the instdAmtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setInstdAmtInd(Boolean value) { this.instdAmtInd = value; return this; } /** * Gets the value of the cdtDbtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isCdtDbtInd() { return cdtDbtInd; } /** * Sets the value of the cdtDbtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setCdtDbtInd(Boolean value) { this.cdtDbtInd = value; return this; } /** * Gets the value of the intrBkSttlmAmtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isIntrBkSttlmAmtInd() { return intrBkSttlmAmtInd; } /** * Sets the value of the intrBkSttlmAmtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setIntrBkSttlmAmtInd(Boolean value) { this.intrBkSttlmAmtInd = value; return this; } /** * Gets the value of the prtyInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPrtyInd() { return prtyInd; } /** * Sets the value of the prtyInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPrtyInd(Boolean value) { this.prtyInd = value; return this; } /** * Gets the value of the prcgVldtyTmInd property. 
* * @return * possible object is * {@link Boolean } * */ public Boolean isPrcgVldtyTmInd() { return prcgVldtyTmInd; } /** * Sets the value of the prcgVldtyTmInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPrcgVldtyTmInd(Boolean value) { this.prcgVldtyTmInd = value; return this; } /** * Gets the value of the purpInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPurpInd() { return purpInd; } /** * Sets the value of the purpInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPurpInd(Boolean value) { this.purpInd = value; return this; } /** * Gets the value of the instrCpyInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInstrCpyInd() { return instrCpyInd; } /** * Sets the value of the instrCpyInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setInstrCpyInd(Boolean value) { this.instrCpyInd = value; return this; } /** * Gets the value of the pmtMTInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPmtMTInd() { return pmtMTInd; } /** * Sets the value of the pmtMTInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPmtMTInd(Boolean value) { this.pmtMTInd = value; return this; } /** * Gets the value of the pmtTpInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPmtTpInd() { return pmtTpInd; } /** * Sets the value of the pmtTpInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPmtTpInd(Boolean value) { this.pmtTpInd = value; return this; } /** * Gets the value of the txIdInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isTxIdInd() { return txIdInd; } /** * Sets the value of the txIdInd property. 
* * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setTxIdInd(Boolean value) { this.txIdInd = value; return this; } /** * Gets the value of the intrBkSttlmDtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isIntrBkSttlmDtInd() { return intrBkSttlmDtInd; } /** * Sets the value of the intrBkSttlmDtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setIntrBkSttlmDtInd(Boolean value) { this.intrBkSttlmDtInd = value; return this; } /** * Gets the value of the endToEndIdInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isEndToEndIdInd() { return endToEndIdInd; } /** * Sets the value of the endToEndIdInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setEndToEndIdInd(Boolean value) { this.endToEndIdInd = value; return this; } /** * Gets the value of the pmtMtdInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPmtMtdInd() { return pmtMtdInd; } /** * Sets the value of the pmtMtdInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setPmtMtdInd(Boolean value) { this.pmtMtdInd = value; return this; } /** * Gets the value of the dbtrInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isDbtrInd() { return dbtrInd; } /** * Sets the value of the dbtrInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setDbtrInd(Boolean value) { this.dbtrInd = value; return this; } /** * Gets the value of the dbtrAgtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isDbtrAgtInd() { return dbtrAgtInd; } /** * Sets the value of the dbtrAgtInd property. 
* * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setDbtrAgtInd(Boolean value) { this.dbtrAgtInd = value; return this; } /** * Gets the value of the instgRmbrsmntAgtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInstgRmbrsmntAgtInd() { return instgRmbrsmntAgtInd; } /** * Sets the value of the instgRmbrsmntAgtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setInstgRmbrsmntAgtInd(Boolean value) { this.instgRmbrsmntAgtInd = value; return this; } /** * Gets the value of the instdRmbrsmntAgtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInstdRmbrsmntAgtInd() { return instdRmbrsmntAgtInd; } /** * Sets the value of the instdRmbrsmntAgtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setInstdRmbrsmntAgtInd(Boolean value) { this.instdRmbrsmntAgtInd = value; return this; } /** * Gets the value of the intrmyInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isIntrmyInd() { return intrmyInd; } /** * Sets the value of the intrmyInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setIntrmyInd(Boolean value) { this.intrmyInd = value; return this; } /** * Gets the value of the cdtrAgtInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isCdtrAgtInd() { return cdtrAgtInd; } /** * Sets the value of the cdtrAgtInd property. * * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setCdtrAgtInd(Boolean value) { this.cdtrAgtInd = value; return this; } /** * Gets the value of the cdtrInd property. * * @return * possible object is * {@link Boolean } * */ public Boolean isCdtrInd() { return cdtrInd; } /** * Sets the value of the cdtrInd property. 
* * @param value * allowed object is * {@link Boolean } * */ public PaymentReturnCriteria3 setCdtrInd(Boolean value) { this.cdtrInd = value; return this; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); } @Override public boolean equals(Object that) { return EqualsBuilder.reflectionEquals(this, that); } @Override public int hashCode() { return HashCodeBuilder.reflectionHashCode(this); } }
<reponame>jrbeverly/JCompiler // PARSER_WEEDER // JOOS1: PARSER_EXCEPTION // JOOS2: PARSER_EXCEPTION // JAVAC: /** * Parser/weeder: * - Type long not allowed in Joos. */ public class Je_1_JoosTypes_Long { public Je_1_JoosTypes_Long() {} public static int test() { long y = (long) 42; return 123; } }
import Discord from 'discord.js'; import { getPracticeCategory, getPracticeCategoryVoiceChannels, getPracticeCategoryTextChannels, } from './categories'; import { environment } from '../../environment'; import { createIterable } from '../arrayUtils'; import { MAX_EMPTY_UNLOCKED_ROOMS } from './constants'; import { getNewUnlockedChannelName } from './channels'; export async function initialiseCategoryAndChannels(manager: Discord.GuildChannelManager) { let currentManager = manager; const existingCategory = getPracticeCategory(manager); if (!existingCategory) { const category = await manager.create(environment.channel_category, { type: 'category' }); currentManager = category.guild.channels; } currentManager = (await setupVoiceChannels(currentManager)) ?? currentManager; currentManager = (await setupTextChannels(currentManager)) ?? currentManager; const readMeChannel = await currentManager.create('read-me', { type: 'text', parent: existingCategory, bitrate: environment.default_bitrate * 1000, }); await readMeChannel.setPosition(0); await readMeChannel.overwritePermissions([ { id: currentManager.guild.id, deny: 'SEND_MESSAGES' }, ]); const response = new Discord.MessageEmbed() .addField( 'Practice Room Bot', `All voice channels named "Practice-Room" can be locked, so that others remain muted while you practice`, ) .addField( 'How to lock room', "When no one is in a practice room and you'd like to occupy it, use the command `p!lock` to mute everyone except for you within your particular practice room\n\n" + "Please remember to type `p!unlock` as soon as you're finished\n\n" + "Keep in mind that when you join a locked practice room, it will automatically server mute you unless you're the host of that particular practice room", ) .addField( "Don't want to practice solo?", 'If you want to chill out or jam, go to the JAM ROOMS category', ); readMeChannel.send(response); currentManager = readMeChannel.guild.channels; return currentManager; } async function 
setupVoiceChannels(manager: Discord.GuildChannelManager) { let lastManager = manager; const existingChannels = getPracticeCategoryVoiceChannels(lastManager); const deleteChannelsReq = existingChannels?.map(async (e) => await e.delete()); if (deleteChannelsReq) { const responses = await Promise.all(deleteChannelsReq); const lastElement = responses.pop(); if (lastElement) { lastManager = lastElement.guild.channels; } } const channelsToBeCreated = createIterable(MAX_EMPTY_UNLOCKED_ROOMS); const usedChannelNames = []; const practiceCategory = getPracticeCategory(manager); for (const _ in channelsToBeCreated) { const newChannelName = getNewUnlockedChannelName(usedChannelNames); if (!newChannelName) { return; } lastManager = ( await manager.create(newChannelName, { type: 'voice', parent: practiceCategory, bitrate: environment.default_bitrate * 1000, }) ).guild.channels; usedChannelNames.push(newChannelName); } return lastManager; } async function setupTextChannels(manager: Discord.GuildChannelManager) { let lastManager = manager; const existingChannels = getPracticeCategoryTextChannels(lastManager); const deleteChannelsReq = existingChannels?.map(async (e) => await e.delete()); if (deleteChannelsReq) { const responses = await Promise.all(deleteChannelsReq); const lastElement = responses.pop(); if (lastElement) { lastManager = lastElement.guild.channels; } } const channelsToBeCreated = createIterable(MAX_EMPTY_UNLOCKED_ROOMS); const usedChannelNames = []; const practiceCategory = getPracticeCategory(manager); for (const _ in channelsToBeCreated) { const newChannelName = getNewUnlockedChannelName(usedChannelNames); if (!newChannelName) { return; } lastManager = ( await manager.create(newChannelName, { type: 'text', parent: practiceCategory, bitrate: environment.default_bitrate * 1000, }) ).guild.channels; usedChannelNames.push(newChannelName); } return lastManager; }
A Likelihood Ratio Test of Stationarity Based on a Correlated Unobserved Components Model We propose a likelihood ratio (LR) test of stationarity based on a widely-used correlated unobserved components model. We verify the asymptotic distribution and consistency of the LR test, while a bootstrap version of the test is at least first-order accurate. Given empiricallyrelevant processes estimated from macroeconomic data, Monte Carlo analysis reveals that the bootstrap version of the LR test has better small-sample size control and higher power than commonly used bootstrap Lagrange multiplier (LM) tests, even when the correct parametric structure is specified for the LM test. A key feature of our proposed LR test is its allowance for correlation between permanent and transitory movements in the time series under consideration, which increases the power of the test given the apparent presence of non-zero correlations for many macroeconomic variables. Based on the bootstrap LR test, and in some cases contrary to the bootstrap LM tests, we can reject trend stationarity for U.S. real GDP, the unemployment rate, consumer prices, and payroll employment in favor of nonstationary processes with volatile stochastic trends.
<filename>src/main/java/epicsquid/roots/tileentity/TileEntityIncenseBurnerRenderer.java package epicsquid.roots.tileentity; import java.util.Random; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.RenderItem; import net.minecraft.client.renderer.block.model.ItemCameraTransforms; import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer; import net.minecraft.item.ItemStack; public class TileEntityIncenseBurnerRenderer extends TileEntitySpecialRenderer<TileEntityIncenseBurner> { @Override public void render(TileEntityIncenseBurner tei, double x, double y, double z, float partialTicks, int destroyStage, float alpha) { if (tei.inventory.getStackInSlot(0) != ItemStack.EMPTY) { RenderItem r = Minecraft.getMinecraft().getRenderItem(); GlStateManager.pushMatrix(); GlStateManager.translate(x + 0.5, y + 0.575, z + 0.35); GlStateManager.rotate(90f, 1.0f, 0, 0); Random random = new Random(); random.setSeed(tei.inventory.getStackInSlot(0).hashCode() + 256); GlStateManager.disableLighting(); r.renderItem(tei.inventory.getStackInSlot(0), ItemCameraTransforms.TransformType.GROUND); GlStateManager.enableLighting(); GlStateManager.popMatrix(); } } }
/** * Envelope command * Implements undo/do functions for creating, adding to, and removing from envelopes * @author andre */ class EnvelopeCommand extends Command { /** * Default constructor accepting parameters required to configure the undo * and do functions for an envelope command * @param mzTree MzTree upon which to perform undo/do functions * @param newEnvelopeID envelope ID to set for each trace specified by traceIDs * @param traceIDs IDs of traces to updated */ public EnvelopeCommand(MzTree mzTree, int newEnvelopeID, Integer[] traceIDs) { // do command: set traces specified by traceIDs to have newEnvelopeID this.doCommand = () -> { mzTree.updateEnvelopes(newEnvelopeID, traceIDs); }; // resolve the oldEnvelopeID for the undo command // due to the yellow rule this can only be 0, or the same envelopeID, for all points int oldEnvelopeID = mzTree.traceMap.getOrDefault(traceIDs[0], 0); // undo command: set points' specified to pointIDs to have their previous traceID this.undoCommand = () -> { mzTree.updateEnvelopes(oldEnvelopeID, traceIDs); }; } }
import { computed, customElement, property } from '@polymer/decorators' import '@polymer/paper-dialog/paper-dialog' import { html, PolymerElement } from '@polymer/polymer' import { IHydraResource } from 'alcaeus/types/Resources' // import 'bower:show-json/show-json.html' import { decycle } from '../../lib/decycle' @customElement('resource-json') export default class ResourceJson extends PolymerElement { @property({ type: Object }) public resource: IHydraResource @computed('resource') public get _decycledResource(): object { return decycle(this.resource) } public show() { (this.$.dialog as any).open() } public static get template() { return html` <style> paper-dialog { max-width: 90%; overflow: scroll; </style> <paper-dialog id="dialog" with-backdrop> <show-json json="[[_decycledResource]]" hide-copy-button></show-json> </paper-dialog> ` } }
/**
 * Broker for net application.
 * Decorates a {@link LocalBroker}: every delivery is performed locally via the
 * configured options wrapper, then forwarded to the {@link Configure} backend
 * (presumably for cross-node relay — confirm against Configure's contract).
 * @param <M> message type
 */
public class OnlineBroker<M> implements Broker<M> {

    private final LocalBroker<M> localBroker;
    private final Configure<M> configure;

    /**
     * @param localBroker @{@link LocalBroker} delegate handling local users
     * @param configure @{@link Configure} backend; the local broker is
     *                  registered with it on construction
     */
    public OnlineBroker(LocalBroker<M> localBroker, Configure<M> configure) {
        configure.registryLocalBroker(localBroker);
        this.localBroker = localBroker;
        this.configure = configure;
    }

    /** Broadcasts {@code m} locally, then notifies the backend with the sender's id. */
    @Override
    public void onBroker(M m, User<M> u) {
        configure.options(() -> localBroker.onBroker(m, u));
        configure.onBroker(m, u.id());
    }

    /** Delivers {@code m} to the given recipients locally, then notifies the backend. */
    @Override
    public void onBroker(M m, User<M> from, String... to) {
        configure.options(() -> localBroker.onBroker(m, from, to));
        configure.onBroker(m, from.id(), to);
    }

    /** Registers a user with the local broker only. */
    @Override
    public void registryUser(User<M> user) throws UserAlreadyRegistryException {
        localBroker.registryUser(user);
    }

    /** Removes a user from the local broker only. */
    @Override
    public void removeUser(User<M> user) {
        localBroker.removeUser(user);
    }
}
package com.ysl.stepone.fragment;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.recyclerview.widget.RecyclerView;

import com.stepone.component.image.MediaFetcher;
import com.stepone.uikit.view.tableview.ClazzViewModel;
import com.stepone.uikit.view.tableview.GridRecyclerViewAdapter;
import com.stepone.uikit.view.tableview.ResViewModel;
import com.stepone.uikit.view.tableview.ViewHolder;
import com.stepone.uikit.view.tableview.ViewModel;
import com.stepone.uikit.view.utils.DisplayUtils;
import com.ysl.stepone.R;

import java.util.ArrayList;
import java.util.List;

/**
 * FileName: FirstFragment
 * Author: y.liang
 * Date: 2019-12-05 11:08
 *
 * Demo fragment showing a 4-column grid table view; each cell is a
 * {@link TestVM} card, tapping a card rebuilds the data set.
 */
public class FirstFragment extends BaseFragment {

    private RecyclerView mTableView;
    private GridRecyclerViewAdapter mAdapter;

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.i("TT", getActivity().toString());
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        getSTActivity().setPageTitle("FIRST VIEW");
    }

    @SuppressLint("WrongConstant")
    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_first_tableview, container, false);
        mTableView = view.findViewById(R.id.tableview);
        // 4-column grid adapter bound to the recycler view.
        mAdapter = new GridRecyclerViewAdapter(mTableView, 4);
//        mAdapter.setNeedReverseLayout(true);
//        mAdapter.setLayoutOrientation(RecyclerView.HORIZONTAL);
        mAdapter.setItemSpaceInRow(20, GridRecyclerViewAdapter.SPACE_STRATEGY_ALL, null);
        // Populate after layout so the adapter can measure the view.
        view.post(new Runnable() {
            @Override
            public void run() {
                buildData();
            }
        });
        return view;
    }

    // NOTE(review): int-returning overload — presumably a BaseFragment hook
    // that supplies the layout resource id; confirm against BaseFragment.
    @Override
    public int onCreateView() {
        return R.layout.fragment_first_tableview;
    }

    // NOTE(review): never called from this class (only reachable from the
    // commented-out click handler in buildData) — candidate for removal.
    private void buildDatasource() {
        for (int i = 0; i < 55; i++) {
            if (i % 3 == 0) {
                // Every third row: a full-span gray gap cell with colored dividers.
                GapVM gapVM = new GapVM(i);
                gapVM.setSpanSize(2);
                gapVM.setFullSpan(true);
                gapVM.setUseAutoAverageSpace(false);
                gapVM.bottomDividerHieght = DisplayUtils.dp2px(getSTActivity(), 1);
                gapVM.bottomDivider = new ColorDrawable(Color.RED);
                gapVM.rightDividerWidth = 1;
                gapVM.rightDivider = new ColorDrawable(Color.GREEN);
                mAdapter.append(gapVM);
            } else {
                TestVM vm = new TestVM(i);
                vm.setSpanSize(((i % 3)));
//                vm.setSpanSize(1);
                vm.setItemClickListener(new ViewModel.OnClickListener() {
                    @Override
                    public void onClick(View view, ViewModel viewModel) {
                        TestVM testVM = (TestVM) viewModel;
                        Toast.makeText(getContext(), "TAP item at index "+testVM.getPayload(), Toast.LENGTH_SHORT).show();
                        buildData();
                    }
                });
                vm.bottomDividerHieght = 1;
                vm.bottomDividerLeftInset = 1;
                mAdapter.append(vm);
            }
        }
    }

    // Replaces the adapter contents with 9 single-span cards.
    private void buildData() {
        List<TestVM> list = new ArrayList<>();
        for (int i = 0; i < 9; i++) {
            TestVM vm = new TestVM(i);
            vm.setSpanSize((1));
            vm.setItemClickListener(new ViewModel.OnClickListener() {
                @Override
                public void onClick(View view, ViewModel viewModel) {
                    TestVM testVM = (TestVM) viewModel;
                    Toast.makeText(getContext(), "TAP item at index "+testVM.getPayload(), Toast.LENGTH_SHORT).show();
//                    buildDatasource();
//                    mAdapter.remove(viewModel);
                    new MediaFetcher().fetch(getSTActivity());
                }
            });
            vm.bottomDividerHieght = 1;
            vm.bottomDividerLeftInset = 1;
            list.add(vm);
        }
        mAdapter.setData(list);
    }

    /** Card cell: layout-resource view model whose payload index is shown as the title. */
    private static class TestVM extends ResViewModel<Integer, ViewHolder> {
        TestVM(int i) {
            super(R.layout.cell_card);
            setPayload(i);
        }

        @Override
        protected void onInitializeView(@NonNull ViewHolder holder) {
        }

        @Override
        protected void onWillDisplayView(@NonNull ViewHolder holder) {
        }

        @Override
        protected void onDisplayView(@NonNull ViewHolder holder) {
            holder.setText(R.id.title_view, getPayload()+"");
        }
    }

    /** Gap cell: fixed-height gray spacer built from a custom view class. */
    private static class GapVM extends ClazzViewModel<Integer> {
        private GapVM(int i) {
            super(GapCell.class);
            setPayload(i);
        }

        private static class GapCell extends ClazzViewModel.ViewCell<GapVM> {
            public GapCell(@NonNull Context context) {
                super(context);
                ViewGroup.LayoutParams params = getLayoutParams();
                params.height = DisplayUtils.dp2px(context, 44);
                setBackgroundColor(Color.GRAY);
            }

            @Override
            protected void onInitialize(@NonNull GapVM viewModel) {
            }

            @Override
            protected void onWillDisplay(@NonNull GapVM viewModel) {
            }

            @Override
            protected void onDisplay(@NonNull GapVM viewModel) {
            }
        }
    }
}
An index of fatal toxicity for new psychoactive substances An index of fatal toxicity for new psychoactive substances has been developed based solely on information provided on death certificates. An updated version of the index of fatal toxicity (Tf) first described in 2010 was calculated, based on the ratio of deaths to prevalence and seizures, for the original five substances (amphetamine, cannabis, cocaine/crack, heroin and 3,4-methylenedioxymethylamphetamine)*. These values correlated well with the 2010 index. Deaths were then examined for cases both where the substance was and was not found in association with other substances. This ratio (sole to all mentions; S/A) was then calculated for deaths in the period 1993 to 2016. This new measure of fatal toxicity, expressed by S/A, correlated well with the index Ln (Tf) of the original reference compounds. The calculation of S/A was then extended to a group of new psychoactive substances for which insufficient prevalence or seizure data were available to determine a value of Tf directly; values were instead obtained by interpolation of a graph of Tf versus S/A. Benzodiazepine analogues had particularly low values of S/A and hence Tf. By contrast, γ-hydroxybutyrate/γ-butyrolactone, α-methyltryptamine, synthetic cannabinoid receptor agonists and benzofurans had a higher fatal toxicity.
-- | LiquidHaskell test cases for refinement types over locally re-declared
--   'Maybe' and 'Either' (the Prelude versions are hidden so the measures
--   below apply to these local types).
--
--   NOTE(review): several specs here ('addMaybe', 'sub1', 'id2', 'const4',
--   'fI', and 'check' which compares values of two different type variables)
--   appear unsatisfiable as written — presumably deliberate *negative* test
--   cases for the checker. Confirm before "fixing" any of them.
module DataRefTest where

import Prelude hiding (Maybe (..), Either (..))

data Maybe a = Just a | Nothing

data Either a b = Left a | Right b

-- Spec claims the result (when Just) exceeds y, but x is unconstrained.
{-@ addMaybe :: Maybe Int -> y:Int -> Maybe {z:Int | z > y} @-}
addMaybe :: Maybe Int -> Int -> Maybe Int
addMaybe (Just x) y = Just (x + y)
addMaybe _ _ = Nothing

-- Same as above but with x constrained non-negative.
-- NOTE(review): x >= 0 only gives z >= y, not z > y — confirm intent.
{-@ addMaybe2 :: Maybe {x:Int | x >= 0 } -> {y:Int | y >= 0} -> Maybe {z:Int | z > y} @-}
addMaybe2 :: Maybe Int -> Int -> Maybe Int
addMaybe2 (Just x) y = Just (x + y)
addMaybe2 _ _ = Nothing

-- Lifted measure: True for Just.
{-@ measure isJust @-}
isJust :: Maybe a -> Bool
isJust (Just _) = True
isJust _ = False

-- Lifted measure: True for Nothing.
{-@ measure isNothing @-}
isNothing :: Maybe a -> Bool
isNothing Nothing = True
isNothing _ = False

-- Monomorphic wrapper around the partial getLeft.
getLeftInts :: Either Int Int -> Int
getLeftInts = getLeft

-- Partial: calls die on Right.
getLeft :: Either a b -> a
getLeft (Left x) = x
getLeft _ = die 0

-- Monomorphic instantiation of sumSame; precondition ties the two
-- constructors together.
{-@ sumSameInts :: e:Either Int Int -> {e2:Either Int Int | isLeft e => isLeft e2} -> Either Int Int @-}
sumSameInts :: Either Int Int -> Either Int Int -> Either Int Int
sumSameInts = sumSame

-- Adds two Eithers built with the same constructor; the refinement on the
-- second argument is meant to rule out the mixed (die) case.
{-@ sumSame :: (Num a, Num b) => e:Either a b -> {e2:Either a b | isLeft e => isLeft e2} -> Either a b @-}
sumSame :: (Num a, Num b) => Either a b -> Either a b -> Either a b
sumSame (Left x) (Left y) = Left (x + y)
sumSame (Right x) (Right y) = Right (x + y)
sumSame _ _ = die 0

-- Lifted measure: True for Left.
{-@ measure isLeft @-}
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False

-- Lifted measure: True for Right.
{-@ measure isRight @-}
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False

-- Unreachable-by-spec sink: callable only under a false precondition.
{-@ die :: {x:Int | false} -> a @-}
die :: Int -> a
die x = error "die"

-- NOTE(review): spec claims y >= 0 for arbitrary input — unsatisfiable.
{-@ sub1 :: Num a => a -> {y:a | y >= 0} @-}
sub1 :: Num a => a -> a
sub1 x = x - 1

-- NOTE(review): identity claimed non-negative — unsatisfiable.
{-@ id2 :: Num a => x:a -> {y:a | y >= 0} @-}
id2 :: Num a => a -> a
id2 z = z

-- NOTE(review): -4 claimed non-negative — unsatisfiable.
{-@ const4 :: Num a => {y:a | y >= 0} @-}
const4 :: Num a => a
const4 = -4

-- Tautological-looking spec, but x and y have different types.
{-@ check :: (Num a, Num b) => x:a -> y:b -> {y:b | y <= x || y > x} @-}
check :: (Num a, Num b) => a -> b -> b
check x y = y

-- 800
fI :: Num a => a
fI = -1
/* -*- c++ -*- */
/*
 * Copyright 2016 Analog Devices Inc.
 * Author: Paul Cercueil <[email protected]>
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3, or (at your option)
 * any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this software; see the file COPYING.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#define _USE_MATH_DEFINES
#include <cmath>

#include "iio_math_impl.h"

#ifdef GR_VERSION_3_7_OR_LESS
#include <gnuradio/analog/sig_source_f.h>
#else
#include <gnuradio/analog/sig_source.h>
#endif
#include <gnuradio/analog/sig_source_waveform.h>
#include <gnuradio/io_signature.h>

using namespace gr;
using namespace gr::iio;

/* Factory: builds a math-expression waveform generator hier block. */
iio_math_gen::sptr iio_math_gen::make(double sampling_freq,
		double wav_freq, const std::string &function)
{
	return gnuradio::get_initial_sptr(new iio_math_gen_impl(
				sampling_freq, wav_freq, function));
}

/*
 * Hier block with no inputs and one float output.  A sawtooth ramp over
 * [-pi, pi) at wav_freq acts as the "x" variable that the parsed math
 * expression is evaluated against.
 */
iio_math_gen_impl::iio_math_gen_impl(double sampling_freq,
		double wav_freq, const std::string &function) :
	hier_block2("math_gen",
			io_signature::make(0, 0, 0),
			io_signature::make(1, 1, sizeof(float)))
{
	src_block = analog::sig_source_f::make(sampling_freq / wav_freq,
			analog::GR_SAW_WAVE, 1, 2.0 * M_PI, -M_PI);

	/* Parse the expression; this wires blocks into the flowgraph. */
	int ret = parse_function(function);
	if (ret)
		throw std::runtime_error("Invalid function");

	/* Parser scratch state and our src_block reference are no longer
	 * needed once the flowgraph holds the connections. */
	cleanup();
	src_block.reset();
}

gr::basic_block_sptr iio_math_gen_impl::get_src_block()
{
	return src_block;
}

/* Connects the given block's output 0 to this hier block's output 0.
 * NOTE(review): the 'port' parameter is unused — the call hard-codes
 * source port 0; confirm whether 'port' was meant to be forwarded. */
void iio_math_gen_impl::connect_to_output(gr::basic_block_sptr block,
		unsigned int port)
{
	basic_block_sptr hier = shared_from_this();
	connect(block, 0, hier, 0);
}
#!/usr/bin/python3
import tkinter as tk
import turtle
turtle.bgcolor("black")
from tkinter.filedialog import askopenfilename  # NOTE(review): imported but unused
import ast

# Shared turtle and screen, created lazily on the first draw.
t = None
wn = None


def create_l_system(iters, axiom, rules):
    """Expand `axiom` for `iters` generations using the production `rules`.

    `rules` maps a symbol to its replacement string; symbols without a rule
    are copied through unchanged.
    """
    start_string = axiom
    if iters == 0:
        return axiom
    end_string = ""
    for _ in range(iters):
        end_string = "".join(rules[i] if i in rules else i for i in start_string)
        start_string = end_string
    return end_string


def draw_l_system(t, instructions, angle, distance):
    """Interpret the expanded L-system string with turtle `t`.

    'F' moves forward, '+' turns right, '-' turns left; any other symbol
    is ignored.
    """
    t.color("#03fcdf", "green")
    for cmd in instructions:
        if cmd == 'F':
            t.forward(distance)
        elif cmd == '+':
            t.right(angle)
        elif cmd == '-':
            t.left(angle)


def draw(iterations, axiom, rules, angle, length=8, size=3, y_offset=0,
        x_offset=0, offset_angle=0, width=800, height=800):
    """Expand the L-system and render it, creating the turtle on first use."""
    inst = create_l_system(iterations, axiom, rules)
    global t
    global wn
    if t == None:
        t = turtle.Turtle()
        wn = turtle.Screen()
        #wn.setup(width, height)
    # Move (pen up) to the requested offset before drawing.
    t.up()
    t.backward(-x_offset)
    t.left(90)
    t.backward(-y_offset)
    t.left(offset_angle)
    t.down()
    t.speed(0)
    t.pensize(size)
    draw_l_system(t, inst, angle, length)
    t.hideturtle()


# Default L-system: Koch snowflake.
axiom = "F--F--F"
rules = {"F":"F+F--F+F"}
iterations = 4 # TOP: 7
angle = 60


class Application(tk.Frame):
    """Tk front-end: input fields for an L-system plus Draw/Quit buttons."""

    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.grid()
        self.create_widgets()
        # Parsed values, filled in by getRules().
        self.axiom = None
        self.rules = None
        self.iterations = None
        self.angle = None
        self.length = None

    def create_widgets(self):
        """Build the form: labeled text inputs, buttons, and the help label."""
        self.hi_there = tk.Button(self)
        self.hi_there["text"] = "Provide the L-system"
        self.hi_there.grid(row = 1, column = 0)
        self.FN = tk.Button(self, text="....")
        self.FN.grid(row = 1, column = 2)
        tk.Label(self, text="Axiom").grid(row=2)
        tk.Label(self, text="Rules").grid(row=3)
        tk.Label(self, text="Iterations").grid(row=4)
        tk.Label(self, text="Angle").grid(row=5)
        tk.Label(self, text="Side Length").grid(row=6)
        self.Axiom= tk.Text(self,height=1,width = 20,)
        self.Axiom.grid(row = 2, column = 1)
        self.Axiom.insert('1.0', 'F--F--F')
        self.Rules = tk.Text(self,height=1, width = 50)
        self.Rules.grid(row = 3, column = 1 )
        self.Rules.insert('1.0', '{"F":"F+F--F+F"}')
        self.Iterations= tk.Text(self,height=1,width = 10)
        self.Iterations.grid(row = 4, column = 1)
        self.Iterations.insert('1.0', '3')
        self.Angle = tk.Text(self,height=1, width = 10)
        self.Angle.grid(row = 5, column = 1 )
        self.Angle.insert('1.0', '60')
        self.Len = tk.Text(self,height=1, width = 10)
        self.Len.grid(row = 6, column = 1 )
        self.Len.insert('1.0', '3')
        self.Draw = tk.Button(self, text="Draw", fg="Green", command=self.draw)
        self.Draw.grid(row = 6, column = 2)
        self.quit = tk.Button(self, text="QUIT", fg="red", command=self.Quit)
        self.quit.grid(row = 7, column = 1)
        tk.Label(self,text="").grid(row=8)
        tk.Label(self,text="").grid(row=9)
        #tk.Label(self,text="").grid(row=10)
        #tk.Label(self,text="").grid(row=11)
        #tk.Label(self,text="").grid(row=12)
        #tk.Label(self,text="").grid(row=13)
        Rules = tk.Label(self, text=" A simple tool to draw fractal from it's L-system description. \
\n Axiom: The initial string for the generation. \n Rules: A set of \
production rules. Input is taken in form of Python Dictionaries.\n Angle: Required Angle of rotation for the fractals. \n \
Side Length: Length of each side of the Fractals.\
\n Sample inputs for Koch-Snowflake are given in the Input box.\n \
Sample input for Piano-Gosper-Curve is: \n \
Axiom = FX\n \
Rules = {\"X\":\"X+YF++YF-FX--FXFX-YF+\", \"Y\":\"-FX+YFYF++YF+FX--FX-Y\"} \n \
Iterations = 4 \n \
Angle = 60 \n \
Side Length = 3"
        )
        Rules.grid(row=14,columnspan = 6)
        Rules.config(bg="gray", fg='white', font=("Courier",10))

    def Quit(self):
        """Close the turtle window and the Tk application."""
        global t
        turtle.bye()
        self.master.destroy()

    def getRules(self):
        """Read and parse the form fields into typed attributes.

        The rules dict is parsed with ast.literal_eval (safe for literals).
        """
        self.axiom = self.Axiom.get("1.0","end")
        self.rules = ast.literal_eval(self.Rules.get("1.0","end"))
        self.iterations = int(self.Iterations.get("1.0","end"))
        self.angle = float(self.Angle.get("1.0","end"))
        self.length = int(self.Len.get("1.0","end"))

    def draw(self):
        """Clear any previous drawing and render the L-system from the form."""
        global t
        self.getRules()
        if t != None:
            t.clear()
        if self.axiom == None or self.rules == None or self.iterations == None or self.angle == None or self.length == None:
            # NOTE(review): self.msg is never created anywhere in this class,
            # so this branch would raise AttributeError — confirm/fix.
            self.msg.config(text="select all values")
        else:
            # Calls the module-level draw(), not this method.
            draw(self.iterations, self.axiom, self.rules, self.angle, length= self.length , size=2, y_offset=0,x_offset=0, offset_angle=0, width=800, height=800)

    def decompress(self,):
        # NOTE(review): dead/broken method — self.fileName is never set,
        # self.msg does not exist, and the global decompress() it calls is
        # not defined in this module; confirm whether this is leftover code.
        if self.fileName == None:
            self.msg.config(text="select a File")
        else:
            decompress(self.fileName, 'f')
            self.msg.config(text="decompressing "+ self.fileName)


root = tk.Tk()
app = Application(master=root)
root.geometry("750x500")
root.title("Happy L Fractals")
app.mainloop()
#include "chunk.h"
#include <glm/gtc/matrix_transform.hpp>
#include "globals.h"
#include "camera.h"
#include <iostream>

using std::cout;
using std::endl;
using std::ostream;
using namespace Noise;

int Chunk::numChunks = 0;

// Debug helper: stream a vec3 as "x y z".
ostream& operator<<(ostream &os, const vec3 &vec){
    os << vec.x << " " << vec.y << " " << vec.z;
    return os;
}

inline float noise1(float x, float z){
    ///////
    // probably not thread safe
    // fix if making multiple chunkers
    //////
    float y = 0;
    noise.SetFrequency(0.01f);//0.01
    noise.SetFractalOctaves(6);//3
    noise.SetFractalLacunarity(2.0f);//2.0
    noise.SetFractalGain(0.3f);//0.2
    y += 1.5f * noise.GetSimplexFractal(1 * x, 1 * z);
    y += 0.5f * noise.GetSimplexFractal(2 * x, 2 * z);
    noise.SetFractalLacunarity(3.0f);//2.0
    noise.SetFractalGain(0.3f);//0.4
    y += 0.25f * noise.GetSimplexFractal(4 * x, 4 * z);
    return y;
}

inline float noise2(float x, float z){
    noise.SetFrequency(0.005f);//0.01
    noise.SetFractalOctaves(3);//3
    noise.SetFractalLacunarity(1.0f);//2.0
    //noise.SetFractalLacunarity(2.0f);//2.0
    noise.SetFractalGain(10.f);//0.2
    return noise.GetSimplexFractal(x, z) + 1.f;
}

// Terrain height at world position (x, z), scaled to maxHeight.
float Chunk::getHeight(float x, float z){
    //float y = 1-pow(noise1(x, z), 2);
    float y = noise2(x, z);
    y *= noise1(x, z);
    //y = abs(pow(y, 2));
    //y = pow(y, 2.0f);
    //great for islands
    //y = 1.f - abs(pow(y, 2));
    return y * maxHeight;
}

// Builds a size x size height-mapped grid of vertices, the triangle index
// list, and smooth per-vertex normals.
Chunk::Chunk(int chunkX_, int chunkZ_):
    chunkX(chunkX_), chunkZ(chunkZ_)
{
    // BUG FIX: balance the decrement in ~Chunk; the counter was never
    // incremented, so it drifted negative as chunks were destroyed.
    ++numChunks;

    // BUG FIX: new[] takes an element count, not a byte count.  The old
    // code multiplied by sizeof(...) and over-allocated every array.
    data = new vec3[size * size];
    indices = new GLushort[numIndices];
    normals = new vec3[size * size];
    // Zero the normal accumulators explicitly; vec3's default constructor
    // may leave components uninitialized depending on GLM configuration.
    for(int i = 0; i < size * size; ++i){
        normals[i] = vec3(0.0f);
    }

    {
        // Fill the vertex grid; world position is offset by the chunk
        // coordinates so adjacent chunks share edge vertices.
        int index = 0;
        int offsetX = chunkX * (size - 1);
        int offsetZ = chunkZ * (size - 1);
        for(int j = 0; j < size; ++j){
            for(int i = 0; i < size; ++i){
                float x = i + offsetX;
                float z = j + offsetZ;
                data[index].x = x;
                data[index].y = getHeight(x * scale, z * scale);
                data[index].z = z;
                ++index;
            }
        }
    }
    {
        // Triangulate each quad, choosing the diagonal with the smaller
        // height difference to reduce visible ridge artifacts.
        // NOTE(review): GLushort indices overflow if size*size > 65536.
        int index = -1;
        for(int z = 0; z < size - 1; ++z){
            for(int x = 0; x < size - 1; ++x){
                int start = z * size + x;
                int TL = start;
                int TR = start + 1;
                int BL = start + size;
                int BR = start + 1 + size;
                if(abs(data[TR].y - data[BL].y) < abs(data[TL].y - data[BR].y)){
                    indices[++index] = TR;
                    indices[++index] = BL;
                    indices[++index] = BR;
                    indices[++index] = TR;
                    indices[++index] = TL;
                    indices[++index] = BL;
                } else {
                    indices[++index] = TL;
                    indices[++index] = BL;
                    indices[++index] = BR;
                    indices[++index] = TR;
                    indices[++index] = TL;
                    indices[++index] = BR;
                }
            }
        }
    }
    {
        // Accumulate face normals per vertex, then normalize the sums.
        for(int i = 0; i < numIndices; i += 3){
            vec3 &v0 = data[indices[i + 0]];
            vec3 &v1 = data[indices[i + 1]];
            vec3 &v2 = data[indices[i + 2]];
            vec3 normal = normalize(cross(v1 - v0, v2 - v0));
            normals[indices[i + 0]] += normal;
            normals[indices[i + 1]] += normal;
            normals[indices[i + 2]] += normal;
        }
        // BUG FIX: there is one normal per *vertex* (size*size), not per
        // index; the old bound only "worked" because of the over-allocation.
        for(int i = 0; i < size * size; ++i){
            normals[i] = normalize(normals[i]);
        }
    }
}

// Uploads vertex, normal, and index data to the GPU and frees the CPU copies.
void Chunk::upload(){
    //remove?
    glBindVertexArray(0);

    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, size * size * sizeof(vec3), data, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vec3), 0);
    glEnableVertexAttribArray(0);

    glGenBuffers(1, &normalBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, normalBuffer);
    glBufferData(GL_ARRAY_BUFFER, size * size * sizeof(vec3), normals, GL_STATIC_DRAW);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(vec3), 0);
    glEnableVertexAttribArray(1);

    glGenBuffers(1, &ebo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, numIndices * sizeof(GLushort), indices, GL_STATIC_DRAW);

    // CPU-side copies are no longer needed; null the pointers so a double
    // delete or stale access faults loudly instead of corrupting the heap.
    delete[] data;
    delete[] normals;
    delete[] indices;
    data = nullptr;
    normals = nullptr;
    indices = nullptr;
}

Chunk::~Chunk(){
    --numChunks;
    glDeleteBuffers(1, &vbo);
    glDeleteBuffers(1, &ebo);
    glDeleteVertexArrays(1, &vao);
}

void Chunk::render() const {
    glBindVertexArray(vao);
    glDrawElements(GL_TRIANGLES, numIndices, GL_UNSIGNED_SHORT, 0);
    glBindVertexArray(0);
}
<gh_stars>1-10
package br.ufscar.dc.gsdr.mfog;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.ByteBufferInputStream;
import com.esotericsoftware.kryo.io.ByteBufferOutputStream;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryonet.Server;

import java.io.IOException;
import java.io.PipedInputStream;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.*;

/**
 * Non-blocking NIO relay: accepts client connections, deserializes incoming
 * messages of type T with Kryo, appends them to an in-memory store, and
 * streams the stored messages back out to each connection.
 *
 * NOTE(review): as written this file cannot compile — the declarations of
 * outputByteBuffer and inputByteBuffer inside Connection are commented out
 * yet still referenced below; confirm the intended declarations.
 *
 * @param <T> message type relayed by this service
 */
public class RelayService<T> {
    protected final int port;
    protected final Logger log;
    protected final Class<T> typeParameterClass;
    protected final Serializer<T> serializer;
    protected final Kryo kryo;

    /**
     * @param typeParameterClass runtime class of T, needed by Kryo reads
     * @param caller class used only to tag log output
     * @param port TCP port to listen on
     * @param kryo configured Kryo instance (not thread-safe; single-threaded use here)
     * @param serializer Kryo serializer for T
     */
    RelayService(Class<T> typeParameterClass, Class<?> caller, int port, Kryo kryo, Serializer<T> serializer) {
        this.typeParameterClass = typeParameterClass;
        this.port = port;
        this.log = Logger.getLogger(this.getClass(), typeParameterClass, caller);
        this.kryo = kryo;
        this.serializer = serializer;
    }

    /** Per-client state: socket, send cursor over the store, and Kryo I/O buffers. */
    class Connection {
        public SocketChannel socketChannel;
        // Iterator over the messages still to be sent to this client.
        public Iterator<T> toSend;
        //        public ByteBuffer outputByteBuffer = ByteBuffer.allocate(10 * 1024);
        public ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(outputByteBuffer);
        public Output output = new Output(byteBufferOutputStream);
        //        public ByteBuffer inputByteBuffer = ByteBuffer.allocate(10 * 1024);
        public ByteBufferInputStream byteBufferInputStream = new ByteBufferInputStream(ByteBuffer.allocate(10 * 1024));
        public Input input = new Input(byteBufferInputStream);

        Connection(SocketChannel socketChannel, Iterator<T> toSend) {
            this.socketChannel = socketChannel;
            this.toSend = toSend;
        }
    }

    // All messages received so far; new connections replay this list.
    List<T> store = new LinkedList<>();
    // Active client connections keyed by channel.
    Map<SocketChannel, Connection> connections = new HashMap<>();

    /**
     * Runs the accept/read/write event loop until the server channel closes.
     *
     * NOTE(review): reads assume a whole serialized object arrives in one
     * read() call — partial reads are not reassembled; a KryoException on a
     * short read closes the *server*, not just the offending client. Confirm
     * whether that shutdown behavior is intended.
     *
     * @throws IOException on selector or channel failures
     */
    public void run() throws IOException {
        Selector selector = Selector.open();
        ServerSocketChannel serverSocketChannel = ServerSocketChannel.open();
        serverSocketChannel.bind(new InetSocketAddress(port));
        serverSocketChannel.configureBlocking(false);
        serverSocketChannel.register(selector, serverSocketChannel.validOps(), null);
        //
        log.info("Server Ready");
        while (serverSocketChannel.isOpen()) {
            selector.select();
            for (SelectionKey selectedKey : selector.selectedKeys()) {
                if (!selectedKey.isValid()) {
                    log.info("Invalid key: " + selectedKey);
                    continue;
                }
                if (selectedKey.isAcceptable()) {
                    ServerSocketChannel selectedServer = (ServerSocketChannel) selectedKey.channel();
                    SocketChannel client = selectedServer.accept();
                    if (client == null) {
                        // log.info("Invalid client: " + Integer.toBinaryString(selectedKey.readyOps()));
                        // log.info("failed to accept");
                        continue;
                    }
                    client.configureBlocking(false);
                    // java.lang.NullPointerException
                    client.register(selector, client.validOps());
                    // client.register(selector, SelectionKey.OP_WRITE);
                    // New client starts replaying the store from the beginning.
                    connections.put(client, new Connection(client, store.iterator()));
                    log.info("Connection Accepted: " + client.getLocalAddress());
                }
                if (selectedKey.isReadable() || selectedKey.isWritable()) {
                    SocketChannel client = (SocketChannel) selectedKey.channel();
                    if (client == null) {
                        log.info("Invalid client: " + selectedKey);
                        continue;
                    }
                    if (selectedKey.isReadable()) {
                        Connection connection = connections.get(client);
                        int read = client.read(connection.inputByteBuffer);
                        // PipedInputStream pipedInputStream = new PipedInputStream();
                        // pipedInputStream.
                        // connection.inputByteBuffer.flip().duplicate()
                        // NOTE(review): array() ignores the buffer's position/limit,
                        // so stale bytes past the last read may be deserialized.
                        connection.input.setBuffer(connection.inputByteBuffer.array());
                        try {
                            T next = serializer.read(kryo, connection.input, typeParameterClass);
                            store.add(next);
                            connection.inputByteBuffer.clear();
                            log.info("Message received: " + next);
                        } catch (com.esotericsoftware.kryo.KryoException e) {
                            log.warn(e.getMessage());
                            client.close();
                            serverSocketChannel.close();
                            break;
                        }
                    }
                    if (selectedKey.isWritable()) {
                        Connection connection = connections.get(client);
                        if (connection.toSend.hasNext()) {
                            T next = connection.toSend.next();
                            serializer.write(kryo, connection.output, next);
                            connection.output.flush();
                            client.write(connection.byteBufferOutputStream.getByteBuffer());
                            log.info("Message sent: " + next);
                        }
                    }
                }
            }
            // selector.keys().clear();
            // java.lang.UnsupportedOperationException
            //	at java.util.Collections$UnmodifiableCollection.clear(Collections.java:1076)
            //	at br.ufscar.dc.gsdr.mfog.RelayService.run(RelayService.java:89)
        }
        serverSocketChannel.close();
    }
}
/*
 * Drop user mapping by OID.  This is called to clean up dependencies.
 *
 * Looks up the pg_user_mapping tuple by its OID in the syscache and
 * deletes it; erroring out if the tuple has vanished, since callers
 * expect the mapping to exist.
 */
void
RemoveUserMappingById(Oid umId)
{
	HeapTuple	tp;
	Relation	rel;

	/* Open pg_user_mapping; RowExclusiveLock suffices for a row delete. */
	rel = heap_open(UserMappingRelationId, RowExclusiveLock);

	tp = SearchSysCache1(USERMAPPINGOID, ObjectIdGetDatum(umId));

	if (!HeapTupleIsValid(tp))
		elog(ERROR, "cache lookup failed for user mapping %u", umId);

	simple_heap_delete(rel, &tp->t_self);

	ReleaseSysCache(tp);

	heap_close(rel, RowExclusiveLock);
}
/**
 * Created by Administrator on 2018/4/2.
 *
 * Store-clerk cart endpoints: list the dishes ordered for a table, add/update/
 * remove a dish in a table's cart, and move a cart between tables.
 *
 * NOTE: entity ids are assumed to be boxed Long (Spring Data JPA convention);
 * they are therefore compared with equals(), never == / !=.
 */
@Api( value = "店员购物车管理", description = "店员购物车的接口",tags="店员-购物车接口")
@RestController("store_user_cart_controller")
@RequestMapping("/api/store_user/cart")
@CrossOrigin
public class CartController extends BaseController {

    @Autowired
    StoreRepository storeRepository;
    @Autowired
    GoodsRepository goodsRepository;
    @Autowired
    TablesRepository tablesRepository;
    @Autowired
    CartRepository cartRepository;

    /**
     * Lists all on-sale goods of the clerk's store, annotated with the
     * quantity/note already in the given table's cart.
     */
    @ApiOperation(value="列表-获取指定桌子的已点菜品", notes="列表-获取指定桌子的已点菜品")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "page", value = "页码", required = false, dataType = "int", paramType = "query"),
            @ApiImplicitParam(name = "pageSize", value = "每页行数,默认10条", required = false, dataType = "int", paramType = "query"),
    })
    @RequestMapping(value = "",method = RequestMethod.GET)
    @ResponseBody
    public MsgVo list(
            @RequestParam(value = "table_id")Long table_id,
            @RequestParam(value = "keyword",defaultValue = "")String keyword,
            @RequestParam(value = "page",defaultValue = "1")Integer page,
            @RequestParam(value = "pageSize",defaultValue = "10")Integer pageSize,
            HttpServletRequest request){
        MsgVo msgVo =new MsgVo();
        StoreUser storeUser = (StoreUser) request.getAttribute("user");

        // Existing cart rows for this store + table.
        Criteria<Cart> criteria = new Criteria<>();
        criteria.add(Restrictions.eq("store",storeUser.getStore()));
        criteria.add(Restrictions.eq("tables",tablesRepository.findById(table_id).orElse(null)));
        List<Cart> carts = cartRepository.findAll(criteria);

        // Index cart rows by goods id for quick annotation below.
        Map<Long,Cart> cartNumMap = new HashMap<>();
        if(carts.size() > 0){
            for(Cart cart:carts){
                cartNumMap.put(cart.getGoods().getId(),cart);
            }
        }

        // On-sale (status=2), non-deleted goods, optionally filtered by keyword.
        Criteria<Goods> goodsCriteria = new Criteria<>();
        goodsCriteria.add(Restrictions.eq("store",storeUser.getStore()));
        goodsCriteria.add(Restrictions.eq("status",2));
        goodsCriteria.add(Restrictions.eq("isDelete",false));
        if(!keyword.trim().equals("")){
            goodsCriteria.add(Restrictions.or(Restrictions.like("name","%"+keyword+"%"),Restrictions.like("number","%"+keyword+"%")));
        }
        List<Goods> goods = goodsRepository.findAll(goodsCriteria);

        // Copy cart quantity/note onto each goods entry already in the cart.
        if(cartNumMap.size() > 0 && goods.size() > 0){
            for (Goods goods1:goods){
                Long gid = goods1.getId();
                if(cartNumMap.containsKey(gid)){
                    goods1.setCart_num(cartNumMap.get(gid).getNum());
                    goods1.setNote(cartNumMap.get(gid).getNote());
                }else{
                    goods1.setNote("");
                }
            }
        }
        msgVo.getData().put("goods",goods);
        msgVo.getData().put("carts",carts);
        msgVo.setMsg("获取成功");
        return msgVo;
    }

    /**
     * Sets the quantity/note of one dish in a table's cart.
     * num > 0 creates or updates the cart row; num <= 0 removes it.
     */
    @ApiOperation(value="变更购物车中的菜品", notes="变更购物车中的菜品")
    @ApiImplicitParams({
    })
    @RequestMapping(value = "/table/{table_id}/goods/{goods_id}",method = RequestMethod.POST)
    @ResponseBody
    public MsgVo delete(@PathVariable("table_id")Long table_id,@PathVariable("goods_id")Long goods_id,@RequestParam("num")int num,@RequestParam(value = "note",defaultValue = "")String note){
        MsgVo msgVo = new MsgVo();
        StoreUser storeUser = (StoreUser) request.getAttribute("user");

        Tables tables = tablesRepository.findById(table_id).orElse(null);
        // BUG FIX: boxed Long ids compared with equals(), not != (identity).
        if(tables == null || !tables.getStore().getId().equals(storeUser.getStore().getId())){
            msgVo.setCode(40001);
            msgVo.setMsg("餐桌不存在");
            return msgVo;
        }
        Goods goods = goodsRepository.findById(goods_id).orElse(null);
        if(goods == null || !goods.getStore().getId().equals(storeUser.getStore().getId()) || goods.getStatus() == 1){
            msgVo.setCode(40002);
            msgVo.setMsg("菜品不存在或已下架");
            return msgVo;
        }

        // Current cart row for this store + table + goods, if any.
        Criteria<Cart> criteria = new Criteria<>();
        criteria.add(Restrictions.eq("store",storeUser.getStore()));
        criteria.add(Restrictions.eq("tables",tables));
        criteria.add(Restrictions.eq("goods",goods));
        criteria.add(Restrictions.isNull("consumer"));
        Cart carts = cartRepository.findOne(criteria).orElse(null);

        if(num > 0){ // adding / updating
            if(carts == null){ // create a new cart row
                Cart cart = new Cart();
                cart.setTables(tables);
                cart.setGoods(goods);
                cart.setNum(num);
                cart.setNote(note);
                cart.setStore(storeUser.getStore());
                cartRepository.save(cart);
            }else{ // update quantity/note on the existing row
                carts.setNum(num);
                carts.setStore(storeUser.getStore());
                carts.setNote(note);
                cartRepository.save(carts);
            }
            saveStoreLog(storeUser.getStore(),storeUser,"分店店员"+storeUser.getName()+"向购物车餐桌"+tables.getNumber()+"中增加菜品"+num+"份"+goods.getName());
        }else{
            // BUG FIX: the row may already be absent — the old code called
            // delete(null) and threw a NullPointerException.
            if(carts != null){
                cartRepository.delete(carts);
                saveStoreLog(storeUser.getStore(),storeUser,"分店店员"+storeUser.getName()+"删除购物车餐桌"+tables.getNumber()+"中删除菜品"+num+"份"+goods.getName());
            }
        }
        return msgVo;
    }

    /**
     * Moves the whole cart of one table to another table and swaps their
     * occupied/free status.
     */
    @ApiOperation(value="购物车变更桌子", notes="购物车变更桌子")
    @ApiImplicitParams({
    })
    @RequestMapping(value = "",method = RequestMethod.PUT)
    @ResponseBody
    public MsgVo updateTable(@RequestParam(value = "to_table_id")Long to_table_id,
                             @RequestParam(value = "from_table_id")Long from_table_id){
        MsgVo msgVo = new MsgVo();
        StoreUser storeUser = (StoreUser) request.getAttribute("user");

        Tables fromTables = tablesRepository.findById(from_table_id).orElse(null);
        if(fromTables == null || !fromTables.getStore().getId().equals(storeUser.getStore().getId())){
            msgVo.setCode(40002);
            msgVo.setMsg("当前餐桌不存在");
            return msgVo;
        }
        Tables toTtables = tablesRepository.findById(to_table_id).orElse(null);
        // BUG FIX: the original compared toTtables' store id against ITSELF,
        // which never rejects a table belonging to another store.
        if(toTtables == null || !toTtables.getStore().getId().equals(storeUser.getStore().getId())){
            msgVo.setCode(40003);
            msgVo.setMsg("目标餐桌不存在");
            return msgVo;
        }

        // Re-point every cart row of the source table at the target table.
        Criteria<Cart> criteria = new Criteria<>();
        criteria.add(Restrictions.eq("store",storeUser.getStore()));
        criteria.add(Restrictions.eq("tables",fromTables));
        List<Cart> carts = cartRepository.findAll(criteria);
        if(carts.size() > 0) {
            for (Cart cart : carts) {
                cart.setTables(toTtables);
            }
            cartRepository.saveAll(carts);
        }
        // Source table becomes free (1), target becomes occupied (2).
        fromTables.setStatus(1);
        toTtables.setStatus(2);
        tablesRepository.save(fromTables);
        tablesRepository.save(toTtables);
        saveStoreLog(storeUser.getStore(),storeUser,"分店店员"+storeUser.getName()+"修改购物车中的桌号"+fromTables.getNumber()+"为"+toTtables.getNumber());
        return msgVo;
    }
}
<filename>tests/test_messages/test_inbound/test_user_reset_detected.py
"""Test User Reset Detected."""
import unittest
from binascii import unhexlify

from pyinsteon.constants import MessageId
from tests import set_log_levels
from tests.utils import hex_to_inbound_message


class TestUserResetDetected(unittest.TestCase):
    """Test the inbound User Reset Detected (0x55) message."""

    def setUp(self):
        """Set up the raw hex fixture and parse it into an inbound message."""
        self.hex = "0255"
        self.message_id = MessageId(0x55)
        self.msg, self.msg_bytes = hex_to_inbound_message(self.hex)
        set_log_levels(
            logger="info",
            logger_pyinsteon="info",
            logger_messages="info",
            logger_topics=False,
        )

    def test_id(self):
        """The parsed message carries the USER_RESET_DETECTED message id."""
        assert self.msg.message_id == self.message_id

    def test_bytes(self):
        """Round-tripping the message back to bytes matches the raw hex."""
        assert bytes(self.msg) == unhexlify(self.hex)
def to_representation(self:Learner):
    """Build a `Representation` graph of this `Learner`: Input -> model -> Output.

    Runs one training batch through the model with hooks attached so each
    layer records its parameter counts and output shape into `_xtra`,
    then assembles the node/link graph and cleans the hooks' state up.
    """
    # One real batch so hooked layers see genuine input shapes.
    xb,yb = self.dls.train.one_batch()
    def _get_info(m, i, o):
        # Hook: stash param count, trainable flag, and output shape on the module.
        params,trainable = total_params(m)
        m._xtra = {'params': params, 'trainable': trainable, 'shape': o.shape}
    model = self.model.to(xb.device)
    layers = flatten_model(model)
    with Hooks(layers, _get_info) as h: model.eval()(xb)
    # Linear chain: Input node, the model's own representation, Output node.
    nodes = [Node('Input', 0, 'Input', xtra={'shape':list(xb.shape)}),
             self.model.to_representation(xtra={'open': True}),
             Node('Output', 0, 'Output')]
    links = [{'source':i, 'target':i+1} for i in range_of(nodes)]
    # NOTE(review): range_of(nodes) also emits a link from the last node to
    # a non-existent index len(nodes) — confirm whether the consumer ignores it.
    rep = Representation(Node('Learner', 0, 'Learner', nodes=nodes, links=links, xtra={'open': True}))
    _update_shapes(rep.data)
    # Remove the hook-injected attribute so modules are left unmodified.
    for layer in layers: del(layer._xtra)
    # Mirror the model's final output shape onto the Output node.
    nodes[-1].xtra['shape'] = nodes[-2].xtra.get('shape')
    return rep
package org.apereo.cas.support.events.mongo;

import org.apereo.cas.config.CasCoreHttpConfiguration;
import org.apereo.cas.config.MongoDbEventsConfiguration;
import org.apereo.cas.support.events.AbstractCasEventRepositoryTests;
import org.apereo.cas.support.events.CasEventRepository;
import org.apereo.cas.util.junit.EnabledIfPortOpen;

import lombok.Getter;
import org.junit.jupiter.api.Tag;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;

/**
 * Test cases for {@link MongoDbCasEventRepository}.
 *
 * <p>Boots only the configuration classes needed to wire a MongoDB-backed
 * {@link CasEventRepository}; the actual test methods are inherited from
 * {@link AbstractCasEventRepositoryTests}, which consumes the repository
 * exposed through the Lombok-generated getter.</p>
 *
 * <p>Requires a reachable MongoDB on localhost:27017 — the whole class is
 * skipped otherwise (see {@code @EnabledIfPortOpen}). The collection is
 * dropped between runs ({@code drop-collection=true}).</p>
 *
 * <p>NOTE(review): the {@code password=<PASSWORD>} property value looks like
 * redaction residue; confirm the intended test credential.</p>
 *
 * @author <NAME>
 * @since 5.2.0
 */
@Tag("MongoDb")
@SpringBootTest(classes = {
    MongoDbEventsConfiguration.class,
    CasCoreHttpConfiguration.class,
    RefreshAutoConfiguration.class
},
    properties = {
        "cas.events.mongo.user-id=root",
        "cas.events.mongo.password=<PASSWORD>",
        "cas.events.mongo.host=localhost",
        "cas.events.mongo.port=27017",
        "cas.events.mongo.authentication-database-name=admin",
        "cas.events.mongo.database-name=events",
        "cas.events.mongo.drop-collection=true"
    })
@Getter
@EnabledIfPortOpen(port = 27017)
public class MongoDbCasEventRepositoryTests extends AbstractCasEventRepositoryTests {

    @Autowired
    @Qualifier("casEventRepository")
    private CasEventRepository eventRepository;
}
def pattern2array3d(rate, pattern, speed, z_steps, delta_z, speed_limit=10,
                    p0=None, y_range=100, z_range=100,
                    point_limit=3333, cut=False, margin=5):
    """Convert a 2D scan pattern into a 3D array of points swept over z layers.

    The pattern is traversed in a serpentine fashion (direction alternating per
    column, and per layer when the pattern length is odd), descending by
    ``delta_z`` between layers.

    Parameters
    ----------
    rate : float
        Time step; multiplied by speed to obtain step sizes.
    pattern : np.ndarray
        X coordinates of the scan columns.
    speed : np.ndarray
        Per-column sweep speeds; rejected if any exceeds ``speed_limit``.
    z_steps, delta_z : number of z layers and spacing between them.
    p0 : np.ndarray, optional
        Origin offset; defaults to (0, 0, 0). (Default is created per call —
        the previous shared ``np.array`` default was a mutable-default hazard.)
    y_range, z_range : sweep extent in y and maximum depth in z.
    point_limit : max points per segment before a cut marker is emitted.
    cut : when True, insert ``[-1, -1, -1]`` markers at segment boundaries.
    margin : passed through to ``move_slow``.

    Returns
    -------
    list of points, or None when the speed limit is exceeded.
    """
    if np.max(speed) > speed_limit:
        print("Maximal speed is bigger than speed limit: ", speed_limit, "\n")
        return
    if p0 is None:
        p0 = np.array([0, 0, 0])

    dy = speed * rate
    p = [pattern[0], 0, 0]
    array = [np.copy(p0 + p)]
    ds = max(speed) * rate
    # Never descend past z_range.
    z_steps = min(z_steps, int(np.floor(z_range / delta_z)))
    odd_pattern = len(pattern) % 2 == 1
    # Smallest x gap between neighboring columns bounds the x step size.
    dx = np.min(np.abs(pattern[:-1] - pattern[1:]))
    if dx == 0:
        dx = ds
    counter = 0

    def _append_segment(tmp_arr):
        """Append a movement segment, emitting a cut marker at point_limit."""
        nonlocal counter
        if cut and counter + len(tmp_arr) > point_limit:
            array.append([-1, -1, -1])
            counter = len(tmp_arr)
        else:
            counter += len(tmp_arr)
        array.extend(tmp_arr)

    for z in range(z_steps):
        for x in range(len(pattern)):
            # Serpentine: alternate sweep direction each column; with an odd
            # pattern length the parity also flips every layer.
            if odd_pattern:
                inverted = (x + z) % 2 == 1
            else:
                inverted = x % 2 == 1
            # Move in x to the next column.
            p, tmp_arr = move(p0, p, pattern[x], ds=np.min([ds, 0.3 * dx]),
                              coord=0)
            _append_segment(tmp_arr)
            p[0] = pattern[x]
            # Sweep the column in y at its own speed.
            p, tmp_arr = move_slow(p0, p, 0 if inverted else y_range, dy[x],
                                   coord=1, margin=margin)
            _append_segment(tmp_arr)
        if z < z_steps - 1:
            # Descend one layer.
            dz = np.min([ds, 0.3 * delta_z])
            p, tmp_arr = move(p0, p, -(z + 1) * delta_z, dz, coord=2)
            _append_segment(tmp_arr)
            p[2] = -(z + 1) * delta_z
        # Reverse column order so the next layer is swept back the other way.
        pattern = pattern[::-1]
    return array
/// Fetch a note object given an index `note_index`. This note index can be the root index of the note /// or any of the child indexes of the note. If a child index for a specific revision of the note is passed /// then that revision will be fetched, otherwise latest revision is the default. pub fn fetch_note( &mut self, notebook_ship: &str, notebook_name: &str, note_index: &str, ) -> Result<Note> { // check index let index = NotebookIndex::new(note_index); if !index.is_valid() { return Err(UrbitAPIError::InvalidNoteGraphNodeIndex( note_index.to_string(), )); } // root note index let note_root_index = index.note_root_index(); // get the note root node let node = &self .channel .graph_store() .get_node(notebook_ship, notebook_name, &note_root_index)?; let revision = match index.is_note_revision() { true => Some(note_index.to_string()), false => None, }; return Ok(Note::from_node(node, revision)?); }
def HandleCreateRandomAddress(
    self, label, eighteenByteRipe=False, totalDifficulty=0,
    smallMessageDifficulty=0
):
    """Create one address with a random identity and return it.

    :param label: base64-encoded label for the new address; must decode
        to valid UTF-8.
    :param eighteenByteRipe: when True, generate a shorter address
        (RIPE hash starting with two zero bytes); must be a bool.
    :param totalDifficulty: proof-of-work nonce-trials multiplier; 0 (or
        falsy) falls back to the configured default.
    :param smallMessageDifficulty: payload-length-extra-bytes multiplier;
        0 (or falsy) falls back to the configured default.
    :raises APIError: 23 when eighteenByteRipe is not a bool,
        17 when the decoded label is not valid UTF-8.
    """
    # Difficulty parameters: either the user's multiplier applied to the
    # network defaults, or the node's configured defaults when unset.
    nonceTrialsPerByte = self.config.get(
        'bitmessagesettings', 'defaultnoncetrialsperbyte'
    ) if not totalDifficulty else int(
        defaults.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
    payloadLengthExtraBytes = self.config.get(
        'bitmessagesettings', 'defaultpayloadlengthextrabytes'
    ) if not smallMessageDifficulty else int(
        defaults.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
    if not isinstance(eighteenByteRipe, bool):
        raise APIError(
            23, 'Bool expected in eighteenByteRipe, saw %s instead'
            % type(eighteenByteRipe))
    label = self._decode(label, "base64")
    try:
        label.decode('utf-8')
    except UnicodeDecodeError:
        raise APIError(17, 'Label is not valid UTF-8 data.')
    # Drain any stale results so the blocking get() below returns only the
    # address generated by this request.
    queues.apiAddressGeneratorReturnQueue.queue.clear()
    streamNumberForAddress = 1
    # Version-4 address on stream 1; the worker thread posts the result back
    # on apiAddressGeneratorReturnQueue.
    queues.addressGeneratorQueue.put((
        'createRandomAddress', 4, streamNumberForAddress, label, 1, "",
        eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes
    ))
    return queues.apiAddressGeneratorReturnQueue.get()
/*
 * call-seq:
 *    RequestGlobal.delete(key)  -> value
 *
 * Delete the key-value pair for the given key from the current request's
 * storage and return the removed value (nil when the key was absent, per
 * rb_hash_delete semantics).
 */
static VALUE
request_global_delete(VALUE self, VALUE key)
{
    /* NOTE(review): presumably raises unless per-request storage is usable
       in this context — confirm check_condition's contract. */
    check_condition(self);
    return rb_hash_delete(request_global_current_storage(self), key);
}
package practice.tree;

import org.junit.Test;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * Computes the diameter of a binary tree: the number of nodes on the longest
 * path between any two leaves.
 */
public class Diameter {

    /** Minimal binary-tree node with public links. */
    static class Node {
        int data;
        Node left, right;

        public Node() {
        }

        public Node(int data) {
            this.data = data;
        }
    }

    /**
     * Recursive worker. Computes the subtree diameter while reporting the
     * subtree height through the mutable {@code height} out-parameter, so a
     * single traversal suffices.
     *
     * @param node   subtree root; may be {@code null}
     * @param height in/out: receives this subtree's height in nodes
     * @return diameter (node count) of the subtree rooted at {@code node}
     */
    public int diameter(Node node, AtomicInteger height) {
        if (node == null) {
            return 0;
        }
        AtomicInteger leftHeight = new AtomicInteger(height.get());
        AtomicInteger rightHeight = new AtomicInteger(height.get());
        int leftDiameter = diameter(node.left, leftHeight);
        int rightDiameter = diameter(node.right, rightHeight);
        // Longest path that passes through this node: one branch down each side.
        int throughNode = leftHeight.get() + rightHeight.get() + 1;
        // Report this subtree's height back to the caller.
        height.set(Math.max(leftHeight.get(), rightHeight.get()) + 1);
        return Math.max(throughNode, Math.max(leftDiameter, rightDiameter));
    }

    /** Diameter of the whole tree rooted at {@code node}. */
    public int diameter(Node node) {
        return diameter(node, new AtomicInteger());
    }

    @Test
    public void test() {
        /*
                1
               / \
              2   3
             / \
            4   5
        */
        Node root = new Node(1);
        root.left = new Node(2);
        root.right = new Node(3);
        root.left.left = new Node(4);
        root.left.right = new Node(5);
        // Longest path is 4-2-1-3 (4 nodes).
        System.out.println(diameter(root));
    }
}
/* Round X to the nearest integer, but round halfway cases away from zero.

   R receives the result; when MODE is not VOIDmode the result is converted
   back into that floating-point mode.

   Implemented as truncate(x +/- 0.5): the half is combined with X using
   X's own sign (via do_add's sign flag), pushing every halfway case past
   the next integer, and the subsequent truncation toward zero yields
   round-half-away-from-zero.  */

void
real_round (REAL_VALUE_TYPE *r, enum machine_mode mode,
	    const REAL_VALUE_TYPE *x)
{
  do_add (r, x, &dconsthalf, x->sign);	/* r = x + 0.5 in x's direction */
  do_fix_trunc (r, r);			/* truncate toward zero */
  if (mode != VOIDmode)
    real_convert (r, mode, r);		/* renormalize into the target mode */
}
import pytest
from faker import Faker


@pytest.fixture()
def fake():
    """Provide a Faker instance localized for Brazilian Portuguese (pt_BR)."""
    return Faker('pt_BR')
A New Look to Massive Neutron Cores We reconsider the problem of modelling static spherically symmetric perfect fluid configurations with an equation of state from a point of view of that requires the use of the concept of principal transform of a 3-dimensional Riemannian metric. We discuss from this new point of view the meaning of those familiar quantities that we call density, pressure and geometry in a relativistic context. This is not simple semantics. To prove it we apply the new ideas to recalculate the maximum mass that a massive neutron core can have. This limit is found to be of the order of 3.8 $M_\odot$ substantially larger than the Oppenheimer and Volkoff limit. Introduction We review in Section 1 the basic equations of the models being considered as well as the concept of principal transform of a 3-dimensional Riemannian metric which is at the core of our new point of view to understand these models. Section 2 is devoted to lay down the fundamental system of equations to be integrated. We use spherical space coordinates of the quo-harmonic class which allow to implement C 1 class smoothness across a sharp border when there is one. In Section 3 we define the concept of proper mass M p to be used to define the binding energy E b of the models as the difference between the active gravitational mass M a and M p . We define also the concept of proper mass density which is a fundamental hybrid concept related to M p and the principal transform of the quotient space metric. Its relation with the pressure p characterizes the fluid source independently of the solution being considered. Section 4 is devoted to establish that the binding energy E b can be obtained as the integral over all space of a localized energy density σ which depends only on some of the gravitational potentials and its first radial derivatives. 
In Section 5 we linearize the fundamental system of equations to obtain the linearized expression of the binding energy E b , which coincides with the familiar Newtonian one, thus providing a partial justification to the definition of E b in the non linear regime. We obtain also the linearized expression of the energy density σ. The last section contains our proposed application of the new point of view to the study of massive neutron cores. The equation of state is the usual one for a degenerate neutron gas, but both the density and the pressure are variants of those used by Oppenheimer and Volkoff . Our main result is that the maximum limit mass that a neutron core can have is approximately Static Spherically Symmetric Models We shall be interested in this paper on global spherically symmetric models, which we shall write using a time adapted coordinate and polar-like space coordinates: where: solution of Einstein's field equations: where the r-h-s describes a compact fluid source, or with fast decreasing density, with two "flavours": isotropic or a special kind of anisotropic pressure to be presented in a moment. The quotient 3-dimensional metric (2) can be written using a variety of supplementary coordinate conditions belonging to two different types: algebraic or differential. The most often used is the curvature condition which uses a radialr coordinate with the algebraic condition: We shall refer also to any other quantity which assumes the use of this radial coordinate with a tilde overhead. We shall use here almost exclusively the quo-harmonic condition which restricts the r coordinate with the differential condition: where the prime means derivative with respect to r. The use of a differential condition makes possible the construction of global C 1 models with a sharp boundary with vacuum, something which is not possible when using the coordinater. 
Notice that if B(r) and C(r) are known the curvature coordinater is simply the following function of r: while, on the contrary, to obtain explicitly the inverse function is in most cases of interest impossible or very cumbersome. Despite the emphasis we put on the use of the quo-harmonic coordinate r let us be clear from the beginning that the main conclusion of this paper will not owe anything to a particular choice of coordinates. It will owe instead all to a new concept: that of a principal transformation of a 3-dimensional Riemannian metric such as (2). By definition, in the particular case we are considering, the principal transform of (2) is a new 3-dimensional Riemannian metric 1 : such that: i)R i jkl = 0, i, j, k, l = 1, 2, 3 and: ii) where, with otherwise obvious notations, the quantities with a hat overhead refer to the metric (2) and the quantities with a bar overhead refer to the metric (7). Notice that both conditions above being tensor conditions under any transformation of space coordinates the concept of Principal transformation is intrinsic to the Killing time congruence we are considering. One of the practical conveniences of using polar quo-harmonic space coordinates from the outset in the very process of model-building is that, when appropriate boundary conditions are taken into account, the principal transform of (2) is just: and: We shall consider two types of energy-momentum tensors: i) those describing standard perfect fluids with isotropic pressure: and a new type of fluid where the isotropy of the pressure is meant in the sense of the principal transform (7): i.e. in the sense of (10) if polar quo-harmonic coordinates are used. Notice that this second case, that we shall present in Section 3 as being the truly isotropic fluid, can be considered from the standard point of view as an anisotropic fluid of a particular type. 
Namely one for which the radial and tangential pressures are related to a single function p(r) as follows: We can deal with both cases at once introducing in the field equations a two-valued flag f = 0 or f = 1 and the following expressions for p r and p t : thus f = 0 corresponds to the standard case and f = 1 corresponds to the new case. As usual we shall make use of a compressibility equation: depending on convenience, to describe the physics of the source. The boundary conditions at infinity will be: where Ξ = A, B or C; and the regularity conditions at the centre of symmetry of the configuration will be: Finally, in those cases where the model has a sharp boundary between an interior with ρ > 0 and vacuum we shall require the continuity of both Xi and X ′ , so that the space-time metric will be of global C 1 class. The system of units we shall use throughout will be such that: wherec is the speed of light in vacuum andḠ is Newton's constant. A more specific system of units of this class will be chosen in Sect. 6. Explicit field equations Taking into account the coordinate condition (5) and also its derivative: the field equation S 00 = ρA 2 can be written as follows: The field equation S 22 = −p t BCr 2 (or S 33 = −p t BCr 2 sin 2 θ) can be written using the preceding Eq. (21) and the second Eq. (15) as: The remaining equation we have to take care of is S 11 = −p r B 2 , or taking into account (5): On the other hand from the conservation equation: or: and Eqs. (15) we derive the equation: Taking into account the regularity conditions (18) at the centre of symmetry of the configuration in the preceding equations it is easy to see that they imply: Therefore the remaining field equation S 11 = −p r B 2 is satisfied at the origin r = 0 and from from (21), (22) and (25) it follows that it is satisfied everywhere. 
What follows is the summary of this section and the preceding one: The models we are considering will be fully described by the field variables A, B, C and the source variables ρ, p; the latter being related by a compressibility equation of either type (16). This complete set of variables is constrained to satisfy the system of differential equations (5), (21), (22) and (26). Appropriate initial conditions will be A 0 , B 0 = C 0 and ρ 0 > 0 (or p 0 > 0).The initial values of A 0 and B 0 have to be chosen such that the asymptotic conditions (17) are satisfied. The boundary of the source will be defined by the first zero r = R of the pressure p, and beyond the vacuum field equations will be required. The continuity of A, B and C and its first derivatives is automatically implemented across the boundary of the source. Physical and geometrical interpretations This will be the more difficult section of this paper, although it does not contain any calculation, because it deals about the meaning of words of common use. When we look at Eqs. (3) as equations to be solved we all refer to the r-h-s as the source term. But this is not quite correct because the energymomentum tensor depends on the coefficients of the unknown metric. In the case we are considering in this paper the real source variables are the so called density ρ and pressure p related by a compressibility equation (16). The meaning of these three ingredients deserve to be examined with some detail. A density by definition is a mass per unit volume, and a pressure is a force per unit surface. Therefore to be clear about them we must tell of what mass are we talking about and to what geometry of space are we referring when using the words volume and surface. As we all know the concept of mass is tricky because it comes in three flavours: inertial mass, passive gravitational mass and active gravitational mass. 
Newtonian theory assumes the proportionality of the three masses and General relativity assumes the proportionality of inertial and passive gravitational masses of test bodies. Beyond that we have a few decisions to be taken. To decide what geometry of space to use is also a tricky problem in relativity theory because we have to decide whether this geometry has to be known before we solve the field equations or will be known only after they have been solved, in which case the meaning of ρ, p and the compressibility Eq. (16) will also be known only after the problem has been solved. Of the three types of mass, only the meaning of active gravitational mass was settled very early by Tolman identifying it with the Newtonian mass at infinity and proving that it can be calculated as the following integral over the source: where we remind thatr is the curvature radial coordinate. Using (6) this formula can be written equivalently as: where: Bonnor proposed to eliminate the ambiguities that remain defining the passive gravitational mass as: This means that ρ + p is interpreted as a density of inertial, or passive, gravitational mass and that the metric that gives a meaning to the word volume is (2). But this metric is known only once the problem has been solved and then it depends on the point of the body which is considered. This deprives the meaning of the variables ρ and p and the compressibility equation (16) of any a priori significance. Other difficulties that arise from this definition are discussed in Bonnor's paper. We do not believe that the consideration of a single body at rest, as we have been doing here, can say anything about its passive or inertial gravitational mass because this would require to know how it reacts to the presence of another comparable body. 
On the other hand we believe that we should be able to define its proper mass M p if we want to know what is the binding energy E b of any given configuration as defined by: More precisely, our point of view, along the lines of a long enduring effort to understand the concept of rigidity and establishing a theory of frames of reference in special and general relativity, consists in accepting the usual generalization of Schwarzschild's "substantial mass" 2 as proper mass: and defining at the same time a proper mass density ρ p : such that M p could be written: This means then defining ρ p as a density of proper gravitational mass and interpreting the words volume and surface in the sense of the universal euclidian geometry (10) related to the quotient metric (2) by a principal transformation (7). This guarantees the independence of the meanings of ρ p , p and the compressibility equation (16) independently of the location of the element of the fluid in the object and independently of the solution of the field equations that one is considering. This guarantees also that M p can be identified with an appropriate number of identical samples of a fluid as weighted with a balance at the "shop store" before being assembled into the body. The interpretation we have just given of the metric (10) implies also as a corollary that, as above-mentioned, a fluid with isotropic pressure should be described by an energy-momentum tensor as written in (13). And that for the compressibility equation to have a well defined a priori meaning it should be given as a relationship between ρ p and p: Localized energy density Following suit to the ideas of the preceding section we exhibit the quantity E b as an integral extended over all space of an energy density function depending only on r, B, C and B ′ , C ′ . 
From (29) and (35) it follows that: where: while ρB 3 can be obtained from (21) as: with: Therefore we have: Integrating by parts the second integral we obtain: where: with: and therefore, the limit in (42) being zero, the final result is: Linear approximation We consider here the linear approximation of the models that we have been considering, to take a closer look to two of the de concepts that we have implemented in Sect. 3. Namely: the mass defect, or binding energy E b and the proper mass density ρ p . We assume that A, B and C can be written as: where A 1 , B 1 and C 1 are small quantities, of order ǫ say. We assume also that ρ is also of order ǫ and that p is of order ǫ 2 and can be ignored, as well as any other quantity of the same order or smaller, in the field equations. The coordinate condition (5) and the field equations (21), (22) become then: As our purpose is purely illustrative here we consider below the simplest case where the source is a spherical body of finite radius R and constant ρ. The interior solution satisfying the regularity conditions (18) at the centre is: where a 0 and b 0 are two allowed constants of integration; the exterior solution satisfying the asymptotic conditions (17) at infinity is: where a 1 and b 3 are two new constants of integration. Demanding the continuity of A 1 , B 1 and C 1 and their first derivatives across the border r = R fixes a 0 and all the b's as follows: From the preceding results we can calculate the leading approximation, which is of order ǫ 2 , of the localized energy density (43). For r < R the result is using an arbitrary system of units: where at this approximation M is either M a or M p . For r > R the result is: The binding energy can be calculated using (46), (54) and (55), or (32), (29) and (35) at the appropriate approximation. 
The result is, using arbitrary units, the familiar Newtonian amount: a result that can be obtained using a variety of other approaches 3 Massive Neutron Cores Any particular model will be characterized by an equation of state and the value of its central density, or central pressure, or both in the important case in which one assumes that the density is constant. Taking into account the regularity conditions the initial conditions of the gravitational potentials A, B and C have to be chosen such that: And taking into account the asymptotic conditions the values of A 0 and B 0 have to be chosen such that: the condition: as, well as the remaining asymptotic conditions, being then automatically satisfied because the solution behaves as the exterior Schwarzschild one at infinity. The numerical integration of the system of equations (5), (21), (22) and (26) where: and ρ p is a known function of p, is a trial and error procedure. Arbitrary values of A 0 , B 0 and p 0 > 0 have to be chosen; the integration has to proceed until p = 0; then the equation of state has to be abandoned and ρ = p = 0 has to be required; the integration has then to proceed to sufficiently large values of r to check the asymptotic conditions (58). If the check is not satisfactory the whole process has to be started again with new values of A 0 , B 0 and p 0 > 0. As an important example we consider the equation of state of a degenerate neutron gas as it suits to a model of massive neutron cores. This was considered in a famous paper by Oppenheimer and Volkoff from the standard point of view which consists in putting f = 0. Our point of view consist in using instead the value f = 1. The equation of state can be written in parametric form, including both points of view, as: where,using arbitrary units: m being the mass of a neutron. 
We recall below the values of $M_a$ obtained by Oppenheimer and Volkoff for several values of the initial value $u_0$, and include the values of $M_p$ as calculated from (33): The system of units that has been used is that satisfying (19), completed with the supplementary condition: which is the choice made in that work to define a unit of mass. The most notorious result is the existence of a maximum mass, corresponding approximately to $u_0 = 3$, whose value is $M_a = 0.078$, which corresponds to $M_a = 0.71\,M_\odot$. Oppenheimer and Volkoff also concluded, from a very crude non-relativistic argument, that above $u_0 = 3$ the equilibrium configurations were not stable. Here also we obtain that there is a maximum mass, $M_a = 0.41$, which corresponds to $M_a = 3.8\,M_\odot$; it is substantially larger than the Oppenheimer–Volkoff value, as well as the mass of some models with anisotropic pressure considered by Corchero. It is even somewhat larger than the limit value, $M_a = 3.2\,M_\odot$, obtained by Rhoades and Ruffini from very general considerations complying with the conventional point of view. I gratefully acknowledge the comments of E. S. Corchero and J. Martín, as well as the position of visiting professor to the UPV/EHU that I have been holding while this paper was being prepared.
Alex Thomas The Intel Hub 25 letters that claim nuclear bombs are hidden throughout the United States have been sent to multiple investigators and citizens in the Chicago area. The letter inside said, “The Al-Qaeda organization has planted 160 nuclear bombs throughout the U.S. in schools, stadiums, churches, stores, financial institutions and government buildings.” It also said, “This is a suicide mission for us,” reported CBS Chicago. While this news may be startling to many, it is no surprise to those in the alternative media. The idea of nukes in the United States has run rampant for years, with many believing rogue elements of our government will actually use these nukes within the United States to create enough chaos to initiate a world government. In 2002 the right wing news organization World Net Daily reported that Bin Laden had snuck over 20 suitcase nukes into the United States, A new book by an FBI consultant on international terrorism says Osama bin Laden’s al-Qaida terrorist network purchased 20 suitcase nuclear weapons from former KGB agents in 1998 for $30 million. The book,“Al Qaeda: Brotherhood of Terror,” by Paul L. Williams, also says this deal was one of at least three in the last decade in which al-Qaida purchased small nuclear weapons or weapons-grade nuclear uranium. This report was one of the first out of a long line of fear mongering reports that have been used to plant the idea of Al Qaeda nuking us into the mind of everyday American citizens. Most remember the “missing nukes” report that broke in 2007. Apparently several nukes were lost for upwards of 36 hours after leaving U.S.A.F Base Minot in North Dakota. According to a wide range of reports, several nuclear bombs were “lost” for 36 hours after taking off August 29/30, 2007 on a “cross-country journey” across the U.S., from U.S.A.F Base Minot in North Dakota to U.S.A.F. Base Barksdale in Louisiana. 
Reportedly, in total there were six W80-1 nuclear warheads armed on AGM-129 Advanced Cruise Missiles (ACMs) that were “lost.” The story was first reported by the Military Times, after military servicemen leaked the story, reported Mahdi Darius Nazemroaya. The idea that Al Qaeda could place over a hundred nuclear weapons into government buildings throughout the United States is 100% impossible. Cave dwelling ninjas do not have the ability to either steal or create nuclear weapons, fly them over to America, and somehow plant them in government buildings. The fact that the Pentagon and the CIA run Al Qaeda, have dined with top “terrorist” Anwar al-Awlaki, and openly fund Al Qaeda seems to be unimportant to the corporate controlled media. This isn’t a realistic option yet reports like this are used to trick ignorant Americans into hating Muslims even more. This also gives elements of our government the ability to say, “told you so” if or when a nuke detonates in an American city.
//AddSource add source to clm memory. func AddSource(name string, s implement.Implement) bool { defer SourcesRegistered.Unlock() SourcesRegistered.Lock() if _, ok := SourcesRegistered.m[name]; ok { sLog.V(utils.Warn).Info("source exist already", "name", name) return false } SourcesRegistered.m[name] = s return true }
# Create a function named not_sum_to_ten() that has two parameters named num1 and num2.
# Return True if num1 and num2 do not sum to 10. Return False otherwise.
def not_sum_to_ten(num1, num2):
    """Return True when num1 + num2 is not equal to 10, False otherwise."""
    # The comparison already yields the required bool; no if/else needed.
    return num1 + num2 != 10
// pcap docs: https://linux.die.net/man/3/pcap #include <boost/algorithm/string/replace.hpp> #include <boost/algorithm/string/trim.hpp> #include <algorithm> #include <cstring> #include <cctype> #include <fstream> #include <iostream> #include <map> #include <memory> #include <string> #include <netdb.h> #include <net/ethernet.h> #include <netinet/ip.h> #include <netinet/tcp.h> #include <netinet/udp.h> #include <pcap/pcap.h> std::string const g_config = "/etc/blacklist/blacklist.conf"; typedef std::map<std::string, std::string> params_t; typedef std::vector<std::string> blacklist_t; char g_errbuf[PCAP_ERRBUF_SIZE]; bool my_isspace(char ch) { return std::isspace(static_cast<unsigned char>(ch)); } class cached_ip { public: typedef std::shared_ptr<cached_ip> pointer_t; typedef std::map<in_addr_t, pointer_t> map_t; cached_ip() : f_last_access(time(nullptr)) {} void access() { f_last_access = time(nullptr); } void searched() { f_searched = true; } bool was_searched() const { return f_searched; } void blacklisted() { f_blacklisted = true; } bool was_blacklisted() const { return f_blacklisted; } void set_host(std::string const & host); std::string const & get_host() const { return f_host; } bool has_host() const { return !f_host.empty(); } bool match(blacklist_t const & blacklist); private: bool f_searched = false; bool f_blacklisted = false; time_t f_last_access = 0; std::string f_host = std::string(); }; void cached_ip::set_host(std::string const & host) { if(host.empty()) { throw std::logic_error("set_host() called with an empty string"); } if(host[0] == '.') { f_host = host; } else { f_host = "." 
+ host; } } bool cached_ip::match(blacklist_t const & blacklist) { for(auto & b : blacklist) { if(b.length() > f_host.length()) { // host name small than the blacklist URL, skip continue; } if(strcmp(b.c_str(), f_host.c_str() + f_host.length() - b.length()) == 0) { // this is a match // blacklisted(); return true; } } return false; } class pcap_filter { public: pcap_filter(); ~pcap_filter(); std::string get_value(std::string const & name) const; void init(); void run(); void handle_packet(pcap_pkthdr const * header, u_char const * packet); void handle_ipv4(in_addr const & dst, int port, int flags); private: params_t f_params = params_t(); pcap_t * f_pcap = nullptr; bool f_program_compiled = false; bpf_program f_program = bpf_program(); std::string f_ipset_add = std::string(); blacklist_t f_blacklist = blacklist_t(); cached_ip::map_t f_cached_ip = cached_ip::map_t(); }; void static_handle_packet(u_char * filter, pcap_pkthdr const * header, u_char const * packet) { reinterpret_cast<pcap_filter *>(filter)->handle_packet(header, packet); } pcap_filter::pcap_filter() { std::ifstream in(g_config); std::size_t line(0); std::string var; while(getline(in, var)) { ++line; boost::trim(var); if(var.empty()) { continue; } if(var[0] == '#') { continue; } std::string::size_type const pos(var.find('=')); std::string name(var.substr(0, pos)); boost::trim(name); if(name.empty()) { std::cerr << "error:" << line << ": variable name can't be empty." << std::endl; exit(1); } std::string value(var.substr(pos + 1)); if(name == "blacklist") { if(!value.empty()) { if(value[0] != '.') { value = "." + value; } // TODO: do more validations such as double dots, invalid dashes, etc. 
f_blacklist.push_back(value); } } else { f_params[name] = value; } } } pcap_filter::~pcap_filter() { if(f_program_compiled) { pcap_freecode(&f_program); } pcap_close(f_pcap); } std::string pcap_filter::get_value(std::string const & name) const { auto it(f_params.find(name)); if(it == f_params.end()) { return std::string(); } return it->second; } void pcap_filter::init() { std::string init_ipset(get_value("create_ipset")); if(init_ipset.empty()) { init_ipset = "ipset -exist create blacklist hash:ip family inet timeout 0 counters"; } if(system(init_ipset.c_str()) != 0) { int const e(errno); std::cerr << "error: could not initialize the blacklist ipset." << std::endl; if(e != 0) { std::cerr << "error: " << strerror(e) << std::endl; } exit(1); } f_ipset_add = get_value("ipset"); if(f_ipset_add.empty()) { f_ipset_add = "ipset add blacklist [ip]"; } std::string device(get_value("interface")); if(device.empty()) { device = "any"; } std::string const promiscuous(get_value("promiscuous")); int ms(20); // 20ms by default std::string timeout(get_value("timeout")); if(!timeout.empty()) { ms = std::stoi(timeout); } int buffer_size(IP_MAXPACKET); // 64Kb std::string user_size(get_value("buffer_size")); if(!user_size.empty()) { buffer_size = std::clamp(std::stoi(user_size), IP_MAXPACKET, 16 * 1024 * 1024); } f_pcap = pcap_open_live( device.c_str() , buffer_size , promiscuous == "on" , ms , g_errbuf); if(f_pcap == nullptr) { std::cerr << "error: opening a live pcap handle failed with: \"" << g_errbuf << "\"." 
<< std::endl; exit(1); } // filter such as "src host 10.10.10.3" // std::string filter(get_value("filter")); if(filter.empty()) { // by default we limit to IPv4 UDP/TCP packets // filter = "ip udp tcp"; } // TODO: support the netmask (last parameter) f_program_compiled = pcap_compile(f_pcap, &f_program, filter.c_str(), 1, 0) != -1; if(!f_program_compiled) { std::cerr << "error: an error occured compiling the pcap filter: " << pcap_geterr(f_pcap) << std::endl; exit(1); } if(pcap_setfilter(f_pcap, &f_program) == -1) { std::cerr << "error: an error occured setting the pcap filter: " << pcap_geterr(f_pcap) << std::endl; exit(1); } } void pcap_filter::run() { if(f_pcap == nullptr) { std::cerr << "error: run() called with f_pcap == nullptr, did youc all init()?" << std::endl; exit(1); } pcap_loop(f_pcap, 0, ::static_handle_packet, reinterpret_cast<u_char *>(this)); } void pcap_filter::handle_packet(pcap_pkthdr const * header, u_char const * packet) { ether_header const * ether(reinterpret_cast<ether_header const *>(packet)); if(ntohs(ether->ether_type) != ETHERTYPE_IP) { return; } ip const * ip_info(reinterpret_cast<ip const *>(packet + sizeof(ether_header))); // valid IP header size? std::size_t const ip_size(ip_info->ip_hl << 2); if(ip_size < (sizeof(ip))) { return; } if(ip_info->ip_p == IPPROTO_UDP) { udphdr const * udp_info(reinterpret_cast<udphdr const *>(packet + sizeof(ether_header) + ip_size)); //std::cerr // << "UDP source IP " // << ((ip_info->ip_dst.s_addr) & 255) // << "." // << ((ip_info->ip_dst.s_addr >> 8) & 255) // << "." // << ((ip_info->ip_dst.s_addr >> 16) & 255) // << "." // << (ip_info->ip_dst.s_addr >> 24) // << ":" // << (udp_info->uh_dport >> 24) // << " -> "; handle_ipv4(ip_info->ip_src, ntohs(udp_info->uh_sport), NI_DGRAM); } else if(ip_info->ip_p == IPPROTO_TCP) { tcphdr const * tcp_info(reinterpret_cast<tcphdr const *>(packet + sizeof(ether_header) + ip_size)); //std::cerr // << "TCP source IP " // << ((ip_info->ip_dst.s_addr) & 255) // << "." 
// << ((ip_info->ip_dst.s_addr >> 8) & 255) // << "." // << ((ip_info->ip_dst.s_addr >> 16) & 255) // << "." // << (ip_info->ip_dst.s_addr >> 24) // << ":" // << (tcp_info->th_dport >> 24) // << " -> "; handle_ipv4(ip_info->ip_src, ntohs(tcp_info->th_sport), 0); } //else -- ignore the rest } void pcap_filter::handle_ipv4(in_addr const & dst, int port, int flags) { cached_ip::pointer_t cache(f_cached_ip[dst.s_addr]); // create it if it doesn't exist yet // if(cache == nullptr) { cache = std::make_shared<cached_ip>(); f_cached_ip[dst.s_addr] = cache; } // update last access time so it stays longer in the cache // cache->access(); // did we search it yet? // if(cache->was_searched()) { // it was already worked on, we're done // //std::cerr // << ((dst.s_addr) & 255) // << "." // << ((dst.s_addr >> 8) & 255) // << "." // << ((dst.s_addr >> 16) & 255) // << "." // << (dst.s_addr >> 24) // << ":" // << port // << " already found\n"; return; } cache->searched(); char host[NI_MAXHOST]; sockaddr_in addr = sockaddr_in(); addr.sin_family = AF_INET; addr.sin_port = port; addr.sin_addr = dst; int const r(getnameinfo( reinterpret_cast<sockaddr const *>(&addr) , sizeof(addr) , host , sizeof(host) , nullptr , 0 , flags | NI_NAMEREQD )); if(r != 0) { // could not determine domain name //std::cerr // << ((dst.s_addr) & 255) // << "." // << ((dst.s_addr >> 8) & 255) // << "." // << ((dst.s_addr >> 16) & 255) // << "." // << (dst.s_addr >> 24) // << ":" // << port // << " no domain name\n"; return; } std::cerr << "got IP " << ((addr.sin_addr.s_addr) & 255) << "." << ((addr.sin_addr.s_addr >> 8) & 255) << "." << ((addr.sin_addr.s_addr >> 16) & 255) << "." << (addr.sin_addr.s_addr >> 24) << ":" << port << " -> [" << host << "]! 
\n"; cache->set_host(host); // now check whether this domain name is blacklisted // if(cache->match(f_blacklist)) { // this is a match, block that IP address // char host_ip[NI_MAXHOST]; int const q(getnameinfo( reinterpret_cast<sockaddr const *>(&addr) , sizeof(addr) , host_ip , sizeof(host_ip) , nullptr , 0 , flags | NI_NUMERICHOST )); if(q != 0) { std::cerr << "error: getnameinfo() could not convert the IPv4 to a string." << std::endl; exit(1); } std::string add(f_ipset_add); boost::replace_all(add, "[ip]", host_ip); if(system(add.c_str()) != 0) { // it may be that all will fail, but this is not a fatal error // also it comes here when the element already exists // int const e(errno); std::cerr << "warning: \"" << add << "\" command failed << (e != 0 ? std::string("with: ") + strerror(e) : "") << "." << std::endl; } } } void usage() { std::cout << "Usage: blacklist" << std::endl; std::cout << " and edit the " << g_config << std::endl; } int main(int argc, char * argv[]) { for(int i(1); i < argc; ++i) { if(strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "--help") == 0) { usage(); return 9; } std::cerr << "error: unknown command line option \"" << argv[i] << "\"." << std::endl; exit(1); } pcap_filter filter; filter.init(); filter.run(); return 0; } // vim: ts=4 sw=4 et
#ifndef LOG_H
#define LOG_H

/* Severity levels, ordered from least (DEBUG) to most (ERROR) severe. */
enum LogLevel {
    LOG_LEVEL_DEBUG = 0,
    LOG_LEVEL_INFO,
    LOG_LEVEL_WARNING,
    LOG_LEVEL_ERROR
};

/* Current filtering threshold; messages below this level are dropped
 * (definition lives in the matching .c file). */
extern enum LogLevel log_threshold;

/* printf-style loggers, one per severity level.
 * Parameter renamed from the reserved identifier `__format` (names with a
 * leading double underscore are reserved for the implementation). */
void log_debug(const char *__restrict fmt, ...);
void log_info(const char *__restrict fmt, ...);
void log_warn(const char *__restrict fmt, ...);
void log_error(const char *__restrict fmt, ...);

#endif // LOG_H
<reponame>ppartarr/azure-sdk-for-java<gh_stars>1-10
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.implementation.changefeed;

import reactor.core.publisher.Mono;

/**
 * Checkpoint the given partition up to the given continuation token.
 */
public interface PartitionCheckpointer {
    /**
     * Checkpoints the given partition up to the given continuation token.
     *
     * <p>The operation is deferred: nothing happens until the returned {@link Mono} is subscribed to.
     *
     * @param continuationToken the continuation token.
     * @return a deferred operation of this call, emitting the resulting {@link Lease}.
     */
    Mono<Lease> checkpointPartition(String continuationToken);
}
/**
 * Cancels this SetupIntent. Cancellation is only possible while the intent is in one of these
 * statuses: <code>requires_payment_method</code>, <code>requires_confirmation</code>, or <code>
 * requires_action</code>.
 *
 * <p>Once canceled, setup is abandoned and any operations on the SetupIntent will fail with an
 * error.
 */
public SetupIntent cancel(SetupIntentCancelParams params, RequestOptions options)
    throws StripeException {
  // Build the endpoint path first, then prepend the configured API base.
  String path =
      String.format("/v1/setup_intents/%s/cancel", ApiResource.urlEncodeId(this.getId()));
  String url = String.format("%s%s", Stripe.getApiBase(), path);
  return ApiResource.request(
      ApiResource.RequestMethod.POST, url, params, SetupIntent.class, options);
}
#include <stdio.h>

/* Count the decimal digits of n that are non-zero.
 * E.g. 405 -> 2, 1000 -> 1, 0 -> 0. */
int count_nonzero_digits(int n)
{
    int count = 0;
    while (n > 0) {
        if (n % 10 != 0)
            count++;
        n /= 10;
    }
    return count;
}

/* Print the non-zero place values of n, least significant first, each
 * followed by a space, then a newline.  E.g. 405 -> "5 400 \n".
 * Fixed: the original used "%ld" with an int argument, which is
 * undefined behavior; the value d * fact is an int, so "%d" is correct. */
void print_place_values(int n)
{
    int fact = 1;           /* current power of ten (place value) */
    while (n > 0) {
        int d = n % 10;
        if (d != 0)
            printf("%d ", d * fact);
        fact *= 10;         /* NOTE: overflows int for very large inputs */
        n /= 10;
    }
    printf("\n");
}

/* Read t test cases; for each number print its non-zero digit count and
 * its non-zero place values. */
int main()
{
    int t;
    /* "%d" (without "\n") is the correct scanf usage: a literal newline in
     * the format greedily consumes all following whitespace and can block. */
    if (scanf("%d", &t) != 1)
        return 1;
    while (t--) {
        int n;
        if (scanf("%d", &n) != 1)
            return 1;
        printf("%d\n", count_nonzero_digits(n));
        print_place_values(n);
    }
    return 0;
}
<filename>main.go<gh_stars>0 package main import ( "fmt" "io/ioutil" "log" "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" complexpb "github.com/sitthakarn/proto-go-simple/src/complex" enumpb "github.com/sitthakarn/proto-go-simple/src/enum_example" simplepb "github.com/sitthakarn/proto-go-simple/src/simple" ) func main() { sm := doSimple() readAndWriteDemo(sm) jsonDemo(sm) doEnum() doComplex() } func doComplex() { cm := complexpb.ComplexMessage{ OneDummy: &complexpb.DummyMessage{ Id: 1, Name: "First message", }, MultipleDummy: []*complexpb.DummyMessage{ &complexpb.DummyMessage{ Id: 2, Name: "Second message", }, &complexpb.DummyMessage{ Id: 3, Name: "Third message", }, }, } fmt.Println(cm) } func doEnum() { em := enumpb.EnumMessage{ Id: 42, DayOfTheWeek: enumpb.DayOfTheWeek_THURSDAY, } em.DayOfTheWeek = enumpb.DayOfTheWeek_MONDAY fmt.Println(em) } func jsonDemo(sm proto.Message) { smAsString := toJSON(sm) fmt.Println(smAsString) sm2 := &simplepb.SimpleMessage{} fromJSON(smAsString, sm2) fmt.Println("Successfully created proto struct:", sm2) } func toJSON(pb proto.Message) string { marshaler := jsonpb.Marshaler{} out, err := marshaler.MarshalToString(pb) if err != nil { log.Fatalln("Can't convert to JSON", err) return "" } return out } func fromJSON(in string, pb proto.Message) { err := jsonpb.UnmarshalString(in, pb) if err != nil { log.Fatalln("Couldn't unmarshal the JSON into the pb struct", err) } } func readAndWriteDemo(sm proto.Message) { writeToFile("simple.bin", sm) sm2 := &simplepb.SimpleMessage{} readFromFile("simple.bin", sm2) fmt.Println("Read the content:", sm2) } func writeToFile(fname string, pb proto.Message) error { out, err := proto.Marshal(pb) if err != nil { log.Fatalln("Can't serialise to bytes", err) return err } if err := ioutil.WriteFile(fname, out, 0644); err != nil { log.Fatalln("Can't write to file", err) return err } fmt.Println("Data has been written!") return nil } func readFromFile(fname string, pb proto.Message) 
error { in, err := ioutil.ReadFile(fname) if err != nil { log.Fatalln("Something went wrong when reading the file", err) return err } err2 := proto.Unmarshal(in, pb) if err2 != nil { log.Fatalln("Couldn't put the bytes into the protocol buffers struct", err2) return err2 } return nil } func doSimple() *simplepb.SimpleMessage { sm := simplepb.SimpleMessage{ Id: 12345, IsSimple: true, Name: "My Simple Message", SampleList: []int32{1, 4, 7, 8}, } fmt.Println(sm) sm.Name = "I renamed you" fmt.Println(sm) fmt.Println("The ID is:", sm.GetId()) return &sm }
<reponame>guyplusplus/kibana<filename>src/plugins/kibana_usage_collection/server/collectors/application_usage/telemetry_application_usage_collector.ts /* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import moment from 'moment'; import { ISavedObjectsRepository, SavedObjectsServiceSetup } from 'kibana/server'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { findAll } from '../find_all'; import { ApplicationUsageTotal, ApplicationUsageTransactional, registerMappings, } from './saved_objects_types'; /** * Roll indices every 24h */ export const ROLL_INDICES_INTERVAL = 24 * 60 * 60 * 1000; /** * Start rolling indices after 5 minutes up */ export const ROLL_INDICES_START = 5 * 60 * 1000; export const SAVED_OBJECTS_TOTAL_TYPE = 'application_usage_totals'; export const SAVED_OBJECTS_TRANSACTIONAL_TYPE = 'application_usage_transactional'; interface ApplicationUsageTelemetryReport { [appId: string]: { clicks_total: number; clicks_7_days: number; clicks_30_days: number; clicks_90_days: number; minutes_on_screen_total: number; minutes_on_screen_7_days: number; minutes_on_screen_30_days: number; minutes_on_screen_90_days: number; }; } export function registerApplicationUsageCollector( usageCollection: UsageCollectionSetup, 
registerType: SavedObjectsServiceSetup['registerType'], getSavedObjectsClient: () => ISavedObjectsRepository | undefined ) { registerMappings(registerType); const collector = usageCollection.makeUsageCollector({ type: 'application_usage', isReady: () => typeof getSavedObjectsClient() !== 'undefined', fetch: async () => { const savedObjectsClient = getSavedObjectsClient(); if (typeof savedObjectsClient === 'undefined') { return; } const [rawApplicationUsageTotals, rawApplicationUsageTransactional] = await Promise.all([ findAll<ApplicationUsageTotal>(savedObjectsClient, { type: SAVED_OBJECTS_TOTAL_TYPE }), findAll<ApplicationUsageTransactional>(savedObjectsClient, { type: SAVED_OBJECTS_TRANSACTIONAL_TYPE, }), ]); const applicationUsageFromTotals = rawApplicationUsageTotals.reduce( (acc, { attributes: { appId, minutesOnScreen, numberOfClicks } }) => { const existing = acc[appId] || { clicks_total: 0, minutes_on_screen_total: 0 }; return { ...acc, [appId]: { clicks_total: numberOfClicks + existing.clicks_total, clicks_7_days: 0, clicks_30_days: 0, clicks_90_days: 0, minutes_on_screen_total: minutesOnScreen + existing.minutes_on_screen_total, minutes_on_screen_7_days: 0, minutes_on_screen_30_days: 0, minutes_on_screen_90_days: 0, }, }; }, {} as ApplicationUsageTelemetryReport ); const nowMinus7 = moment().subtract(7, 'days'); const nowMinus30 = moment().subtract(30, 'days'); const nowMinus90 = moment().subtract(90, 'days'); const applicationUsage = rawApplicationUsageTransactional.reduce( (acc, { attributes: { appId, minutesOnScreen, numberOfClicks, timestamp } }) => { const existing = acc[appId] || { clicks_total: 0, clicks_7_days: 0, clicks_30_days: 0, clicks_90_days: 0, minutes_on_screen_total: 0, minutes_on_screen_7_days: 0, minutes_on_screen_30_days: 0, minutes_on_screen_90_days: 0, }; const timeOfEntry = moment(timestamp as string); const isInLast7Days = timeOfEntry.isSameOrAfter(nowMinus7); const isInLast30Days = timeOfEntry.isSameOrAfter(nowMinus30); const 
isInLast90Days = timeOfEntry.isSameOrAfter(nowMinus90); const last7Days = { clicks_7_days: existing.clicks_7_days + numberOfClicks, minutes_on_screen_7_days: existing.minutes_on_screen_7_days + minutesOnScreen, }; const last30Days = { clicks_30_days: existing.clicks_30_days + numberOfClicks, minutes_on_screen_30_days: existing.minutes_on_screen_30_days + minutesOnScreen, }; const last90Days = { clicks_90_days: existing.clicks_90_days + numberOfClicks, minutes_on_screen_90_days: existing.minutes_on_screen_90_days + minutesOnScreen, }; return { ...acc, [appId]: { ...existing, clicks_total: existing.clicks_total + numberOfClicks, minutes_on_screen_total: existing.minutes_on_screen_total + minutesOnScreen, ...(isInLast7Days ? last7Days : {}), ...(isInLast30Days ? last30Days : {}), ...(isInLast90Days ? last90Days : {}), }, }; }, applicationUsageFromTotals ); return applicationUsage; }, }); usageCollection.registerCollector(collector); setInterval(() => rollTotals(getSavedObjectsClient()), ROLL_INDICES_INTERVAL); setTimeout(() => rollTotals(getSavedObjectsClient()), ROLL_INDICES_START); } async function rollTotals(savedObjectsClient?: ISavedObjectsRepository) { if (!savedObjectsClient) { return; } try { const [rawApplicationUsageTotals, rawApplicationUsageTransactional] = await Promise.all([ findAll<ApplicationUsageTotal>(savedObjectsClient, { type: SAVED_OBJECTS_TOTAL_TYPE }), findAll<ApplicationUsageTransactional>(savedObjectsClient, { type: SAVED_OBJECTS_TRANSACTIONAL_TYPE, filter: `${SAVED_OBJECTS_TRANSACTIONAL_TYPE}.attributes.timestamp < now-90d`, }), ]); const existingTotals = rawApplicationUsageTotals.reduce( (acc, { attributes: { appId, numberOfClicks, minutesOnScreen } }) => { return { ...acc, // No need to sum because there should be 1 document per appId only [appId]: { appId, numberOfClicks, minutesOnScreen }, }; }, {} as Record<string, { appId: string; minutesOnScreen: number; numberOfClicks: number }> ); const totals = 
rawApplicationUsageTransactional.reduce((acc, { attributes, id }) => { const { appId, numberOfClicks, minutesOnScreen } = attributes; const existing = acc[appId] || { minutesOnScreen: 0, numberOfClicks: 0 }; return { ...acc, [appId]: { appId, numberOfClicks: numberOfClicks + existing.numberOfClicks, minutesOnScreen: minutesOnScreen + existing.minutesOnScreen, }, }; }, existingTotals); await Promise.all([ Object.entries(totals).length && savedObjectsClient.bulkCreate<ApplicationUsageTotal>( Object.entries(totals).map(([id, entry]) => ({ type: SAVED_OBJECTS_TOTAL_TYPE, id, attributes: entry, })), { overwrite: true } ), ...rawApplicationUsageTransactional.map( ({ id }) => savedObjectsClient.delete(SAVED_OBJECTS_TRANSACTIONAL_TYPE, id) // There is no bulkDelete :( ), ]); } catch (err) { // Silent failure } }
Video: Naked man shot after breaking into Albuquerque home Copyright by KRQE - All rights reserved Video ALBUQUERQUE, N.M. (KRQE) - A naked Albuquerque man chose the wrong house to break into. When a homeowner saw an intruder coming through his bedroom window in the middle of the night, he grabbed his gun from the dresser and started shooting. "We're all concerned about you man, that's all," said an Albuquerque police officer to Carlos Alberto Juarez in newly released lapel video. Juarez was naked and bloody in a Northeast Heights neighborhood on November 23, when dispatch received multiple calls about a naked man jumping fences, trying to break into homes. "I need help," said Juarez. The officer responded, "We're getting rescue here right now." Police were able to quickly identify the unclothed burglar who told them he'd been drinking and didn't know if he'd taken any drugs. "What's going on?" asked an officer. Juarez replied, "The house." While several officers dealt with Juarez, other officers were speaking with an elderly couple at a home nearby. In lapel video, the husband walks officers into his bedroom. "He was coming through the window," said the elderly man; an officer responded, "He had his head in here?" while pointing to the window, and the man said yes. The man told police he and his wife were asleep in their bed when Juarez came breaking into their bedroom window. "I can't hear nothing, I fired off a couple of shots here," said the man while speaking with police. Albuquerque police also visited several other nearby apartments where residents reported seeing Juarez, naked, running through their backyards. "We were sitting down watching TV when I went out to my backyard and saw a guy, naked jumping the fence," said another homeowner in lapel video. But the main scene remained outside the home where police located Juarez, "We just want to make sure that you're okay," said one officer to the disoriented Juarez.
As police finished questioning the elderly couple, Juarez was taken to the hospital. He told police he didn't know what street he was on or what happened before police found him. He had three gunshot wounds to his neck and face. He was later taken to jail, charged with residential burglary and breaking and entering. He appeared before a judge in late November but has since been released on bond.
/// The default Tari script pushes a single zero onto the stack, so it executes successfully with zero inputs.
impl Default for TariScript {
    fn default() -> Self {
        // Built via the `script!` macro with the single opcode `PushZero`.
        script!(PushZero)
    }
}
<gh_stars>0 import { Component, OnInit, ViewChild, Input } from '@angular/core'; import {_HttpClient, ModalHelper, SettingsService} from '@delon/theme'; import {JsonData} from "@shared/shared.module"; import {DictionaryService} from "@shared/services/dictionary.service"; import {NzMessageService} from "ng-zorro-antd"; import {FormControl} from "@angular/forms"; import {debounceTime} from "rxjs/operators"; import {LessonOperateService} from "@shared/services/lesson-operate.service"; import {ActivatedRoute} from "@angular/router"; import {ReuseTabService} from "@delon/abc"; import {SharedEditLessonComponent} from "@shared/components/edit-lesson/edit-lesson.component"; @Component({ selector: 'app-lessons-lesson', templateUrl: './lesson.component.html', }) export class LessonsLessonComponent implements OnInit { lesson: any; lessonStatusList: Array<any>; id = this.route.snapshot.params.id; today: any; constructor( private route: ActivatedRoute, private http: _HttpClient, public msgSrv: NzMessageService, private modal: ModalHelper, private dic: DictionaryService, private op: LessonOperateService, private reuseTabService: ReuseTabService, ) { this.op.setCom(this); } change() { this.modal.create( SharedEditLessonComponent, {size: 'sm'}, {modalOptions: { nzTitle: `修改课程信息`, nzComponentParams: {lessonId: this.id} } }).subscribe(res => this.op.reload(res) ); } ngOnInit() { this.lessonStatusList = this.dic.getLessonStatusList(); this.today = new Date(); this.today.setHours(23,59,59); this.load(); } load() { this.http.get<JsonData>(`/lessons/${this.id}`).subscribe( (data) =>{ this.lesson = data.data; this.reuseTabService.title = this.lesson.name; } ); } }
/** * Unit tests for the {@link Counter} API. */ @RunWith(JUnit4.class) public class CounterTest { @Rule public ExpectedException thrown = ExpectedException.none(); private static void flush(Counter<?> c) { switch (c.getKind()) { case SUM: case MAX: case MIN: case AND: case OR: c.getAndResetDelta(); break; case MEAN: c.getAndResetMeanDelta(); break; default: throw new IllegalArgumentException("Unknown counter kind " + c.getKind()); } } private static final double EPSILON = 0.00000000001; @Test public void testCompatibility() { // Equal counters are compatible, of all kinds. assertTrue( Counter.longs("c", SUM).isCompatibleWith(Counter.longs("c", SUM))); assertTrue( Counter.ints("c", SUM).isCompatibleWith(Counter.ints("c", SUM))); assertTrue( Counter.doubles("c", SUM).isCompatibleWith(Counter.doubles("c", SUM))); assertTrue( Counter.booleans("c", OR).isCompatibleWith( Counter.booleans("c", OR))); // The name, kind, and type of the counter must match. assertFalse( Counter.longs("c", SUM).isCompatibleWith(Counter.longs("c2", SUM))); assertFalse( Counter.longs("c", SUM).isCompatibleWith(Counter.longs("c", MAX))); assertFalse( Counter.longs("c", SUM).isCompatibleWith(Counter.ints("c", SUM))); // The value of the counters are ignored. assertTrue( Counter.longs("c", SUM).resetToValue(666L).isCompatibleWith( Counter.longs("c", SUM).resetToValue(42L))); } private void assertOK(long total, long delta, Counter<Long> c) { assertEquals(total, c.getAggregate().longValue()); assertEquals(delta, c.getAndResetDelta().longValue()); } private void assertOK(double total, double delta, Counter<Double> c) { assertEquals(total, asDouble(c.getAggregate()), EPSILON); assertEquals(delta, asDouble(c.getAndResetDelta()), EPSILON); } // Tests for SUM. 
@Test public void testSumLong() { Counter<Long> c = Counter.longs("sum-long", SUM); long expectedTotal = 0; long expectedDelta = 0; assertOK(expectedTotal, expectedDelta, c); c.addValue(13L).addValue(42L).addValue(0L); expectedTotal += 55; expectedDelta += 55; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(120L).addValue(17L).addValue(37L); expectedTotal = expectedDelta = 174; assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = 0; assertOK(expectedTotal, expectedDelta, c); c.addValue(15L).addValue(42L); expectedTotal += 57; expectedDelta += 57; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(100L).addValue(17L).addValue(49L); expectedTotal = expectedDelta = 166; assertOK(expectedTotal, expectedDelta, c); Counter<Long> other = Counter.longs("sum-long", SUM); other.addValue(12L); expectedDelta = 12L; expectedTotal += 12L; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } @Test public void testSumDouble() { Counter<Double> c = Counter.doubles("sum-double", SUM); double expectedTotal = 0.0; double expectedDelta = 0.0; assertOK(expectedTotal, expectedDelta, c); c.addValue(Math.E).addValue(Math.PI).addValue(0.0); expectedTotal += Math.E + Math.PI; expectedDelta += Math.E + Math.PI; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(2)).addValue(2 * Math.PI).addValue(3 * Math.E); expectedTotal = expectedDelta = Math.sqrt(2) + 2 * Math.PI + 3 * Math.E; assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = 0.0; assertOK(expectedTotal, expectedDelta, c); c.addValue(7 * Math.PI).addValue(5 * Math.E); expectedTotal += 7 * Math.PI + 5 * Math.E; expectedDelta += 7 * Math.PI + 5 * Math.E; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(17)).addValue(17.0).addValue(49.0); expectedTotal = expectedDelta = Math.sqrt(17.0) + 17.0 + 49.0; assertOK(expectedTotal, expectedDelta, c); Counter<Double> other = Counter.doubles("sum-double", SUM); other.addValue(12 * Math.PI); expectedDelta = 
12 * Math.PI; expectedTotal += 12 * Math.PI; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } // Tests for MAX. @Test public void testMaxLong() { Counter<Long> c = Counter.longs("max-long", MAX); long expectedTotal = Long.MIN_VALUE; long expectedDelta = Long.MIN_VALUE; assertOK(expectedTotal, expectedDelta, c); c.addValue(13L).addValue(42L).addValue(Long.MIN_VALUE); expectedTotal = expectedDelta = 42; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(120L).addValue(17L).addValue(37L); expectedTotal = expectedDelta = 120; assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = Long.MIN_VALUE; assertOK(expectedTotal, expectedDelta, c); c.addValue(42L).addValue(15L); expectedDelta = 42; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(100L).addValue(171L).addValue(49L); expectedTotal = expectedDelta = 171; assertOK(expectedTotal, expectedDelta, c); Counter<Long> other = Counter.longs("max-long", MAX); other.addValue(12L); expectedDelta = 12L; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } @Test public void testMaxDouble() { Counter<Double> c = Counter.doubles("max-double", MAX); double expectedTotal = Double.NEGATIVE_INFINITY; double expectedDelta = Double.NEGATIVE_INFINITY; assertOK(expectedTotal, expectedDelta, c); c.addValue(Math.E).addValue(Math.PI).addValue(Double.NEGATIVE_INFINITY); expectedTotal = expectedDelta = Math.PI; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(12345)).addValue(2 * Math.PI).addValue(3 * Math.E); expectedTotal = expectedDelta = Math.sqrt(12345); assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = Double.NEGATIVE_INFINITY; assertOK(expectedTotal, expectedDelta, c); c.addValue(7 * Math.PI).addValue(5 * Math.E); expectedDelta = 7 * Math.PI; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(17)).addValue(171.0).addValue(49.0); expectedTotal = expectedDelta = 171.0; assertOK(expectedTotal, expectedDelta, c); Counter<Double> other = 
Counter.doubles("max-double", MAX); other.addValue(12 * Math.PI); expectedDelta = 12 * Math.PI; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } // Tests for MIN. @Test public void testMinLong() { Counter<Long> c = Counter.longs("min-long", MIN); long expectedTotal = Long.MAX_VALUE; long expectedDelta = Long.MAX_VALUE; assertOK(expectedTotal, expectedDelta, c); c.addValue(13L).addValue(42L).addValue(Long.MAX_VALUE); expectedTotal = expectedDelta = 13; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(120L).addValue(17L).addValue(37L); expectedTotal = expectedDelta = 17; assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = Long.MAX_VALUE; assertOK(expectedTotal, expectedDelta, c); c.addValue(42L).addValue(18L); expectedDelta = 18; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(100L).addValue(171L).addValue(49L); expectedTotal = expectedDelta = 49; assertOK(expectedTotal, expectedDelta, c); Counter<Long> other = Counter.longs("min-long", MIN); other.addValue(42L); expectedTotal = expectedDelta = 42L; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } @Test public void testMinDouble() { Counter<Double> c = Counter.doubles("min-double", MIN); double expectedTotal = Double.POSITIVE_INFINITY; double expectedDelta = Double.POSITIVE_INFINITY; assertOK(expectedTotal, expectedDelta, c); c.addValue(Math.E).addValue(Math.PI).addValue(Double.POSITIVE_INFINITY); expectedTotal = expectedDelta = Math.E; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(12345)).addValue(2 * Math.PI).addValue(3 * Math.E); expectedTotal = expectedDelta = 2 * Math.PI; assertOK(expectedTotal, expectedDelta, c); flush(c); expectedDelta = Double.POSITIVE_INFINITY; assertOK(expectedTotal, expectedDelta, c); c.addValue(7 * Math.PI).addValue(5 * Math.E); expectedDelta = 5 * Math.E; assertOK(expectedTotal, expectedDelta, c); c.resetToValue(Math.sqrt(17)).addValue(171.0).addValue(0.0); expectedTotal = expectedDelta = 0.0; 
assertOK(expectedTotal, expectedDelta, c); Counter<Double> other = Counter.doubles("min-double", MIN); other.addValue(42 * Math.E); expectedDelta = 42 * Math.E; c.merge(other); assertOK(expectedTotal, expectedDelta, c); } // Tests for MEAN. private void assertMean(long s, long sd, long c, long cd, Counter<Long> cn) { CounterMean<Long> mean = cn.getMean(); CounterMean<Long> deltaMean = cn.getAndResetMeanDelta(); assertEquals(s, mean.getAggregate().longValue()); assertEquals(sd, deltaMean.getAggregate().longValue()); assertEquals(c, mean.getCount()); assertEquals(cd, deltaMean.getCount()); } private void assertMean(double s, double sd, long c, long cd, Counter<Double> cn) { CounterMean<Double> mean = cn.getMean(); CounterMean<Double> deltaMean = cn.getAndResetMeanDelta(); assertEquals(s, mean.getAggregate().doubleValue(), EPSILON); assertEquals(sd, deltaMean.getAggregate().doubleValue(), EPSILON); assertEquals(c, mean.getCount()); assertEquals(cd, deltaMean.getCount()); } @Test public void testMeanLong() { Counter<Long> c = Counter.longs("mean-long", MEAN); long expTotal = 0; long expDelta = 0; long expCountTotal = 0; long expCountDelta = 0; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.addValue(13L).addValue(42L).addValue(0L); expTotal += 55; expDelta += 55; expCountTotal += 3; expCountDelta += 3; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.resetMeanToValue(1L, 120L).addValue(17L).addValue(37L); expTotal = expDelta = 174; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); flush(c); expDelta = 0; expCountDelta = 0; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.addValue(15L).addValue(42L); expTotal += 57; expDelta += 57; expCountTotal += 2; expCountDelta += 2; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.resetMeanToValue(3L, 100L).addValue(17L).addValue(49L); expTotal = expDelta = 166; expCountTotal = expCountDelta = 5; assertMean(expTotal, expDelta, expCountTotal, 
expCountDelta, c); Counter<Long> other = Counter.longs("mean-long", MEAN); other.addValue(12L).addValue(44L).addValue(-5L); expTotal += 12L + 44L - 5L; expDelta += 12L + 44L - 5L; expCountTotal += 3; expCountDelta += 3; c.merge(other); assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); } @Test public void testMeanDouble() { Counter<Double> c = Counter.doubles("mean-double", MEAN); double expTotal = 0.0; double expDelta = 0.0; long expCountTotal = 0; long expCountDelta = 0; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.addValue(Math.E).addValue(Math.PI).addValue(0.0); expTotal += Math.E + Math.PI; expDelta += Math.E + Math.PI; expCountTotal += 3; expCountDelta += 3; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.resetMeanToValue(1L, Math.sqrt(2)).addValue(2 * Math.PI) .addValue(3 * Math.E); expTotal = expDelta = Math.sqrt(2) + 2 * Math.PI + 3 * Math.E; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); flush(c); expDelta = 0.0; expCountDelta = 0; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.addValue(7 * Math.PI).addValue(5 * Math.E); expTotal += 7 * Math.PI + 5 * Math.E; expDelta += 7 * Math.PI + 5 * Math.E; expCountTotal += 2; expCountDelta += 2; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); c.resetMeanToValue(3L, Math.sqrt(17)).addValue(17.0).addValue(49.0); expTotal = expDelta = Math.sqrt(17.0) + 17.0 + 49.0; expCountTotal = expCountDelta = 5; assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); Counter<Double> other = Counter.doubles("mean-double", MEAN); other.addValue(3 * Math.PI).addValue(12 * Math.E); expTotal += 3 * Math.PI + 12 * Math.E; expDelta += 3 * Math.PI + 12 * Math.E; expCountTotal += 2; expCountDelta += 2; c.merge(other); assertMean(expTotal, expDelta, expCountTotal, expCountDelta, c); } // Test for AND and OR. 
private void assertBool(boolean total, boolean delta, Counter<Boolean> c) { assertEquals(total, c.getAggregate().booleanValue()); assertEquals(delta, c.getAndResetDelta().booleanValue()); } @Test public void testBoolAnd() { Counter<Boolean> c = Counter.booleans("bool-and", AND); boolean expectedTotal = true; boolean expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); c.addValue(true); assertBool(expectedTotal, expectedDelta, c); c.addValue(false); expectedTotal = expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); c.resetToValue(true).addValue(true); expectedTotal = expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); c.addValue(false); expectedTotal = expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); flush(c); expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); c.addValue(false); expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); } @Test public void testBoolOr() { Counter<Boolean> c = Counter.booleans("bool-or", OR); boolean expectedTotal = false; boolean expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); c.addValue(false); assertBool(expectedTotal, expectedDelta, c); c.addValue(true); expectedTotal = expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); c.resetToValue(false).addValue(false); expectedTotal = expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); c.addValue(true); expectedTotal = expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); flush(c); expectedDelta = false; assertBool(expectedTotal, expectedDelta, c); c.addValue(true); expectedDelta = true; assertBool(expectedTotal, expectedDelta, c); } // Incompatibility tests. 
// Each factory must reject aggregation kinds that make no sense for its
// value type; construction itself throws IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
  public void testSumBool() {
    Counter.booleans("counter", SUM);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testMinBool() {
    Counter.booleans("counter", MIN);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testMaxBool() {
    Counter.booleans("counter", MAX);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testMeanBool() {
    Counter.booleans("counter", MEAN);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testAndLong() {
    Counter.longs("counter", AND);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testAndDouble() {
    Counter.doubles("counter", AND);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testOrLong() {
    Counter.longs("counter", OR);
  }

  @Test(expected = IllegalArgumentException.class)
  public void testOrDouble() {
    Counter.doubles("counter", OR);
  }

  // Merging is only legal between counters that agree on name, value type and
  // aggregation kind; every other pairing must be rejected.
  @Test
  public void testMergeIncompatibleCounters() {
    Counter<Long> longSums = Counter.longs("longsums", SUM);
    Counter<Long> longMean = Counter.longs("longmean", MEAN);
    Counter<Long> longMin = Counter.longs("longmin", MIN);
    Counter<Long> otherLongSums = Counter.longs("othersums", SUM);
    Counter<Long> otherLongMean = Counter.longs("otherlongmean", MEAN);

    Counter<Double> doubleSums = Counter.doubles("doublesums", SUM);
    Counter<Double> doubleMean = Counter.doubles("doublemean", MEAN);

    Counter<Boolean> boolAnd = Counter.booleans("and", AND);
    Counter<Boolean> boolOr = Counter.booleans("or", OR);

    List<Counter<Long>> longCounters = Arrays.asList(longSums, longMean, longMin, otherLongSums, otherLongMean);
    for (Counter<Long> left : longCounters) {
      for (Counter<Long> right : longCounters) {
        if (left != right) {
          assertIncompatibleMerge(left, right);
        }
      }
    }

    assertIncompatibleMerge(doubleSums, doubleMean);
    assertIncompatibleMerge(boolAnd, boolOr);
  }

  // NOTE(review): once the ExpectedException rule is armed here, the first
  // left.merge(right) that throws ends the test as passing — so when this
  // helper is called inside a loop (see testMergeIncompatibleCounters) only
  // the first incompatible pair is actually exercised.
  private <T> void assertIncompatibleMerge(Counter<T> left, Counter<T> right) {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Counters");
    thrown.expectMessage("are incompatible");
    left.merge(right);
  }
}
import { IconDefinition } from '@fortawesome/free-regular-svg-icons';

/** View-model for a single service entry rendered by the UI. */
export interface ServiceModel {
  /** Font Awesome icon displayed for the service. */
  icon: IconDefinition;
  /** Heading text of the entry. */
  header: string;
  /** Body/description text of the entry. */
  text: string;
  /** Optional label; NOTE(review): exact purpose (e.g. ordinal badge) is not
   *  visible here — confirm against the component consuming this model. */
  num?: string;
}
<reponame>Longi94/rocket-viewer<gh_stars>1-10 use serde::Serialize; use crate::model::body_states::BodyStates; #[derive(Serialize, Debug, PartialEq, Copy, Clone)] pub enum BallType { Unknown, Default, Basketball, Puck, Cube, Breakout, } #[derive(Serialize, Debug)] pub struct BallData { pub ball_type: BallType, pub body_states: BodyStates, pub hit_team: Vec<u8>, pub hit_team_times: Vec<f32>, } impl BallData { pub fn new() -> Self { BallData { ball_type: BallType::Unknown, body_states: BodyStates::new(), hit_team: vec![2], hit_team_times: vec![0.0], } } pub fn reset(&mut self, time: f32) { self.hit_team.push(2); self.hit_team_times.push(time); } }
// Adds an edge between labels. bool AddMEdge( const int label0, const int label1, bool* matrix, int* labels, int* num_labels) { if (label0 != label1) { int label0idx=-1, label1idx=-1; for (int i = 0; i < *num_labels; ++i) { if (labels[i] == label0) label0idx = i; if (labels[i] == label1) label1idx = i; } if (label0idx == -1) { if (*num_labels == MAX_LABELS) return false; label0idx = (*num_labels)++; labels[label0idx] = label0; } if (label1idx == -1) { if (*num_labels == MAX_LABELS) return false; label1idx = (*num_labels)++; labels[label1idx] = label1; } matrix[label0idx*MAX_LABELS+label1idx] = true; matrix[label1idx*MAX_LABELS+label0idx] = true; } return true; }
As we often watch wealthy countries heap on themselves and each other generous portions of praise for helping ‘needy’ countries and using their donations to accelerate development in impoverished regions so as to end poverty, another scenario is playing itself out. This scenario is rarely reported. Africa, the receiver of $30 billion in annual monetary handouts, is not only making nothing from the aid it receives but it actually loses $192 billion to the rest of the world within the same time frame. How, you ask? Research published recently indicates that current practices within the continent tend to favour wealthy countries. These practices include tax dodging, the repatriation of multinational companies’ profits with their unjust trade policies, the costs incurred from climate change and the exodus of skilled workers. This means, basically, that if you take into account the money coming in through aid, investment and remittances ($134 billion), Africa is left with a $58 billion annual loss. To put this into perspective, the money that Africa loses each year is over one and a half times the amount of additional money needed to deliver affordable health care to everyone in the whole world! As you can see, it is Africa that is aiding the rest of the world – not the other way round. “All those countries who keep their aid promises should be proud of what they’ve done” – David Cameron, UK Prime Minister. Despite decades of public fundraising and aid, the end to Africa’s poverty is nowhere in sight. This “aid” is actually a smokescreen used to hide from public view the fact that it is the donors themselves who are perpetuating this cycle of indigence. The report highlights that Africa is essentially not poor. A combination of inequitable policies, massive disparities in power and criminal activities perpetrated and sustained by wealthy elites both inside and outside the continent are keeping its people in poverty.
The UK and other wealthy governments are at the heart of this theft. For example, the continent haemorrhages $35.3 billion annually through the tax evasion and other dodgy financial flows enabled by tax havens. These tax havens are jurisdictionally linked to the G8 and the European Union and account for 70% of global tax haven investment. The UK has 11 tax havens under its jurisdiction! Something is seriously wrong in this picture. It’s most often the resource-rich countries that show the most bleeding of finance. One would expect them to be displaying lower levels of poverty, but the opposite tends to be true. Out of one billion of the world’s poorest people, 33% live in resource rich countries. Which explains why 75% of the dozen countries at the bottom of the Human Development Index (HDI) are rich in natural resources. https://www.youtube.com/watch?v=E5hdcfFyahM It is time for the UK and other implicated governments to stop misrepresenting the real nature of the relationship between aid and poverty in Africa. Maybe then, that will put an end to endless rhetoric about what they are doing for Africa because, at the end of the day, it’s all about what Africa is doing for them! Politicians are only telling us half the story when it comes to the world’s financial relationship with Africa. See the full Health Poverty Africa report here. Health Poverty Africa have a petition addressed to the G20, demanding honest accounts of their relationship with Africa. Click here to sign. Source: Think Africa Press
/** * Reverse alternate 'K' nodes in a linked list. * * Example: * * 1 --> 2 --> 3 --> 4 --> 5 --> 6 --> 7 --> 8 * * k = 2 * * 2 --> 1 --> 3 --> 4 --> 6 --> 5 --> 7 -->8 * * Approach: * * 1) Solution will be similar to "Reverse Linked List in groups of size K" * 2) Take 'k' nodes for a iteration. * 3) Call rest of them recursively * 4) In alternate iterations: * Reverse 'k' nodes * Skip the 'k' nodes * 5) We use boolean variable to identify when to reverse or skip nodes. * * Time Complexity: O(n) * Space Complexity: O(n/k) */ public class ReverseAlternateKNodesInLinkedList { private static SinglyLinkedListNode head = null; private static SinglyLinkedListNode reverseAlternateKNodesInLinkedList(SinglyLinkedListNode head, int k, boolean needReverse) { int x = k; SinglyLinkedListNode previousNode = null; SinglyLinkedListNode currentNode = head; SinglyLinkedListNode nextNode = null; //Check if needReverse = true then we will reverse first k nodes if (needReverse) { while(x > 0 && currentNode != null) { nextNode = currentNode.next; //This is reversing link currentNode.next = previousNode; //Now move to next node previousNode = currentNode; currentNode = nextNode; x--; } if (currentNode != null) { //For remaining list head.next = reverseAlternateKNodesInLinkedList(currentNode, k, false); } //Return new head return previousNode; } else { //check if needReverse = false then we will not reverse the next k nodes //This is to return SinglyLinkedListNode previousNodeToReturn = currentNode; //Just move forward using next while(x > 1 && currentNode != null) { currentNode = currentNode.next; x--; } //Here it should be currentNode.next instead of head.next if (currentNode.next != null) { currentNode.next = reverseAlternateKNodesInLinkedList(currentNode.next, k, true); } return previousNodeToReturn; } } private static void addAtBegin(int data) { SinglyLinkedListNode newNode = new SinglyLinkedListNode(data); newNode.next = head; head = newNode; } private static void 
displayLinkedList(SinglyLinkedListNode head) { SinglyLinkedListNode currentNode = head; while (currentNode != null) { System.out.print("--> " + currentNode.data); currentNode = currentNode.next; } } public static void main(String[] args) { addAtBegin(12); addAtBegin(11); addAtBegin(10); addAtBegin(9); addAtBegin(8); addAtBegin(7); addAtBegin(6); addAtBegin(5); addAtBegin(4); addAtBegin(3); addAtBegin(2); addAtBegin(1); System.out.println("Original Linked List: "); displayLinkedList(head); System.out.println(); SinglyLinkedListNode node = reverseAlternateKNodesInLinkedList(head, 3, true); System.out.println("Reversed Linked List: "); displayLinkedList(node); System.out.println(); } }
package uk.gov.hmcts.reform.divorce.orchestration.tasks.generalreferral; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import uk.gov.hmcts.reform.divorce.orchestration.client.FeesAndPaymentsClient; import uk.gov.hmcts.reform.divorce.orchestration.domain.model.CcdFields; import uk.gov.hmcts.reform.divorce.orchestration.domain.model.fees.OrderSummary; import uk.gov.hmcts.reform.divorce.orchestration.framework.workflow.task.TaskContext; import uk.gov.hmcts.reform.divorce.orchestration.framework.workflow.task.generics.FeeLookupWithoutNoticeTask; import java.util.Map; import static uk.gov.hmcts.reform.divorce.orchestration.tasks.util.TaskUtils.getCaseId; @Component @Slf4j public class GeneralReferralApplicationFeeLookupTask extends FeeLookupWithoutNoticeTask { public GeneralReferralApplicationFeeLookupTask(FeesAndPaymentsClient feesAndPaymentsClient) { super(feesAndPaymentsClient); } @Override protected Map<String, Object> furtherUpdateCaseData(TaskContext context, Map<String, Object> updatedCaseData) { String feeValue = getFeeValue(updatedCaseData); log.info("CaseId: {}, populate field {} with fee value from SummaryOrder (in pounds and pennies)", getCaseId(context), feeValue); updatedCaseData.put(CcdFields.FEE_AMOUNT_WITHOUT_NOTICE, feeValue); return updatedCaseData; } private String getFeeValue(Map<String, Object> caseData) { OrderSummary orderSummary = (OrderSummary) caseData.get(getOrderSummaryFieldName()); return orderSummary.getPaymentTotalInPounds(); } @Override public String getOrderSummaryFieldName() { return CcdFields.GENERAL_REFERRAL_WITHOUT_NOTICE_FEE_SUMMARY; } }
Two Glen Burnie residents were arrested and charged with drug possession offenses after police say they were found with small amounts of various drugs as well as 158 grams of Kratom, a botanical substance that has stirred controversy over its legality in the U.S. Anne Arundel police said officers executed a search warrant at 117 Crest Avenue on Tuesday at 8:15 a.m. Police said several drugs were seized, including .45 gram of heroin, 8.24 grams of marijuana, 96 blotter tabs of LSD and 2.64 grams of MDMA, sometimes known as “Molly.” Officers also took 158.09 grams of Kratom from the home, police said. Police arrested Michaela Elizabeth Gran, 21, and Chase Seven Gran, 23, both residents of the Crest Ave home. Kratom, which is not illegal in Maryland, is a botanical substance native to Southeast Asia that causes sedative effects, according to the U.S. Drug Enforcement Agency. Police said they did not charge the two with any offenses related to the possession of Kratom, but that the drug was seized to determine if it was laced with any other illicit substances. Some believe the plant — typically sold in capsule form and can be smoked, ingested or mixed with tea — can be used as an alternative to opioid-based painkillers and can help wean people off of heroin addiction. However, the Food and Drug Administration issued a public health advisory in November saying the agency “is aware of reports of 36 deaths associated with the use of kratom-containing products.” twitter.com/PhilDavis_CG
<filename>Tasks/OrchestratorV2/tests/orchestrator/deployer.test.ts import "mocha"; import * as chai from "chai"; import * as TypeMoq from "typemoq"; import { TeamProject } from "azure-devops-node-api/interfaces/CoreInterfaces"; import { Release, ReleaseEnvironment } from "azure-devops-node-api/interfaces/ReleaseInterfaces"; import { IDebugCreator } from "../../interfaces/loggers/debugcreator"; import { IConsoleLogger } from "../../interfaces/loggers/consolelogger"; import { IDebugLogger } from "../../interfaces/loggers/debuglogger"; import { IReleaseHelper } from "../../interfaces/helpers/releasehelper"; import { IDetails } from "../../interfaces/task/details"; import { IReleaseJob } from "../../interfaces/common/releasejob"; import { ICommonHelper } from "../../interfaces/helpers/commonhelper"; import { IApprover } from "../../interfaces/orchestrator/approver"; import { IMonitor } from "../../interfaces/orchestrator/monitor"; import { IReporter } from "../../interfaces/orchestrator/reporter"; import { IDeployer } from "../../interfaces/orchestrator/deployer"; import { Deployer } from "../../orchestrator/deployer"; import { IReleaseProgress } from "../../interfaces/common/releaseprogress"; import { IStageProgress } from "../../interfaces/common/stageprogress"; import { ReleaseStatus } from "../../interfaces/common/releasestatus"; import { ISettings } from "../../interfaces/common/settings"; describe("Deployer", () => { const debugLoggerMock = TypeMoq.Mock.ofType<IDebugLogger>(); const debugCreatorMock = TypeMoq.Mock.ofType<IDebugCreator>(); debugCreatorMock.setup((x) => x.extend(TypeMoq.It.isAnyString())).returns(() => debugLoggerMock.target); debugLoggerMock.setup((x) => x.extend(TypeMoq.It.isAnyString())).returns(() => debugLoggerMock.target); const consoleLoggerMock = TypeMoq.Mock.ofType<IConsoleLogger>(); consoleLoggerMock.setup((x) => x.log(TypeMoq.It.isAny())).returns(() => null); const commonHelperMock = TypeMoq.Mock.ofType<ICommonHelper>(); const 
releaseHelperMock = TypeMoq.Mock.ofType<IReleaseHelper>(); const releaseApproverMock = TypeMoq.Mock.ofType<IApprover>(); const progressMonitorMock = TypeMoq.Mock.ofType<IMonitor>(); const progressReporterMock = TypeMoq.Mock.ofType<IReporter>(); progressReporterMock.setup((x) => x.getStageProgress(TypeMoq.It.isAny())).returns(() => ""); progressReporterMock.setup((x) => x.getStagesProgress(TypeMoq.It.isAny())).returns(() => ""); let detailsMock: TypeMoq.IMock<IDetails>; let releaseJobMock: TypeMoq.IMock<IReleaseJob>; let settingsMock: TypeMoq.IMock<ISettings>; let projectMock: TypeMoq.IMock<TeamProject>; let releaseMock: TypeMoq.IMock<Release>; let releaseProgressMock: TypeMoq.IMock<IReleaseProgress>; let releaseStatusMock: TypeMoq.IMock<Release>; let stageOneProgress: TypeMoq.IMock<IStageProgress>; let stageTwoProgress: TypeMoq.IMock<IStageProgress>; const deployer: IDeployer = new Deployer(commonHelperMock.target, releaseHelperMock.target, releaseApproverMock.target, progressMonitorMock.target, progressReporterMock.target, debugCreatorMock.target, consoleLoggerMock.target); beforeEach(async () => { detailsMock = TypeMoq.Mock.ofType<IDetails>(); releaseJobMock = TypeMoq.Mock.ofType<IReleaseJob>(); settingsMock = TypeMoq.Mock.ofType<ISettings>(); projectMock = TypeMoq.Mock.ofType<TeamProject>(); projectMock.target.id = "1"; releaseMock = TypeMoq.Mock.ofType<Release>(); releaseMock.target.id = 1; releaseProgressMock = TypeMoq.Mock.ofType<IReleaseProgress>(); releaseStatusMock = TypeMoq.Mock.ofType<Release>(); stageOneProgress = TypeMoq.Mock.ofType<IStageProgress>(); stageOneProgress.setup((x) => x.name).returns(() => "My-Stage-One"); stageTwoProgress = TypeMoq.Mock.ofType<IStageProgress>(); stageTwoProgress.setup((x) => x.name).returns(() => "My-Stage-Two"); releaseJobMock.target.settings = settingsMock.target; releaseJobMock.target.project = projectMock.target; releaseJobMock.target.release = releaseMock.target; commonHelperMock.reset(); releaseHelperMock.reset(); 
releaseApproverMock.reset(); progressMonitorMock.reset(); progressReporterMock.reset(); }); it("Should deploy manual release", async () => { //#region ARRANGE releaseProgressMock.setup((x) => x.stages).returns( () => [ stageOneProgress.target, stageTwoProgress.target ]); progressMonitorMock.setup((x) => x.createProgress(releaseJobMock.target)).returns( () => releaseProgressMock.target); progressMonitorMock.setup((x) => x.getPendingStages(releaseProgressMock.target)).returns( () => [ stageOneProgress.target ]); //#region STAGE const stageStatusMock = TypeMoq.Mock.ofType<ReleaseEnvironment>(); releaseHelperMock.setup((x) => x.getReleaseStatus(releaseJobMock.target.project.name!, releaseJobMock.target.release.id!)).returns( () => Promise.resolve(releaseStatusMock.target)); releaseHelperMock.setup((x) => x.getStageStatus(releaseStatusMock.target, stageOneProgress.target.name)).returns( () => Promise.resolve(stageStatusMock.target)); progressMonitorMock.setup((x) => x.updateStageProgress(stageOneProgress.target, stageStatusMock.target)).returns( () => null); progressMonitorMock.setup((x) => x.isStagePending(stageOneProgress.target)).returns( () => true); //#region START releaseHelperMock.setup((x) => x.startStage(stageStatusMock.target, releaseJobMock.target.project.name!, TypeMoq.It.isAnyString())).returns( () => Promise.resolve(stageStatusMock.target)); progressMonitorMock.setup((x) => x.updateStageProgress(stageOneProgress.target, stageStatusMock.target)).returns( () => null); //#endregion progressMonitorMock.setup((x) => x.isStageCompleted(stageOneProgress.target)).returns( () => false); //#region MONITOR releaseHelperMock.setup((x) => x.getReleaseStatus(releaseJobMock.target.project.name!, releaseJobMock.target.release.id!)).returns( () => Promise.resolve(releaseStatusMock.target)); releaseHelperMock.setup((x) => x.getStageStatus(releaseStatusMock.target, stageOneProgress.target.name)).returns( () => Promise.resolve(stageStatusMock.target)); 
releaseApproverMock.setup((x) => x.isStageApproved(stageOneProgress.target, stageStatusMock.target)).returns( () => Promise.resolve(false)); releaseApproverMock.setup((x) => x.approveStage(stageOneProgress.target, stageStatusMock.target, releaseJobMock.target.project.name!, detailsMock.target, releaseJobMock.target.settings)).returns( () => Promise.resolve()); progressMonitorMock.setup((x) => x.updateStageProgress(stageOneProgress.target, stageStatusMock.target)).returns( () => null); progressMonitorMock.setup((x) => x.updateReleaseProgress(releaseProgressMock.target)).returns( () => null); releaseProgressMock.setup((x) => x.status).returns( () => ReleaseStatus.Succeeded); progressMonitorMock.setup((x) => x.isStageCompleted(stageOneProgress.target)).returns( () => true); //#endregion //#endregion //#endregion //#region ACT const result = await deployer.deployManual(releaseJobMock.target, detailsMock.target); //#endregion //#region ASSERT chai.expect(result).to.not.eq(null); chai.expect(result.status).to.eq(ReleaseStatus.Succeeded); //#endregion }); it("Should deploy automated release", async () => { //#region ARRANGE releaseProgressMock.setup((x) => x.stages).returns( () => [ stageOneProgress.target, stageTwoProgress.target ]); progressMonitorMock.setup((x) => x.createProgress(releaseJobMock.target)).returns( () => releaseProgressMock.target); releaseHelperMock.setup((x) => x.getReleaseStatus(releaseJobMock.target.project.name!, releaseJobMock.target.release.id!)).returns( () => Promise.resolve(releaseStatusMock.target)); progressMonitorMock.setup((x) => x.getActiveStages(releaseProgressMock.target)).returns( () => [ stageOneProgress.target ]); //#region STAGE const stageStatusMock = TypeMoq.Mock.ofType<ReleaseEnvironment>(); releaseHelperMock.setup((x) => x.getStageStatus(releaseStatusMock.target, stageOneProgress.target.name)).returns( () => Promise.resolve(stageStatusMock.target)); releaseApproverMock.setup((x) => x.isStageApproved(stageOneProgress.target, 
stageStatusMock.target)).returns( () => Promise.resolve(false)); releaseApproverMock.setup((x) => x.approveStage(stageOneProgress.target, stageStatusMock.target, releaseJobMock.target.project.name!, detailsMock.target, releaseJobMock.target.settings)).returns( () => Promise.resolve()); progressMonitorMock.setup((x) => x.updateStageProgress(stageOneProgress.target, stageStatusMock.target)).returns( () => null); progressMonitorMock.setup((x) => x.isStageCompleted(stageOneProgress.target)).returns( () => true); //#endregion progressMonitorMock.setup((x) => x.updateReleaseProgress(releaseProgressMock.target)).returns( () => null); releaseProgressMock.setup((x) => x.status).returns( () => ReleaseStatus.InProgress); commonHelperMock.setup((x) => x.wait(releaseJobMock.target.settings.sleep)).returns( () => Promise.resolve()); releaseProgressMock.setup((x) => x.status).returns( () => ReleaseStatus.Succeeded); //#endregion //#region ACT const result = await deployer.deployAutomated(releaseJobMock.target, detailsMock.target); //#endregion //#region ASSERT chai.expect(result).to.not.eq(null); chai.expect(result.status).to.eq(ReleaseStatus.Succeeded); //#endregion }); });
/**
 * Utilities for Ebean entities.
 * Created by pmendoza on 10/22/15.
 */
public class EntityUtils {

    /**
     * Marks a collection field whose children should have their back-reference
     * to the owning entity set to null when the owner is deleted.
     */
    @Documented
    @Target({ElementType.FIELD})
    @Retention(RetentionPolicy.RUNTIME)
    public @interface NullifyOnDelete {
    }

    /**
     * For every field of {@code base} annotated with {@link NullifyOnDelete}
     * that holds a non-empty List, finds the child bean property whose type
     * can hold {@code base}, sets it to null on every child and saves each
     * child.
     * <p>
     * Best-effort: reflection failures on one field are logged via
     * printStackTrace and do not stop processing of the remaining fields.
     */
    public static void startNullifying(BaseEntity base) {
        try {
            Map<Field, PropertyDescriptor> set =
                    ReflectionUtils.findFieldsWithAnnotation(base.getClass(), NullifyOnDelete.class);
            for (Map.Entry<Field, PropertyDescriptor> entry : set.entrySet()) {
                try {
                    Object obj = entry.getValue().getReadMethod().invoke(base);
                    if (!(obj instanceof List)) {
                        continue;
                    }
                    List list = (List) obj;
                    // BUGFIX: was "return", which silently aborted processing
                    // of all remaining annotated fields as soon as one of the
                    // lists was empty.
                    if (list.isEmpty()) {
                        continue;
                    }
                    // Infer the child type from the first element and locate
                    // the child property that references the parent entity.
                    Class childClass = list.get(0).getClass();
                    PropertyDescriptor setter = null;
                    for (PropertyDescriptor cpd
                            : Introspector.getBeanInfo(childClass).getPropertyDescriptors()) {
                        if (cpd.getPropertyType().isAssignableFrom(base.getClass())) {
                            setter = cpd;
                            break;
                        }
                    }
                    // BUGFIX: was "return"; a child type with no matching
                    // property should not abort the remaining fields either.
                    if (setter == null) {
                        continue;
                    }
                    for (Object child : list) {
                        if (child instanceof BaseEntity) {
                            // Clear the back-reference and persist the child.
                            // (Removed leftover Logger.error debug output.)
                            setter.getWriteMethod().invoke(child, new Object[]{null});
                            ((BaseEntity) child).save();
                        }
                    }
                } catch (Exception e) {
                    // Single catch replaces the redundant
                    // IllegalAccessException + Exception pair, same handling.
                    e.printStackTrace();
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
def plot_feature_VS_Observed(feature, df, linecolor):
    """Plot one model feature against the 'Observed' series over time.

    Draws two plotly line traces over df['Year']: the chosen feature column
    in ``linecolor`` and the 'Observed' column in dim gray, then shows the
    resulting figure.

    Args:
        feature: Name of the column in ``df`` to plot.
        df: DataFrame with 'Year', 'Observed' and ``feature`` columns.
        linecolor: Plotly color string for the feature trace.
    """
    assert isinstance(df, pd.DataFrame)
    assert isinstance(feature, str)
    assert isinstance(linecolor, str)

    figure = go.Figure()

    feature_trace = go.Scatter(
        x=df['Year'],
        y=df[feature],
        name=feature,
        line_color=linecolor,
        opacity=1,
    )
    observed_trace = go.Scatter(
        x=df['Year'],
        y=df['Observed'],
        name="Observed",
        line_color='dimgray',
        opacity=0.5,
    )
    figure.add_trace(feature_trace)
    figure.add_trace(observed_trace)

    figure.update_layout(
        plot_bgcolor='rgba(0, 0, 0,0)',
        xaxis_title="1880- 2005",
        yaxis_title="Average Temp (K)",
        title_text=feature + " vs Observed",
        showlegend=True,
    )
    figure.show()
Chemical quality of maternal drinking water and congenital heart disease. We undertook a case-control study to investigate the association between chemicals in maternal drinking water consumed during pregnancy and congenital heart disease in the offspring. Two hundred and seventy affected children and 665 healthy children were enrolled in the study. Information on contaminant levels in maternal drinking water was available from records of routine water analysis of samples taken from public taps in the communities where the mothers resided during pregnancy. Mothers provided information during a telephone interview on their health, pregnancy management, and demographic characteristics. Nine inorganic metals were analysed for detection of an association with congenital heart disease. The chemical exposures of particular interest were arsenic, lead, mercury and selenium. None of the chemicals was associated materially with an increase in the frequency of congenital heart disease overall. Arsenic exposure at any detectable level was associated with a threefold increase in occurrence of coarctation of the aorta (prevalence odds ratio = 3.4, 95% confidence interval = 1.3-8.9). Detectable traces of selenium in drinking water were associated with a lower frequency of any congenital heart disease than was observed among children exposed to drinking water not containing detectable levels of selenium (prevalence odds ratio = 0.62, 95% confidence limits = 0.40-0.97). A dose-response effect was observed over four levels of selenium exposure. Non-differential errors in the measurement and classification of exposure to contaminants routinely monitored in drinking water could account for lack of positive findings. In addition, most of the contaminant levels were below the maximum levels set by the Environmental Protection Agency, so that lack of evidence of effect may have been due to the low exposure levels in this population.
/********************************************************************** * File: fixxht.cpp (Formerly fixxht.c) * Description: Improve x_ht and look out for case inconsistencies * Author: Phil Cheatle * Created: Thu Aug 5 14:11:08 BST 1993 * * (C) Copyright 1992, Hewlett-Packard Ltd. ** Licensed under the Apache License, Version 2.0 (the "License"); ** you may not use this file except in compliance with the License. ** You may obtain a copy of the License at ** http://www.apache.org/licenses/LICENSE-2.0 ** Unless required by applicable law or agreed to in writing, software ** distributed under the License is distributed on an "AS IS" BASIS, ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ** See the License for the specific language governing permissions and ** limitations under the License. * **********************************************************************/ #include <string.h> #include <ctype.h> #include "params.h" #include "float2int.h" #include "tesseractclass.h" namespace tesseract { // Fixxht overview. // Premise: Initial estimate of x-height is adequate most of the time, but // occasionally it is incorrect. Most notable causes of failure are: // 1. Small caps, where the top of the caps is the same as the body text // xheight. For small caps words the xheight needs to be reduced to correctly // recognize the caps in the small caps word. // 2. All xheight lines, such as summer. Here the initial estimate will have // guessed that the blob tops are caps and will have placed the xheight too low. // 3. Noise/logos beside words, or changes in font size on a line. Such // things can blow the statistics and cause an incorrect estimate. // 4. Incorrect baseline. Can happen when 2 columns are incorrectly merged. // In this case the x-height is often still correct. // // Algorithm. // Compare the vertical position (top only) of alphnumerics in a word with // the range of positions in training data (in the unicharset). 
// See CountMisfitTops. If any characters disagree sufficiently with the // initial xheight estimate, then recalculate the xheight, re-run OCR on // the word, and if the number of vertical misfits goes down, along with // either the word rating or certainty, then keep the new xheight. // The new xheight is calculated as follows:ComputeCompatibleXHeight // For each alphanumeric character that has a vertically misplaced top // (a misfit), yet its bottom is within the acceptable range (ie it is not // likely a sub-or super-script) calculate the range of acceptable xheight // positions from its range of tops, and give each value in the range a // number of votes equal to the distance of its top from its acceptance range. // The x-height position with the median of the votes becomes the new // x-height. This assumes that most characters will be correctly recognized // even if the x-height is incorrect. This is not a terrible assumption, but // it is not great. An improvement would be to use a classifier that does // not care about vertical position or scaling at all. // Separately collect stats on shifted baselines and apply the same logic to // computing a best-fit shift to fix the error. If the baseline needs to be // shifted, but the x-height is OK, returns the original x-height along with // the baseline shift to indicate that recognition needs to re-run. // If the max-min top of a unicharset char is bigger than kMaxCharTopRange // then the char top cannot be used to judge misfits or suggest a new top. const int kMaxCharTopRange = 48; // Returns the number of misfit blob tops in this word. 
int Tesseract::CountMisfitTops(WERD_RES *word_res) {
  int bad_blobs = 0;
  int num_blobs = word_res->rebuild_word->NumBlobs();
  for (int blob_id = 0; blob_id < num_blobs; ++blob_id) {
    TBLOB* blob = word_res->rebuild_word->blobs[blob_id];
    UNICHAR_ID class_id = word_res->best_choice->unichar_id(blob_id);
    // Only alphanumeric classes are judged; punctuation etc. is ignored.
    if (unicharset.get_isalpha(class_id) || unicharset.get_isdigit(class_id)) {
      int top = blob->bounding_box().top();
      // Clip the top to the limit of normalized feature space.
      if (top >= INT_FEAT_RANGE)
        top = INT_FEAT_RANGE - 1;
      int min_bottom, max_bottom, min_top, max_top;
      unicharset.get_top_bottom(class_id, &min_bottom, &max_bottom,
                                &min_top, &max_top);
      // Chars with a wild top range (see kMaxCharTopRange) cannot be judged.
      if (max_top - min_top > kMaxCharTopRange)
        continue;
      // A blob is a misfit when its top lies outside the trained top range,
      // widened by the acceptance tolerance on both sides.
      bool bad = top < min_top - x_ht_acceptance_tolerance ||
                 top > max_top + x_ht_acceptance_tolerance;
      if (bad)
        ++bad_blobs;
      if (debug_x_ht_level >= 1) {
        tprintf("Class %s is %s with top %d vs limits of %d->%d, +/-%d\n",
                unicharset.id_to_unichar(class_id),
                bad ? "Misfit" : "OK", top,
                min_top, max_top,
                static_cast<int>(x_ht_acceptance_tolerance));
      }
    }
  }
  return bad_blobs;
}

// Returns a new x-height maximally compatible with the result in word_res.
// See comment above for overall algorithm.
float Tesseract::ComputeCompatibleXheight(WERD_RES *word_res,
                                          float* baseline_shift) {
  // Vote accumulators: candidate x-height positions and candidate baseline
  // (bottom) shifts. The do-loop re-votes after a bottom shift is applied.
  STATS top_stats(0, MAX_UINT8);
  STATS shift_stats(-MAX_UINT8, MAX_UINT8);
  int bottom_shift = 0;
  int num_blobs = word_res->rebuild_word->NumBlobs();
  do {
    top_stats.clear();
    shift_stats.clear();
    for (int blob_id = 0; blob_id < num_blobs; ++blob_id) {
      TBLOB* blob = word_res->rebuild_word->blobs[blob_id];
      UNICHAR_ID class_id = word_res->best_choice->unichar_id(blob_id);
      if (unicharset.get_isalpha(class_id) ||
          unicharset.get_isdigit(class_id)) {
        int top = blob->bounding_box().top() + bottom_shift;
        // Clip the top to the limit of normalized feature space.
        if (top >= INT_FEAT_RANGE)
          top = INT_FEAT_RANGE - 1;
        int bottom = blob->bounding_box().bottom() + bottom_shift;
        int min_bottom, max_bottom, min_top, max_top;
        unicharset.get_top_bottom(class_id, &min_bottom, &max_bottom,
                                  &min_top, &max_top);
        // Chars with a wild top range would mess up the result so ignore them.
        if (max_top - min_top > kMaxCharTopRange)
          continue;
        // How far outside the (tolerance-widened) trained top range this
        // blob's top sits; <= 0 means the top is acceptable as-is.
        int misfit_dist = MAX((min_top - x_ht_acceptance_tolerance) - top,
                              top - (max_top + x_ht_acceptance_tolerance));
        int height = top - kBlnBaselineOffset;
        if (debug_x_ht_level >= 2) {
          tprintf("Class %s: height=%d, bottom=%d,%d top=%d,%d, actual=%d,%d: ",
                  unicharset.id_to_unichar(class_id),
                  height, min_bottom, max_bottom, min_top, max_top,
                  bottom, top);
        }
        // Use only chars that fit in the expected bottom range, and where
        // the range of tops is sensibly near the xheight.
        if (min_bottom <= bottom + x_ht_acceptance_tolerance &&
            bottom - x_ht_acceptance_tolerance <= max_bottom &&
            min_top > kBlnBaselineOffset &&
            max_top - kBlnBaselineOffset >= kBlnXHeight &&
            misfit_dist > 0) {
          // Compute the x-height position using proportionality between the
          // actual height and expected height.
          int min_xht = DivRounded(height * kBlnXHeight,
                                   max_top - kBlnBaselineOffset);
          int max_xht = DivRounded(height * kBlnXHeight,
                                   min_top - kBlnBaselineOffset);
          if (debug_x_ht_level >= 2) {
            tprintf(" xht range min=%d, max=%d\n", min_xht, max_xht);
          }
          // The range of expected heights gets a vote equal to the distance
          // of the actual top from the expected top.
          for (int y = min_xht; y <= max_xht; ++y)
            top_stats.add(y, misfit_dist);
        } else if ((min_bottom > bottom + x_ht_acceptance_tolerance ||
                    bottom - x_ht_acceptance_tolerance > max_bottom) &&
                   bottom_shift == 0) {
          // Get the range of required bottom shift.
          int min_shift = min_bottom - bottom;
          int max_shift = max_bottom - bottom;
          if (debug_x_ht_level >= 2) {
            tprintf(" bottom shift min=%d, max=%d\n", min_shift, max_shift);
          }
          // The range of expected shifts gets a vote equal to the min distance
          // of the actual bottom from the expected bottom, spread over the
          // range of its acceptance.
          int misfit_weight = abs(min_shift);
          if (max_shift > min_shift)
            misfit_weight /= max_shift - min_shift;
          for (int y = min_shift; y <= max_shift; ++y)
            shift_stats.add(y, misfit_weight);
        } else {
          if (bottom_shift == 0) {
            // Things with bottoms that are already ok need to say so, on the
            // 1st iteration only.
            shift_stats.add(0, kBlnBaselineOffset);
          }
          if (debug_x_ht_level >= 2) {
            tprintf(" already OK\n");
          }
        }
      }
    }
    // A baseline (bottom) shift wins over an x-height change only when it
    // collected more votes; if applied, the loop re-votes with shifted blobs.
    if (shift_stats.get_total() > top_stats.get_total()) {
      bottom_shift = IntCastRounded(shift_stats.median());
      if (debug_x_ht_level >= 2) {
        tprintf("Applying bottom shift=%d\n", bottom_shift);
      }
    }
  } while (bottom_shift != 0 &&
           top_stats.get_total() < shift_stats.get_total());
  // Baseline shift is opposite sign to the bottom shift.
  *baseline_shift = -bottom_shift / word_res->denorm.y_scale();
  if (debug_x_ht_level >= 2) {
    tprintf("baseline shift=%g\n", *baseline_shift);
  }
  // No x-height votes at all: return the existing x-height if a baseline
  // shift was found (so recognition re-runs), else 0 meaning "no change".
  if (top_stats.get_total() == 0)
    return bottom_shift != 0 ? word_res->x_height : 0.0f;
  // The new xheight is just the median vote, which is then scaled out
  // of BLN space back to pixel space to get the x-height in pixel space.
  float new_xht = top_stats.median();
  if (debug_x_ht_level >= 2) {
    tprintf("Median xht=%f\n", new_xht);
    tprintf("Mode20:A: New x-height = %f (norm), %f (orig)\n",
            new_xht, new_xht / word_res->denorm.y_scale());
  }
  // The xheight must change by at least x_ht_min_change to be used.
  if (fabs(new_xht - kBlnXHeight) >= x_ht_min_change)
    return new_xht / word_res->denorm.y_scale();
  else
    return bottom_shift != 0 ? word_res->x_height : 0.0f;
}

}  // namespace tesseract
import { ButtonProps } from '@material-ui/core';

/**
 * Props for a two-option toggle button component.
 * Extends Material-UI ButtonProps, so any standard Button prop can also be
 * passed through to the underlying button.
 */
export default interface IToggleButtonProps extends ButtonProps {
  /**
   * Called when the selected side changes. Receives the newly selected value
   * (leftValue or rightValue — presumably; TODO confirm against the component
   * implementation) and, optionally, this component's id.
   */
  onChange: (value: string | number | any, id?: string | number | any) => void;
  /** Optional identifier passed back through onChange. */
  id?: string | undefined;
  /** Display label for the left option. */
  leftLabel: string;
  /** Display label for the right option. */
  rightLabel: string;
  /** Value associated with the left option. */
  leftValue: number | string;
  /** Value associated with the right option. */
  rightValue: number | string;
  /** Currently selected value. */
  value: string | number;
  /** Title text shown for the toggle group. */
  title: string;
}
package thunder //Thunder resource SnmpServerEnableTrapsRoutingIsis import ( "context" go_thunder "github.com/go_thunder/thunder" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "util" ) func resourceSnmpServerEnableTrapsRoutingIsis() *schema.Resource { return &schema.Resource{ CreateContext: resourceSnmpServerEnableTrapsRoutingIsisCreate, UpdateContext: resourceSnmpServerEnableTrapsRoutingIsisUpdate, ReadContext: resourceSnmpServerEnableTrapsRoutingIsisRead, DeleteContext: resourceSnmpServerEnableTrapsRoutingIsisDelete, Schema: map[string]*schema.Schema{ "isis_authentication_failure": { Type: schema.TypeInt, Optional: true, Description: "", }, "uuid": { Type: schema.TypeString, Optional: true, Description: "", }, "isis_protocols_supported_mismatch": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_rejected_adjacency": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_max_area_addresses_mismatch": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_corrupted_lsp_detected": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_originating_lsp_buffer_size_mismatch": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_area_mismatch": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_lsp_too_large_to_propagate": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_own_lsp_purge": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_sequence_number_skip": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_database_overload": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_attempt_to_exceed_max_sequence": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_id_len_mismatch": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_authentication_type_failure": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_version_skew": 
{ Type: schema.TypeInt, Optional: true, Description: "", }, "isis_manual_address_drops": { Type: schema.TypeInt, Optional: true, Description: "", }, "isis_adjacency_change": { Type: schema.TypeInt, Optional: true, Description: "", }, }, } } func resourceSnmpServerEnableTrapsRoutingIsisCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) var diags diag.Diagnostics if client.Host != "" { logger.Println("[INFO] Creating SnmpServerEnableTrapsRoutingIsis (Inside resourceSnmpServerEnableTrapsRoutingIsisCreate) ") data := dataToSnmpServerEnableTrapsRoutingIsis(d) logger.Println("[INFO] received formatted data from method data to SnmpServerEnableTrapsRoutingIsis --") d.SetId("1") err := go_thunder.PostSnmpServerEnableTrapsRoutingIsis(client.Token, data, client.Host) if err != nil { return diag.FromErr(err) } return resourceSnmpServerEnableTrapsRoutingIsisRead(ctx, d, meta) } return diags } func resourceSnmpServerEnableTrapsRoutingIsisRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) var diags diag.Diagnostics logger.Println("[INFO] Reading SnmpServerEnableTrapsRoutingIsis (Inside resourceSnmpServerEnableTrapsRoutingIsisRead)") if client.Host != "" { logger.Println("[INFO] Fetching service Read") data, err := go_thunder.GetSnmpServerEnableTrapsRoutingIsis(client.Token, client.Host) if err != nil { return diag.FromErr(err) } if data == nil { logger.Println("[INFO] No data found ") return nil } return diags } return nil } func resourceSnmpServerEnableTrapsRoutingIsisUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { return resourceSnmpServerEnableTrapsRoutingIsisRead(ctx, d, meta) } func resourceSnmpServerEnableTrapsRoutingIsisDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { return 
resourceSnmpServerEnableTrapsRoutingIsisRead(ctx, d, meta) } func dataToSnmpServerEnableTrapsRoutingIsis(d *schema.ResourceData) go_thunder.SnmpServerEnableTrapsRoutingIsis { var vc go_thunder.SnmpServerEnableTrapsRoutingIsis var c go_thunder.SnmpServerEnableTrapsRoutingIsisInstance c.IsisAuthenticationFailure = d.Get("isis_authentication_failure").(int) c.IsisProtocolsSupportedMismatch = d.Get("isis_protocols_supported_mismatch").(int) c.IsisRejectedAdjacency = d.Get("isis_rejected_adjacency").(int) c.IsisMaxAreaAddressesMismatch = d.Get("isis_max_area_addresses_mismatch").(int) c.IsisCorruptedLSPDetected = d.Get("isis_corrupted_lsp_detected").(int) c.IsisOriginatingLSPBufferSizeMismatch = d.Get("isis_originating_lsp_buffer_size_mismatch").(int) c.IsisAreaMismatch = d.Get("isis_area_mismatch").(int) c.IsisLSPTooLargeToPropagate = d.Get("isis_lsp_too_large_to_propagate").(int) c.IsisOwnLSPPurge = d.Get("isis_own_lsp_purge").(int) c.IsisSequenceNumberSkip = d.Get("isis_sequence_number_skip").(int) c.IsisDatabaseOverload = d.Get("isis_database_overload").(int) c.IsisAttemptToExceedMaxSequence = d.Get("isis_attempt_to_exceed_max_sequence").(int) c.IsisIDLenMismatch = d.Get("isis_id_len_mismatch").(int) c.IsisAuthenticationTypeFailure = d.Get("isis_authentication_type_failure").(int) c.IsisVersionSkew = d.Get("isis_version_skew").(int) c.IsisManualAddressDrops = d.Get("isis_manual_address_drops").(int) c.IsisAdjacencyChange = d.Get("isis_adjacency_change").(int) vc.IsisAuthenticationFailure = c return vc }