import { Tile } from "@akashic-extension/akashic-tile";
import { sampleScene3 } from "./sampleScene3";

var game = g.game;

export function sampleScene2() {
    var scene = new g.Scene({
        game: game,
        assetIds: ["map", "frame", "right"]
    });
    scene.loaded.add(function() {
        // background
        var rect = new g.FilledRect({
            scene: scene,
            cssColor: "black",
            width: game.width,
            height: game.height
        });
        scene.append(rect);

        // tile
        var mapAsset = scene.asset.getImageById("map");
        var tileArray: number[][] = [];
        for (var i = 0; i < 50; ++i) {
            tileArray[i] = [];
            for (var j = 0; j < 50; ++j) {
                tileArray[i].push(Math.floor(2 * game.random.generate()));
            }
        }
        var tile = new Tile({
            scene: scene,
            src: mapAsset,
            tileWidth: 32,
            tileHeight: 32,
            tileData: tileArray
        });
        tile.redrawArea = {
            x: game.width / 4,
            y: game.height / 4,
            width: game.width / 2,
            height: game.height / 2
        };
        tile.touchable = true;
        scene.append(tile);

        tile.update.add(function() {
            if (game.age % 15 !== 0) return;
            for (var i = 0; i < this.tileData.length; ++i) {
                for (var j = 0; j < this.tileData[i].length; ++j) {
                    if (this.tileData[i][j] === 1) {
                        this.tileData[i][j] = 2;
                    } else if (this.tileData[i][j] === 2) {
                        this.tileData[i][j] = 1;
                    }
                }
            }
            this.invalidate();
        }, tile);

        tile.pointDown.add(function(e: any) {
            this.bx = this.x;
            this.by = this.y;
            this.cx = this.redrawArea.x;
            this.cy = this.redrawArea.y;
        }, tile);

        tile.pointMove.add(function(e: any) {
            this.x = this.bx + e.startDelta.x;
            this.y = this.by + e.startDelta.y;
            this.redrawArea = {
                x: this.cx - e.startDelta.x,
                y: this.cy - e.startDelta.y,
                width: game.width / 2,
                height: game.height / 2
            };
            this.invalidate();
        }, tile);

        tile.pointUp.add(function(e: any) {
            this.invalidate();
        }, tile);

        // frame
        var frameAsset = scene.asset.getImageById("frame");
        var frame = new g.Sprite({
            scene: scene,
            src: frameAsset
        });
        frame.x = (game.width - frameAsset.width) / 2;
        frame.y = (game.height - frameAsset.height) / 2;
        scene.append(frame);

        // button that switches to the next scene
        var next = new g.Sprite({
            scene: scene,
            src: scene.asset.getImageById("right")
        });
        next.x = game.width - 20;
        next.y = game.height - 20;
        scene.append(next);
        next.touchable = true;
        next.pointDown.add(function() {
            var scene3 = sampleScene3();
            game.replaceScene(scene3);
        }, next);
    });
    return scene;
}
/**
 * Observer class that registers callback member functions.
 */
class ObserverClass {
public:
    explicit ObserverClass(std::shared_ptr<DeviceSettingsManager>& manager);
    ~ObserverClass() = default;

    void onAlarmVolumeRamp(const types::AlarmVolumeRampTypes& value);
    void onWakewordConfirmation(const WakeWordConfirmationSettingType& value);
    void onTimezone(const std::string& value);

    std::shared_ptr<DeviceSettingsManager> m_manager;
    std::shared_ptr<SettingCallbacks<DeviceSettingsManager>> m_callbacks;
    types::AlarmVolumeRampTypes m_alarmVolumeRampValue;
    WakeWordConfirmationSettingType m_wakewordConfirmationValue;
    std::string m_timezoneValue;
};
Further Studies on H∞ Guaranteed Cost Computation by Means of Parameter-dependent Lyapunov Functions
The linear matrix inequality approach to computing H-infinity guaranteed costs by means of parameter-dependent Lyapunov functions is further studied in this paper, extending previous works. The proposed conditions provide alternatives for both continuous-time and discrete-time systems. A numerical example shows the effectiveness of the new conditions.
//! Determine if two directories have different contents.
//!
//! For now, only one function exists: are they different, or not? In the future,
//! more functionality to actually determine the difference may be added.
//!
//! # Examples
//!
//! ```no_run
//! extern crate dir_diff;
//!
//! assert!(dir_diff::is_different("dir/a", "dir/b").unwrap());
//! ```

extern crate walkdir;

use std::cmp::Ordering;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;

use walkdir::{DirEntry, WalkDir};

/// The various errors that can happen when diffing two directories
#[derive(Debug)]
pub enum Error {
    Io(std::io::Error),
    StripPrefix(std::path::StripPrefixError),
    WalkDir(walkdir::Error),
}

/// Are the contents of two directories different?
///
/// # Examples
///
/// ```no_run
/// extern crate dir_diff;
///
/// assert!(dir_diff::is_different("dir/a", "dir/b").unwrap());
/// ```
pub fn is_different<A: AsRef<Path>, B: AsRef<Path>>(a_base: A, b_base: B) -> Result<bool, Error> {
    let mut a_walker = walk_dir(a_base)?;
    let mut b_walker = walk_dir(b_base)?;

    for (a, b) in (&mut a_walker).zip(&mut b_walker) {
        let a = a?;
        let b = b?;

        if a.depth() != b.depth()
            || a.file_type() != b.file_type()
            || a.file_name() != b.file_name()
            || (a.file_type().is_file() && read_to_vec(a.path())? != read_to_vec(b.path())?)
        {
            return Ok(true);
        }
    }

    Ok(a_walker.next().is_some() || b_walker.next().is_some())
}

fn walk_dir<P: AsRef<Path>>(path: P) -> Result<walkdir::IntoIter, std::io::Error> {
    let mut walkdir = WalkDir::new(path).sort_by(compare_by_file_name).into_iter();
    if let Some(Err(e)) = walkdir.next() {
        Err(e.into())
    } else {
        Ok(walkdir)
    }
}

fn compare_by_file_name(a: &DirEntry, b: &DirEntry) -> Ordering {
    a.file_name().cmp(b.file_name())
}

fn read_to_vec<P: AsRef<Path>>(file: P) -> Result<Vec<u8>, std::io::Error> {
    let mut data = Vec::new();
    let mut file = File::open(file.as_ref())?;

    file.read_to_end(&mut data)?;

    Ok(data)
}

impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Error {
        Error::Io(e)
    }
}

impl From<std::path::StripPrefixError> for Error {
    fn from(e: std::path::StripPrefixError) -> Error {
        Error::StripPrefix(e)
    }
}

impl From<walkdir::Error> for Error {
    fn from(e: walkdir::Error) -> Error {
        Error::WalkDir(e)
    }
}
/* * A keyboard layout for the gridded planck. * * Copyright (C) 2017 <NAME> * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * ,-----------------------------------------------------------------------------------. * | | | | | | | | | | | | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | | | | | | | | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | | | | | | | | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | | | | | | | | | * `-----------------------------------------------------------------------------------' * * This layout uses colemak by default, and is designed for programming, with easy access to symbols * through either double purpose modifiers or colemak style rolling for commonly used symbol clusters * Eg: compare colemak 'this' to '(){\n}' on the symbol layer. * * The layout also supports a range of multilingual characters, covering those * needed for French, German, Swedish and likely some other European Languages. * In the future full support for Colemak's multilingual deadkeys may be introduced. * * The multilingual characters are inputted through QMK's unicode engine, using * the Linux input method by default, however this can be changed at runtime. 
* */ #pragma message "You may need to add LAYOUT_planck_grid to your keymap layers - see default for an example" #include "planck.h" // Layers #define _COLEMAK 0 #define _QWERTY 1 #define _SYM 2 #define _NUM 3 #define _GR 4 #define _GR_S 5 #define _ADJ 6 #define _NAV 7 #define _PLOVER 8 // Macro ID numbers #define M_ALT_HASH 1 #define M_GR_DASH 2 #define M_SYM_LPRN 3 #define M_NAV_UNDS 4 #define M_NUM_RPRN 5 #define M_CTRL_DLR 6 #define M_LCBR_ENT 7 #define M_PLOVER 8 #define M_EXT_PLV 9 #define M_WINDOWS 10 #define M_LINUX 11 #define M_OSX 12 #define M_FUNCTION 13 #define M_THEN 14 #define M_CATCH 15 // Macro keys #define ALT_HASH MACROTAP(M_ALT_HASH) // tap for #, hold for Alt #define GR_DASH MACROTAP(M_GR_DASH) // tap for -, hold for GR layer a-class-name #define SYM_LPRN MACROTAP(M_SYM_LPRN) // tap for (, hold for symbols layer if() #define NAV_UNDS MACROTAP(M_NAV_UNDS) // tap for _, hold for navigation layer snake_case_variable #define NUM_RPRN MACROTAP(M_NUM_RPRN) // tap for ), hold for numbers layer else if() #define CTRL_DLR MACROTAP(M_CTRL_DLR) // tap for $, hold for ctrl $php_is_really_weird #define LCBR_ENT M(M_LCBR_ENT) // {\n easier code formatting #define PLOVER M(M_PLOVER) // PHROPB (plover) or ERFVIK(qwerty) starts plover #define EXT_PLV M(M_EXT_PLV) // PHRO*F (plover) or ERFVYU(qwerty) stops plover #define WINDOWS M(M_WINDOWS) // Sets Unicode handler to windows #define LINUX M(M_LINUX) // Sets Unicode handler to linux #define OSX M(M_OSX) // Sets Unicode handler to OSX #define FUNCTION M(M_FUNCTION) #define THEN M(M_THEN) #define CATCH M(M_CATCH) // Renames of QMK keys... (would have otherwise been a macro) #define QWERTY DF(_QWERTY) #define COLEMAK DF(_COLEMAK) const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = { /* Colemak * ,-----------------------------------------------------------------------------------. * | Tab | Q | W | F | P | G | J | L | U | Y | ;: | Bksp | * |------+------+------+------+------+------+------+------+------+------+------+------| * | Bksp | A | R | S | T | D | H | N | E | I | O | '" | * |------+------+------+------+------+------+------+------+------+------+------+------`---. * |Shift=| Z | X | C | V | B | K | M | ,< | .> | /? |Shift/Ent | * |------+------+------+------+------+------+------+------+------+------+------+----------| * |CtCaps|GUIF4 | Alt# | Gr- | Sym( | Nav_ |Space | Num) |Ctrl$ | F11 | F12 |Nav toggle| * `---------------------------------------------------------------------------------------' */ [_COLEMAK] = { {KC_TAB, KC_Q, KC_W, KC_F, KC_P, KC_G, KC_J, KC_L, KC_U, KC_Y, KC_SCLN, KC_BSPC}, {KC_BSPC, KC_A, KC_R, KC_S, KC_T, KC_D, KC_H, KC_N, KC_E, KC_I, KC_O, KC_QUOT}, {SFT_T(KC_EQL), KC_Z, KC_X, KC_C, KC_V, KC_B, KC_K, KC_M, KC_COMM, KC_DOT, KC_SLSH, SFT_T(KC_ENT) }, {CTL_T(KC_CAPS), KC_LGUI, ALT_HASH, GR_DASH, SYM_LPRN, NAV_UNDS, KC_SPC, NUM_RPRN, CTRL_DLR, KC_F11, KC_F12, TG(_NAV)} }, /* QWERTY * ,-----------------------------------------------------------------------------------. * | Tab | Q | W | E | R | T | Y | U | I | O | P | Bksp | * |------+------+------+------+------+------+------+------+------+------+------+------| * | Bksp | A | S | D | F | G | H | J | K | L | ;: | '" | * |------+------+------+------+------+------+------+------+------+------+------+------`---. * |Shift=| Z | X | C | V | B | N | M | ,< | .> | /? 
|Shift/Ent | * |------+------+------+------+------+------+------+------+------+------+------+----------| * |CtCaps|GUIF4 | Alt# | Gr- | Sym( | Nav_ |Space | Num) |Ctrl$ | F11 | F12 |Nav toggle| * `---------------------------------------------------------------------------------------' */ [_QWERTY] = { {KC_TAB, KC_Q, KC_W, KC_E, KC_R, KC_T, KC_Y, KC_U, KC_I, KC_O, KC_P, KC_BSPC}, {KC_ESC, KC_A, KC_S, KC_D, KC_F, KC_G, KC_H, KC_J, KC_K, KC_L, KC_SCLN, KC_QUOT}, {KC_LSFT, KC_Z, KC_X, KC_C, KC_V, KC_B, KC_K, KC_M, KC_COMM, KC_DOT, KC_SLSH, SFT_T(KC_ENT) }, {CTL_T(KC_CAPS), KC_LGUI, KC_LALT, GR_DASH, SYM_LPRN,KC_SPC, KC_SPC, NUM_RPRN, KC_LEFT, KC_DOWN, KC_UP, KC_RIGHT} }, /* Symbols * ,-----------------------------------------------------------------------------------. * | Tab | ` | @ | / | * | ^ | % | : | + | - | Del | Bksp | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | [ | ; | } | ( | " | ' | ) | {\n | ! | ] | \ | * |------+------+------+------+------+------+------+------+------+------+------+------| * |Shift | . | { | < | > | ~ | X2 | = | & | | | ? |Enter | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | Sym | _ |Space | NUM | X2 | | | | * `-----------------------------------------------------------------------------------' */ [_SYM] = { {KC_TAB, KC_GRV, KC_AT, KC_SLSH, KC_ASTR, KC_CIRC, KC_PERC, KC_COLN, KC_PLUS, KC_MINS, KC_DEL, KC_BSPC}, {FUNCTION,KC_LBRC, KC_SCLN, KC_RCBR, KC_LPRN, KC_DQT, KC_QUOT, KC_RPRN, LCBR_ENT,KC_EXLM, KC_RBRC, KC_BSLS}, {KC_LSFT, KC_DOT, KC_LCBR, KC_LABK, KC_RABK, KC_TILD, _______, KC_EQL, KC_AMPR, KC_PIPE, KC_QUES, KC_ENT }, {THEN, CATCH, XXXXXXX, XXXXXXX, _______, _______, _______, _______, _______, XXXXXXX, XXXXXXX, _______} }, /* Numbers * ,-----------------------------------------------------------------------------------. * | Tab | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 0 | Bksp | * |------+------+------+------+------+------+------+------+------+------+------+------| * | Bksp | x | D | E | F | + | - | 4 | 5 | 6 | 0 |Enter | * |------+------+------+------+------+------+------+------+------+------+------+------| * |Shift | ( | A | B | C | * | / | 1 | 2 | 3 | ) |Enter | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | GR | SYM | _ |Space | NUM | 0 | . | f | | * `-----------------------------------------------------------------------------------' */ [_NUM] = { {KC_TAB, KC_1, KC_2, KC_3, KC_4, KC_5, KC_6, KC_7, KC_8, KC_9, KC_0, KC_BSPC}, {KC_BSPC, KC_X, S(KC_D), S(KC_E), S(KC_F), KC_PLUS, KC_MINS, KC_4, KC_5, KC_6, KC_0, KC_ENT }, {KC_LSFT, KC_LPRN, S(KC_A), S(KC_B), S(KC_C), KC_ASTR, KC_SLSH, KC_1, KC_2, KC_3, KC_RPRN, KC_ENT }, {_______, _______, _______, _______, _______, _______, _______, _______, KC_0, KC_DOT, KC_F, _______} }, /* Gr layer / international keys * ,-----------------------------------------------------------------------------------. 
* | | ä | å | | ¢£ | €¥ | | ë | ê | ü | ù | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | â | à | ß | | | | è | é | ï | ö | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | æ | ô | ç | œ | | | û | « | » | î | | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | | | | | | | | | * `-----------------------------------------------------------------------------------' */ [_GR] = { {_______, UC(0xE4),UC(0xE5), _______,UC(0xA2), UC(0x20AC),_______,UC(0xEB),UC(0xEA),UC(0xFC),UC(0xF9), _______}, {_______, UC(0xE2),UC(0xE0),UC(0xDF), _______, _______, _______,UC(0xE8),UC(0xE9),UC(0xEF),UC(0xF6), _______}, {MO(_GR_S),UC(0xE6),UC(0xF4),UC(0xE7),UC(0x153),_______, _______,UC(0xFB),UC(0xAB),UC(0xBB),UC(0xEE), MO(_GR_S)}, {_______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______} }, // Shifted layer of the above [_GR_S] = { {_______, UC(0xC4),UC(0xC5), _______, UC(0xA3), UC(0xA5),_______, UC(0xCB),UC(0xCA),UC(0xDC),UC(0xD9),_______}, {_______, UC(0xC2),UC(0xC0), UC(0xDF),_______, _______, _______, UC(0xC8),UC(0xC9),UC(0xCF),UC(0xD6),_______}, {MO(_GR_S),UC(0xC6),UC(0xD4), UC(0xC7),UC(0x152),_______, _______, UC(0xDB),UC(0xAB),UC(0xBB),UC(0xCE),MO(_GR_S)}, {_______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______} }, /* Adjust Layer (Gr + Num) * ,-----------------------------------------------------------------------------------. * | F1 | F2 | F3 | F4 | F5 | F6 | F7 | F8 | F9 | F10 | F11 | F12 | * |------+-------------+------+------+------+------+------+------+------+------+------| * | | |Prntscr| ESC |VOLUP | PLAY | PREV |QWERTY|COLEMAK|PLOVER| | | * |------+-------------+------+------+------+------+------+------+------+------+------| * | | |BACKLIT| INS |VOLDWN| MUTE | NEXT | WIND |LINUX | OSX | | | * |------+-------------+------+------+------+------+------+------+------+------+------| * | | | | | | | | | | | | | * `-----------------------------------------------------------------------------------' */ [_ADJ] = { {KC_F1, KC_F2, KC_F3, KC_F4, KC_F5, KC_F6, KC_F7, KC_F8, KC_F9, KC_F10, KC_F11, KC_F12 }, {XXXXXXX, XXXXXXX, KC_PSCR, KC_ESC, KC_VOLU, KC_MPLY, KC_MPRV, QWERTY, COLEMAK, PLOVER, XXXXXXX, XXXXXXX}, {XXXXXXX, XXXXXXX, BL_STEP, KC_INS, KC_VOLD, KC_MUTE, KC_MNXT, WINDOWS, LINUX, OSX, XXXXXXX, XXXXXXX}, {_______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______} }, /* Navigation * ,-----------------------------------------------------------------------------------. 
* | | | BTN3 | BTN2 | BTN1 | | ACL0 | HOME | PGDN | PGUP | END | Bksp | * |------+------+------+------+------+------+------+------+------+------+------+------| * | |MSLEFT| MSDN | MSUP |MSRGHT| | ACL1 | LEFT | DOWN | UP | RGHT |Enter | * |------+------+------+------+------+------+------+------+------+------+------+------| * | |WHLEFT| WHDN | WHUP |WHRGHT| | ACL2 |C-LEFT|C-PGDN|C-PGUP|C-RGHT|Enter | * |------+------+------+------+------+------+------+------+------+------+------+------| * | | | | | | _ |Space | ACL0 | ACL1 | ACL2 | |TGLNAV| * `-----------------------------------------------------------------------------------' */ [_NAV] = { {XXXXXXX, XXXXXXX, KC_BTN3, KC_BTN2, KC_BTN1, XXXXXXX, KC_ACL0, KC_HOME, KC_PGDN, KC_PGUP, KC_END, KC_BSPC}, {XXXXXXX, KC_MS_L, KC_MS_D, KC_MS_U, KC_MS_R, XXXXXXX, KC_ACL1, KC_LEFT, KC_DOWN, KC_UP, KC_RIGHT,KC_ENT }, {XXXXXXX, KC_WH_L, KC_WH_D, KC_WH_U, KC_WH_R, XXXXXXX, KC_ACL2, LCTL(KC_LEFT), LCTL(KC_PGDN), LCTL(KC_PGUP), LCTL(KC_RIGHT), KC_ENT }, {_______, _______, _______, _______, _______, _______, _______, KC_ACL0, KC_ACL1, KC_ACL2, _______, _______} }, /* Plover Layer * ,-----------------------------------------------------------------------------------. * | | S | T | P | H | * | * | F | P | L | T | D | * |------+------+------+------+------+------+------+------+------+------+------+------| * |TogOut| S | K | W | R | * | * | R | B | G | S | Z | * |------+------+------+------+------+------+------+------+------+------+------+------| * | # | # | # | # | # | # | # | # | # | # | # | # | * |------+------+------+------+------+------+------+------+------+------+------+------| * | Exit | | | # | A | O | E | U | | | | | * `-----------------------------------------------------------------------------------' */ [_PLOVER] = { {XXXXXXX, KC_Q, KC_W, KC_E, KC_R, KC_T, KC_Y, KC_U, KC_I, KC_O, KC_P, KC_LBRC}, {XXXXXXX, KC_A, KC_S, KC_D, KC_F, KC_G, KC_H, KC_J, KC_K, KC_L, KC_SCLN, KC_QUOT}, {KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1, KC_1 }, {EXT_PLV, XXXXXXX, XXXXXXX, KC_1, KC_C, KC_V, KC_N, KC_M, XXXXXXX, XXXXXXX, XXXXXXX, XXXXXXX} } }; void matrix_init_user(){ set_unicode_input_mode(UC_LNX); } const macro_t *action_get_macro(keyrecord_t *record, uint8_t id, uint8_t opt) { switch(id) { case M_ALT_HASH: return MACRO_TAP_SHFT_KEY_HOLD_MOD(record, 3, LALT); case M_GR_DASH: { const macro_t* macro = MACRO_TAP_HOLD_LAYER(record, MACRO(T(MINS)), _GR); update_tri_layer(_NUM, _GR, _ADJ); return macro; } case M_SYM_LPRN: return MACRO_TAP_SHFT_KEY_HOLD_LAYER(record, 9, _SYM); case M_NAV_UNDS: return MACRO_TAP_SHFT_KEY_HOLD_LAYER(record, MINS, _NAV); case M_NUM_RPRN: { const macro_t* macro = MACRO_TAP_SHFT_KEY_HOLD_LAYER(record, 0, _NUM); update_tri_layer(_NUM, _GR, _ADJ); return macro; } case M_CTRL_DLR: return MACRO_TAP_SHFT_KEY_HOLD_MOD(record, 4, LCTL); case M_LCBR_ENT: return MACRODOWN(I(10), D(LSFT), T(LBRC), U(LSFT), T(ENT), END); case M_PLOVER: if (record->event.pressed) { layer_and(0); layer_on(_PLOVER); default_layer_set(_PLOVER); // Starts plover return MACRO(I(10), D(E), D(R), D(F), D(V), D(I), D(K), U(E), U(R), U(F), U(V), U(I), U(K), END); } break; case M_EXT_PLV: if (!record->event.pressed) { layer_off(_PLOVER); default_layer_set(_COLEMAK); //Pauses plover return MACRO(I(10), D(E), D(R), D(F), D(V), D(Y), D(U), U(E), U(R), U(F), U(V), U(Y), U(U), END); } break; case M_WINDOWS: set_unicode_input_mode(UC_WIN); break; case M_LINUX: set_unicode_input_mode(UC_LNX); break; case M_OSX: set_unicode_input_mode(UC_OSX); break; case M_FUNCTION: if 
(record->event.pressed) { SEND_STRING("function"); } break; case M_THEN: if (record->event.pressed) { SEND_STRING("then"); } break; case M_CATCH: if (record->event.pressed) { SEND_STRING("catch"); } break; } return MACRO_NONE; };
package main import ( "encoding/base64" "fmt" "os" "path/filepath" "strings" ) func Error(format string, a ...interface{}) { fmt.Fprintf(os.Stderr, format, a...) } func print_url_blank() { base_url := get_base_url() if base_url == nil { return } fmt.Printf("%s\n", base_url.String()) } func print_url() { base_url := get_base_url() if base_url != nil { tmp := *base_url tmp.RawQuery = "" fmt.Printf("URL: %s\n", tmp.String()) fmt.Printf("Query: %s\n", base_url.RawQuery) } else { fmt.Printf("No base URL\n") } } func print_headers() { headers, n := get_headers() if n == 0 { fmt.Printf("No HTTP headers\n") } else { headers.Write(os.Stdout) } } func main() { args := os.Args[1:][:] _, tool_name := filepath.Split(os.Args[0]) if len(args) == 0 { Error(`Usage: %s <command or HTTP method> [args...] Commands: url <base_url> - Sets base URL in environment; must be absolute URL. To clear base URL, use "-" as <base_url>. url - Displays current base URL from environment. env - Displays environment: URL, blank line, then HTTP headers (one per line). session - Displays environment session ID. Use $HTTPCLI_SESISON_ID env var to override. Default is "yyyy-MM-dd-########" with datestamp and parent process pid. reset - Resets environment; clears HTTP headers and base URL. set <name> <value> - Sets a custom HTTP header in environment. list - List current HTTP headers in environment. clear - Clears HTTP headers in environment. basic <user> <password> - Set Basic authentication header. HTTP: <method> <url> [content-type] [options...] Invoke HTTP method against <url>; if <url> is relative, <url> is combined with <base_url>. If <method> is POST or PUT then a request body is required. [content-type] is required if <method> is not POST or PUT but a request body is needed. Request body is read from stdin until EOF, buffered into memory, and submitted with a calculated Content-Length header value. Alternate Transfer-Modes are not supported currently. 
[content-type] default is "application/json" [options...]: -x <header1,header2,header3,...> - Exclude headers from request -p - Pretty-print JSON output -q - Quiet mode; only output response body to stdout -f <N> - Follow at most N redirects (default 0) `, tool_name) os.Exit(1) return } // Determine what to do: cmd := args[0] args = args[1:] // Load environment data from file: load_env() // Process command: switch strings.ToLower(cmd) { case "url": if len(args) == 0 { print_url_blank() } else if len(args) == 1 { set_base_url(args[0]) store_env() } break case "env": base_url := get_base_url() if base_url != nil { fmt.Printf("%s\n\n", base_url.String()) } else { fmt.Printf("No base URL\n\n") } // Get HTTP headers from environment: print_headers() break case "session": // Print current session ID for the environment: fmt.Printf("%s\n", SessionID()) break case "list": // Get HTTP headers from environment: print_headers() break case "set": // Get HTTP headers from environment: headers, _ := get_headers() if len(args) >= 2 { // Set a new HTTP header: headers.Set(args[0], strings.Join(args[1:], " ")) } else if len(args) == 1 { delete(headers, args[0]) } else { Error("Missing header name and value\n") os.Exit(1) return } set_headers(headers) store_env() break case "basic": if len(args) != 2 { Error("Required username and password arguments\n") os.Exit(1) return } headers, _ := get_headers() headers.Set("Authorization", fmt.Sprintf("Basic %s", base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", args[0], args[1]))))) set_headers(headers) store_env() break case "clear": set_headers(nil) store_env() break case "reset": set_headers(nil) set_base_url("") store_env() break // HTTP methods: default: status_code := do_http(strings.ToUpper(cmd), args) if status_code >= 500 { os.Exit(5) } else if status_code >= 400 { os.Exit(4) } else { // 300-, 200-, 100- are not errors. os.Exit(0) } break } }
// src/Page/Soltys.Page/ClientApp/src/cv/Picture.tsx
import React from 'react';

const Picture = () => {
    return (
        <div className="pic-box">
            <a href="https://www.gravatar.com/avatar/34b3ce730aa7e53b7e6aaf47e964d3b0.jpg?s=720">
                <img
                    src="https://www.gravatar.com/avatar/34b3ce730aa7e53b7e6aaf47e964d3b0.jpg?s=150"
                    alt="<NAME>"
                    className="pic"
                />
            </a>
        </div>
    );
}

export default Picture;
// CreateTeam creates a new team; an error means the ID is already taken.
func (mm *MutMap) CreateTeam(teamID string) (server.Team, error) {
	mm.mutex.Lock()
	defer mm.mutex.Unlock()

	team, ok := mm.teams[teamID]
	if ok {
		return team, errors.New("exists")
	}

	team.ID = teamID
	mm.teams[teamID] = team
	if mm.onNewTeam != nil {
		mm.onNewTeam <- teamID
	}
	return team, nil
}
The New American Encounter with International Human Rights Norms: The Road After Abu Ghraib

In this research, I paint a canvas of how international human rights norms have entered American civil society-governmental contests over torture and detainee rights policies after Abu Ghraib, and assess the extent to which efforts by select media outlets, human rights nongovernmental organizations (NGOs), and congressional leaders have produced a new American engagement with the international human rights framework. The study is empirical (stemming from 11 interviews and content analysis of select media coverage) and focuses on the confirmation of Alberto Gonzales as U.S. Attorney General and the passage of the McCain Anti-torture Amendment to the 2006 Department of Defense Appropriations Act. My analysis is informed by a constructivist framework that highlights the interplay between norms and power or interests. Constructivism underscores elements of "shared moral assessment" or evaluation encompassed by international norms and maintains that international norms can produce shifts in actors' constitution of their identities and interests. Although earlier waves of constructivist scholarship focused on processes of persuasion and communication taking place on the international plane, scholars are increasingly looking at these processes at the domestic level.

An intractable commitment to upholding liberal rights has been a central tenet of dominant American identity constructions. In relation to the international human rights regime, this identity construction has been invoked to paint international norms as redundant or irrelevant to the American experience. In U.S. political and legal discourses, the constitutional rights framework has routinely been portrayed as above and beyond the international rights regime, thus rendering America's commitment to human rights implicit in the sanctity accorded to its Constitution. Further, the United States' inherent respect for universal rights is conceived of relationally, or in opposition to others' ineptitude in upholding rights. When extended, these dynamics produce a critical paradox: at the same time that dominant American identity constructions marginalize the international human rights regime, important aspects of American identity and self-image are tied up with an assumption that America adheres to human rights principles.

After September 11th, when these formulations converged with emerging national security discourses, a number of troubling trends developed. Depictions of public international law as not real law, as naive, advisory, and irrelevant, or "quaint" and "obsolete," gained currency. Human rights NGOs struggled for funding and media coverage of detainee rights issues. Whether torture was a necessary evil in the "War on Terrorism" became an issue of legitimate debate, with the pro and con positions placed on a par and the pro position gaining momentum. Finally, American policymakers appropriated the moral authority and normative legitimacy of human rights discourses in order to simultaneously justify military interventions and avoid the application of these norms within those interventions. That constitutional protections could not be extended to non-citizens was treated merely as a regrettable but unalterable fact. Then came Abu Ghraib.
Abu Ghraib forced American leaders to confront the gulf between their self-image as deploying benign power in the service of rights and freedom and incontrovertible evidence of American power producing rights violations. Immediately, the pictures produced condemnation of the violations and rhetorical acceptance of the substance and legitimacy of the international norms violated. Most significant for the current analysis, the gripping images mobilized and opened up an important discursive space for domestic actors who sought American human rights compliance. The three primary domestic actors I identify are human rights NGOs, the media, and congressional leaders. …
// generated by stringer -type=ArtifactState; DO NOT EDIT

package model

import "fmt"

const _ArtifactState_name = "UNKNOWN_ARTIFACT_STATEERRORAPPENDINGAPPEND_COMPLETEWAITING_FOR_UPLOADUPLOADINGUPLOADEDDEADLINE_EXCEEDEDCLOSED_WITHOUT_DATA"

var _ArtifactState_index = [...]uint8{0, 22, 27, 36, 51, 69, 78, 86, 103, 122}

func (i ArtifactState) String() string {
	if i >= ArtifactState(len(_ArtifactState_index)-1) {
		return fmt.Sprintf("ArtifactState(%d)", i)
	}
	return _ArtifactState_name[_ArtifactState_index[i]:_ArtifactState_index[i+1]]
}
/* ext/stub/internalinterfaces.zep.h */

extern zend_class_entry *stub_internalinterfaces_ce;

ZEPHIR_INIT_CLASS(Stub_InternalInterfaces);

PHP_METHOD(Stub_InternalInterfaces, count);

ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_stub_internalinterfaces_count, 0, 0, IS_LONG, 0)
ZEND_END_ARG_INFO()

ZEPHIR_INIT_FUNCS(stub_internalinterfaces_method_entry) {
	PHP_ME(Stub_InternalInterfaces, count, arginfo_stub_internalinterfaces_count, ZEND_ACC_PUBLIC)
	PHP_FE_END
};
/**
 * This class represents data returned by the ESP32 relay switch when getting the current device status. It is used
 * for data serialization.
 * @see <a href="https://github.com/kyberpunk/esp-relay-switch">https://github.com/kyberpunk/esp-relay-switch</a>
 */
@Data
public class DeviceInfo {
    /**
     * ID of the device. It is set during firmware deployment.
     */
    @JsonProperty("id")
    private String deviceId;

    /**
     * Current state of the relay. When true, the relay is switched on.
     */
    private boolean switchedOn;

    /**
     * Timeout in milliseconds after which the relay position will be reverted. If it is 0, the state is permanent.
     */
    private int timeout;

    /**
     * UTC timestamp in milliseconds of the last relay state change.
     */
    @JsonProperty("lastChangeUtcMillis")
    private Date lastChange;
}
// QuitMeeting removes a participant from a meeting identified by title.
// username is the participator's user name and title is the meeting's title.
// It returns true if a matching meeting was found and the participant removed,
// and deletes the meeting entirely once it has no participators left.
func QuitMeeting(username string, title string) bool {
	flag := entity.QueryMeeting(func(m *entity.Meeting) bool {
		return m.GetTitle() == title && m.IsParticipator(username)
	})
	if len(flag) == 0 {
		return false
	}
	entity.UpdateMeeting(func(m *entity.Meeting) bool {
		return m.IsParticipator(username) && m.GetTitle() == title
	}, func(m *entity.Meeting) {
		m.DeleteParticipator(username)
	})
	entity.DeleteMeeting(func(m *entity.Meeting) bool {
		return len(m.GetParticipator()) == 0
	})
	return true
}
/** * @brief Initializes the DMA control data structure according to the specified * parameters. * @param DMA_ctrl_data_ptr: pointer to a DMA_CtrlDataInitTypeDef structure that * contains the control data structure to initialize * @param DMA_ctrl_table_ptr: pointer to a DMA_CtrlDataTypeDef structure that * contains the initial control data configuration, provided by the application. * @retval None */ void DMA_CtrlDataInit(DMA_CtrlDataInitTypeDef *DMA_ctrl_data_ptr, DMA_CtrlDataTypeDef *DMA_ctrl_table_ptr) { assert_param(IS_DMA_CHANNELS(DMA_Channels_Number)); assert_param(IS_DMA_ALTERNATE_DATA(DMA_AlternateData)); assert_param(IS_DMA_SOURCE_INC_STATE(DMA_ctrl_data_ptr->DMA_SourceIncSize)); assert_param(IS_DMA_DEST_INC_STATE(DMA_ctrl_data_ptr->DMA_DestIncSize)); assert_param(IS_DMA_MEMORY_DATA_SIZE(DMA_ctrl_data_ptr->DMA_MemoryDataSize)); assert_param(IS_DMA_MODE(DMA_ctrl_data_ptr->DMA_Mode)); assert_param(IS_DMA_CYCLE_SIZE(DMA_ctrl_data_ptr->DMA_CycleSize)); assert_param(IS_DMA_CONTINUOUS_NUMBER(DMA_ctrl_data_ptr->DMA_NumContinuous)); assert_param(IS_DMA_SOURCE_PROT(DMA_ctrl_data_ptr->DMA_SourceProtCtrl)); assert_param(IS_DMA_DEST_PROT(DMA_ctrl_data_ptr->DMA_DestProtCtrl)); if (DMA_ctrl_data_ptr->DMA_SourceIncSize == DMA_SourceIncNo) { DMA_ctrl_table_ptr->DMA_SourceEndAddr = DMA_ctrl_data_ptr->DMA_SourceBaseAddr; } else { DMA_ctrl_table_ptr->DMA_SourceEndAddr = ( DMA_ctrl_data_ptr->DMA_SourceBaseAddr + ((DMA_ctrl_data_ptr->DMA_CycleSize - 1) << (DMA_ctrl_data_ptr->DMA_SourceIncSize >> 26))); } if (DMA_ctrl_data_ptr->DMA_Mode == DMA_Mode_MemScatterPri) { DMA_ctrl_table_ptr->DMA_DestEndAddr = (DMA_ctrl_data_ptr->DMA_DestBaseAddr + 12); } else { if (DMA_ctrl_data_ptr->DMA_DestIncSize == DMA_DestIncNo) { DMA_ctrl_table_ptr->DMA_DestEndAddr = DMA_ctrl_data_ptr->DMA_DestBaseAddr; } else { DMA_ctrl_table_ptr->DMA_DestEndAddr = ( DMA_ctrl_data_ptr->DMA_DestBaseAddr + ((DMA_ctrl_data_ptr->DMA_CycleSize - 1) << (DMA_ctrl_data_ptr->DMA_DestIncSize >> 30))); } } DMA_ctrl_table_ptr->DMA_Control = (DMA_ctrl_data_ptr->DMA_DestIncSize | DMA_ctrl_data_ptr->DMA_MemoryDataSize | DMA_ctrl_data_ptr->DMA_SourceIncSize | DMA_ctrl_data_ptr->DMA_DestProtCtrl | DMA_ctrl_data_ptr->DMA_SourceProtCtrl | DMA_ctrl_data_ptr->DMA_NumContinuous | ((DMA_ctrl_data_ptr->DMA_CycleSize - 1) << 4) | DMA_ctrl_data_ptr->DMA_Mode); }
Autonomous drone race: A computationally efficient vision-based navigation and control strategy

Drone racing is becoming a popular sport where human pilots have to control their drones to fly at high speed through complex environments and pass a number of gates in a pre-defined sequence. In this paper, we develop an autonomous system for drones to race fully autonomously using only onboard resources. Instead of commonly used visual navigation methods, such as simultaneous localization and mapping and visual inertial odometry, which are computationally expensive for micro aerial vehicles (MAVs), we developed the highly efficient snake gate detection algorithm for visual navigation, which can detect the gate at 20 Hz on a Parrot Bebop drone. Then, with the gate detection result, we developed a robust pose estimation algorithm which has better tolerance to detection noise than a state-of-the-art perspective-n-point method. During the race, the gates are sometimes not in the drone's field of view. For this case, a state prediction-based feed-forward control strategy is developed to steer the drone to the next gate. Experiments show that the drone can fly a half-circle with 1.5 m radius within 2 seconds with only 30 cm error at the end of the circle, without any position feedback. Finally, the whole system is tested in a complex environment (a showroom in the faculty of Aerospace Engineering, TU Delft). The result shows that the drone can complete the track of 15 gates at a speed of 1.5 m/s, which is faster than the speeds exhibited at the 2016 and 2017 IROS autonomous drone races.

Introduction

First person view (FPV) drone racing has been a popular sport in recent years, where the pilots have to control the drones to fly through gates decorated by LED lights at high speed. In the field of robotics, drone racing has raised the question: how can drones be designed to fly races by themselves, possibly even competing with human pilots? They use VIO for navigation, which is computationally relatively expensive. Kaufmann et al. develop a strategy that combines a convolutional neural network (CNN) and minimum-jerk trajectory generation. In their work, an in-house quadrotor with an Intel UpBoard and a Qualcomm Snapdragon Flight Kit, which is used for VIO, is used as the platform. In , a systematic solution for the IROS autonomous drone race 2016 is presented. In their work, an NVIDIA Jetson TK1 single-board computer and a stereo camera are used for a visual servoing task. They finally passed through 10 gates within 86 s and won the race. We will use their result as a benchmark against which to compare our research result. In this paper, we present a solution for autonomous drone racing which is computationally more efficient than the solutions discussed above. For the gate detection, a novel lightweight algorithm, "snake gate detection", is described and analyzed in detail in Section 3. Instead of using a common, purely vision-based perspective-n-point (PnP) algorithm, we combine the onboard attitude estimate with the gate detection result to determine the position of the drone. We show that this is more robust than the PnP method. Then, a novel Kalman filter is introduced that uses a straightforward drag model to estimate the velocity of the drone. Two control strategies to control the drone to go through the gate and find the next gate are discussed in Section 4. In Section 5, flight tests are performed with a Parrot Bebop 1 drone, by replacing the Parrot firmware with our Paparazzi autopilot code.
All algorithms run in real time on the limited Parrot P7 dual-core CPU Cortex A9 processor, and no hardware changes are required, as the vision algorithms use the frontal camera and other sensors already present in the Bebop. The flight experiments are done in a complex and narrow environment (a showroom displaying aircraft components in the basement of Aerospace Engineering, TU Delft). The result shows that the drone can fly through a sequence of 15 gates autonomously, using only onboard resources, in a very complex environment with a velocity of up to 1.5 m/s.

System overview

The quadrotor hardware used as the experimental platform in this work is a commercially available Parrot Bebop 1 (Figure 1). However, all Parrot software was replaced by our own computer vision, sensor drivers, navigation and control using the Paparazzi-UAV open-source autopilot project. Only the Linux operating system was kept. The most important characteristics are listed in Table 1. It should be noted that the image from the front camera as used by our autopilot in this work is only 160 × 350 pixels, and all the processing for the drone race takes place on the Parrot P7 dual-core CPU Cortex A9 (max 2 GHz), although the Bebop is equipped with a quad-core GPU. The structure of the system is shown in Figure 2. For visual navigation, a novel algorithm, snake gate detection, is implemented to detect the gates. It outputs the coordinates of the detected gates' corners, which are then sent to the pose estimation block. In the pose estimation block, the coordinates of the gate corners on the image plane are projected to 3D space, which provides the relative position between the drone and the gate. For the attitude and heading reference system (AHRS), a classic complementary filter is employed. Finally, the position measured by the front camera, the attitude estimate from the AHRS and the IMU measurements are fused by a Kalman filter to provide a position estimate. In terms of control, when the target gate is in the field of view, a PD controller (Control block in Figure 2) is used to steer the drone to align with the center of the gate. After passing through the gate, or when no gate is in the field of view, a prediction-based feed-forward control scheme is employed to steer the drone to the next gate, which will be further explained in Section 4. An adaptive incremental nonlinear dynamic inversion (INDI) controller is used as the low-level attitude controller. The race track can be divided into two parts. The first part is the approaching-gate part, where the target gate can be used by the drone for navigation. The other is the after-gate part, which starts at the point where the drone passes through the gate and ends at the point where the drone can see the next gate. Different race tracks can be seen as different combinations of these two parts. Thus, at first, due to the space restrictions of our experimental environment, we simplify the race track to a two-gate track, which can be seen in Figure 3. Most of our experiments are done and analyzed on this simplified race track with ground truth measurements provided by OptiTrack. Finally, the system is moved to a more complex and realistic drone race track for verification.

Vision navigation

In FPV drone races, gates are usually decorated with LEDs in order to be easily recognized by drone pilots. Drone pilots can then use the gates to navigate themselves to approach the gates.
Inspired by FPV drone racing, in our research we also use the gates for navigation, since their simple shape and relatively large size make them relatively easy to extract, and their projection on the image plane can provide information such as the position and attitude of the drone. In this section, we first present an efficient gate detection method to extract the four corners of the gate on the image plane. Next, the positions of the four corners of the gate are projected to 3D space by combining them with the AHRS reading. Finally, a Kalman filter providing position estimation by fusing the vision measurement, the IMU measurement and the onboard AHRS reading is discussed.

Gate detection

Gate detection can be accomplished by multiple different computer vision methods, such as Viola and Jones, the Hough transform and deep learning. In this article, we propose a novel gate detection algorithm called snake gate detection, which is lightweight and easy to implement onboard. We search for the gates based on their color on the distorted image, because an undistortion procedure for each image would slow down the whole detection procedure (Figure 4). Luckily, our detection method still works properly on this distorted image. The search starts by randomly sampling the original image. If a random point P0 hits the target color (the gate's color), we continue searching 'up and down' to find points P1 and P2. It should be noted that this search can follow the edge of an oblique bar of the gate (Figure 4a). To prevent the algorithm from accepting small color blocks that have the same color as the gate, we introduce a threshold called the minimum length threshold σL. If ||P1 − P2|| < σL, the search is terminated. Then, we use P1 and P2 as start points to search 'left and right' to find P3 and P4, respectively. Similar to the vertical search, the horizontal search can also follow an oblique bar, and the result is again checked against σL to ensure that the detection is not too small and hence unlikely to be a gate. The algorithm can be found in Algorithm 1. It should be noted that while a small σL may lead to acceptance of some small detections, which in most cases are false positives, a large σL can lead to some gates in the image being rejected. The selection of σL will be discussed later in this section.

If the gate's image is continuous in the image plane and the gate's edges are smooth, snake gate detection should find all four points (Figure 4a). However, due to varying light conditions, some parts of the gate may be overexposed or underexposed, which may lead to color deviation. For example, in Figure 4b, part of the lower bar is overexposed. In this case, P4 will not reach the real gate corner. Hence, a refining process is employed to find the real gate corners. To refine the detection, a square of minimum length containing the four points is first obtained (red square in Figure 4b). Then four small squares centered at the Si are found (four gray squares in Figure 4b). In these small squares, the raw detection is refined by finding the centroid of the patch around each rough corner.

(Figure 4: (a) If the gate is continuous in the image plane, the snake gate detection algorithm should find all four corners P1, P2, P3 and P4. (b) When the gate is not continuous in the image plane, a square S1, S2, S3, S4 of minimum length containing P1, P2, P3, P4 is found first; four small squares centered at the Si are then found, and a histogram analysis in these small squares helps to refine the estimate of the gate's corners in the image.)

In one image, in most cases, the number of detected gates Nd is larger than the number of real gates in the image Ng. This can be caused by duplicated samples on the same gate, which are true positive detections and do not affect the performance of navigation. The other reason for Nd > Ng is false positive detections, which affect the accuracy of navigation significantly and should be eliminated. Here, another threshold, the color fitness threshold σcf, is introduced to help decrease the number of false positive detections. The color fitness of a detection is cf = Nc / N, where Nc is the number of pixels on the polygon whose color is the target color and N is the total number of pixels on the polygon. Only gates with cf > σcf are accepted as detected gates. Similar to the minimum length threshold σL, the selection of σcf also affects the detection accuracy significantly.

To evaluate the performance of the snake gate detection algorithm, 600 onboard images with and without gates are used to test the algorithm (Figure 5). The ROC curve with varying σL is shown in Figure 6. It should be noted that the detection is run 10 times for each σL to obtain a statistical result. The x-axis of the ROC curve is the average number of false positive detections per image and the y-axis is the true positive rate. To make the trend in Figure 6a clearer, we enlarge the local part of the ROC curve using a logarithmic coordinate system in Figure 6b. From the ROC curve, it can be seen that when σL is small (σL < 15), the number of false positive detections decreases significantly as σL increases, without sacrificing the TPR. That is because σL helps to reject the small detections caused by small color blocks in the environment. When σL > 35, however, the TPR decreases sharply; the reason is that σL is then too large to accept true positive detections. σL = 25 gives the optimal option, with low FPs/image and almost the highest TPR. Then, with σL = 25, we draw another ROC curve with varying σcf, which is shown in Figure 7. It can be seen that with increasing σcf, false positive detections decrease without a significant decrease of the TPR. In the autonomous drone race 2017, we tuned σL through experimental trial-and-error and accepted the detection with the highest color fitness; the corresponding ROC point is plotted as a red circle in Figure 6 and Figure 7. It is remarkably close to the optimal thresholds one would pick given this more extended analysis. Please note that the algorithm used in the 2017 drone race only accepted the gate with the highest color fitness, and not every gate that was over the color threshold. It should also be noted that the method described above can be used for tuning σL and σcf automatically. However, manually labeling and running snake gate detection on the dataset for each set of parameters is time-consuming, especially when the drone needs to be deployed on a new racing track with limited preparation time. It should be noted that the true positive rate in the above figures is the statistical result on the entire dataset.
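To make the search procedure concrete, the following Python sketch illustrates the core of the snake-gate idea described above: random sampling, an 'up/down' crawl along target-colored pixels that can follow oblique bars, the minimum length check σL, and the color fitness ratio. The function names, the binary-mask color test, the single-direction horizontal search and the threshold values are illustrative assumptions, not the authors' actual implementation.

import random
import numpy as np

def is_target_color(img, x, y):
    # Illustrative color test: img is assumed to be a binary mask with gate pixels set to 1.
    h, w = img.shape
    return 0 <= x < w and 0 <= y < h and img[y, x] == 1

def crawl(img, x, y, dx, dy):
    # Follow target-colored pixels in direction (dx, dy), allowing one-pixel
    # sideways deviations so the search can track an oblique gate bar.
    while True:
        moved = False
        for sx, sy in ((0, 0), (dy, dx), (-dy, -dx)):   # straight, then the two diagonals
            nx, ny = x + dx + sx, y + dy + sy
            if is_target_color(img, nx, ny):
                x, y, moved = nx, ny, True
                break
        if not moved:
            return x, y

def color_fitness(img, corners):
    # cf = N_c / N, approximated over the bounding box of the four corners.
    xs, ys = [c[0] for c in corners], [c[1] for c in corners]
    patch = img[min(ys):max(ys) + 1, min(xs):max(xs) + 1]
    return float(patch.mean()) if patch.size else 0.0

def snake_gate_detect(img, n_samples=200, sigma_l=25, sigma_cf=0.3):
    # Randomly sample pixels; on a color hit, crawl up/down and then sideways to
    # find corner candidates P1..P4, and filter with sigma_l and sigma_cf.
    h, w = img.shape
    detections = []
    for _ in range(n_samples):
        x0, y0 = random.randrange(w), random.randrange(h)
        if not is_target_color(img, x0, y0):
            continue
        p1 = crawl(img, x0, y0, 0, -1)                   # upwards
        p2 = crawl(img, x0, y0, 0, +1)                   # downwards
        if abs(p1[1] - p2[1]) < sigma_l:                 # minimum length threshold
            continue
        p3 = crawl(img, p1[0], p1[1], 1, 0)              # sideways from P1
        p4 = crawl(img, p2[0], p2[1], 1, 0)              # sideways from P2
        if abs(p3[0] - p1[0]) < sigma_l or abs(p4[0] - p2[0]) < sigma_l:
            continue
        corners = [p1, p3, p4, p2]
        if color_fitness(img, corners) > sigma_cf:       # color fitness threshold
            detections.append(corners)
    return detections

With a binary color mask as input, snake_gate_detect(mask) returns candidate corner quadruples that can then be deduplicated and passed to the pose estimator.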
(Algorithm 2: searchUpAndDown — search in the vertical direction; the search in the horizontal direction is similar.)

In order to evaluate how good or bad a true positive rate of 0.46 is, one has to take additional factors into account. Importantly, the distance between the drone and the gate can significantly affect the detection. Figure 8 shows how the true positive rate changes with the distance between the gate and the drone. It is very clear that when the drone gets closer to the gate, snake gate detection has a higher true positive rate, reaching 70% at close distances. Figure 9 shows the detection result while the drone approaches the gate. In the beginning, the distance between the drone and the gate is large, which leads to false negative detections. Once the drone starts detecting the gate, it detects the gate most of the time. However, there still exist some false negative detections, but these can be handled by the filters that will be explained in detail in the next section. When the drone is close to the gate (< 1 m), only part of the gate can be seen. In this scenario, snake gate detection will not detect the gate. A second detection method, called histogram gate side detection, is employed to replace snake gate detection when the position estimate from the Kalman filter is < 1 m (Figure 10). This detection algorithm accumulates the number of target-color pixels in each column. Then two peaks of the histogram, which represent the two sidebars of the gate, can be found. Later, the position of these two bars can be used by the pose estimation to extract the relative position between the gate and the drone.

Pose estimation

When a gate with known geometry is detected, its image can provide the pose information of the drone. The problem of determining the position and orientation of a camera given its intrinsic parameters and a set of n correspondences between 3D points and their 2D projections is called the Perspective-n-Point (PnP) problem. In our case, 4 coplanar control points (gate corners) are available, which leads to a unique solution. However, PnP is sensitive to mismatches between the 3D points and the 2D points, which in our case are inevitable because of vibration and the complex environment. Therefore, these methods are usually combined with a RANSAC scheme to reject noise and outliers. Unfortunately, the fact that only four corner points are available on one gate limits the effectiveness of such a scheme. In this section, a novel algorithm combining the gate detection result and the onboard AHRS attitude estimate is derived to provide the pose estimate of the drone. Since we are using a fish-eye camera, a calibration procedure should be done first. Then, the camera can be simplified to a pinhole camera model (Figure 11). According to the similar triangle principle, we have

x' = f X / Z,   y' = f Y / Z                                   (2)

where (X, Y, Z) are the coordinates of a 3D point P in the camera frame, f is the focal length and (x', y') is its projection on the image plane. Assume that each pixel's width is d_x and height d_y and that the principal point's coordinates are (C_x, C_y); we can then transfer the pinhole model 2 to

u = f X / (d_x Z) + C_x,   v = f Y / (d_y Z) + C_y             (3)

To write the pinhole model 3 in homogeneous coordinates, we have

Z [u, v, 1]^T = [[f/d_x, 0, C_x], [0, f/d_y, C_y], [0, 0, 1]] [X, Y, Z]^T      (4)

where u, v, C_x and C_y are in pixel units. From Figure 11, it can be seen that the 3D point P, its image point P' and the focal point O_c are on one line.
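The reconstructed projection equations above can be checked numerically. The small Python sketch below projects a 3D point with the homogeneous pinhole model (4); the focal length, pixel pitch and principal point are made-up example values, not the Bebop's actual calibration.

import numpy as np

# Example intrinsics (illustrative values only, not the Bebop's calibration)
f = 0.0021            # focal length [m]
dx, dy = 6e-6, 6e-6   # pixel width and height [m]
Cx, Cy = 80.0, 175.0  # principal point [pixels]

K = np.array([[f / dx, 0.0,    Cx],
              [0.0,    f / dy, Cy],
              [0.0,    0.0,   1.0]])

def project(P_cam):
    # Pinhole projection of a 3D point in the camera frame to pixel coordinates,
    # i.e. the homogeneous form of the model in equations (2)-(4).
    uvw = K @ P_cam
    return uvw[:2] / uvw[2]

# A gate corner 4 m in front of the camera, 1 m to the right, 0.5 m down.
print(project(np.array([1.0, 0.5, 4.0])))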
Thus, the direction of the light ray from O_c to P can be described by a bearing vector v, which can be expressed in the camera frame by

v^C ∝ [(u − C_x) d_x, (v − C_y) d_y, f]^T

To express the vector v in the earth frame, we introduce two rotation matrices, B_C and E_B. B_C is the rotation matrix from the camera frame C to the body frame B, which is a fixed matrix. E_B is the rotation matrix from the body frame B to the earth frame E, composed of the three Euler angles ψ, θ and φ, which can be measured by the onboard AHRS system. Thus, the bearing vector v can be expressed in the earth frame E by

v^E = E_B B_C v^C

(Figure 11: A pinhole camera model. O_c is the focal point and the origin of the camera frame.)

A line passing through point p with direction v can be written as L(s) = p + s v. The perpendicular distance D(t; p, v) of a point t to the line L(p, v) is

D(t; p, v) = ‖(t − p) − ((t − p) · v̂) v̂‖,   with v̂ = v / ‖v‖

According to the pinhole model, the 4 light rays with bearing vectors v_i from the four corners of the gate should intersect at the focal point t (Figure 12), which is the position of the drone. The bearing vectors can be calculated from the four points' images on the image plane and the camera's intrinsic parameters. This intersection point could be calculated analytically. However, due to the detection error of the gate's corners, the bearing vectors can be wrongly calculated; for example, in Figure 12 the four light rays do not intersect at one point (gray lines). Thus, there is no analytical solution for the camera's position. Instead, a numerical solution is found for the point whose distance to the four light rays is minimal. Hence, estimating the position of the drone can be converted into an optimization problem that finds an optimal point t with minimal distance to the 4 light rays, which can be expressed mathematically by

t* = argmin_t Σ_{i=1..4} D(t; p_i, v_i)^2

which is a least squares problem. When the drone is close to the gate, only the two sidebars can be detected, by the histogram method. With the position of the bars on the image plane, the pose of the drone can be estimated by geometric principles. In Figure 13, α_1 and α_2 are calculated from the positions of the images of the two bars on the image plane and the intrinsic parameters. Then we have

x_h = g_s / (tan α_2 − tan α_1),   y_h = x_h tan α_1

where g_s is the length of the gate. Hence, based on the detection of the histogram peaks in the image (corresponding to α_1 and α_2), we can deduce the lateral position of the camera with respect to the gate (x_h and y_h). Figure 14 shows simulation results where each point represents a thousand trials of the position estimation algorithm in the presence of pixel noise. It can be seen that the error varies mainly as a function of the distance to the gate. The LS method uses prior knowledge of the attitude and heading of the vehicle to obtain a more accurate position estimate. To study the effect of attitude error, noise with a variance of 0, 5 and 15 degrees is added to the attitude and heading estimates. It is clear from the figure that the LS method has far higher accuracy in RMSE compared to the PnP method, even in the presence of relatively large noise in the attitude estimate. Also, the histogram position estimation method is evaluated in simulation. Similar to the LS method, pixel noise with a standard deviation of 3.5 is introduced. Figure 15 shows the results for the position RMSE in the horizontal plane in the x and y directions. The experiment is performed with a heading angle of -30, 0 and 30 degrees. From the figure, it can be observed that the position error of this method is relatively low.
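Before moving on, here is a small numerical sketch of the least-squares ray-intersection step (the LS method referred to above), assuming already-computed bearing vectors in the earth frame and known gate-corner positions. The closed-form normal-equation solution used here is one standard way to solve such a problem, not necessarily the authors' exact solver, and the gate geometry and noise level are invented for illustration.

import numpy as np

def intersect_rays_least_squares(points, directions):
    # Find the point t minimizing the sum of squared perpendicular distances to
    # the rays L_i(s) = p_i + s * v_i (closed-form normal-equation solution).
    A = np.zeros((3, 3))
    b = np.zeros(3)
    for p, v in zip(points, directions):
        v = v / np.linalg.norm(v)
        M = np.eye(3) - np.outer(v, v)   # projector onto the plane orthogonal to v
        A += M
        b += M @ p
    return np.linalg.solve(A, b)

# Four corners of a 1 m gate, 4 m in front of a camera placed at the origin (earth frame).
corners = np.array([[0.5, 0.5, 4.0], [-0.5, 0.5, 4.0], [-0.5, -0.5, 4.0], [0.5, -0.5, 4.0]])
true_cam = np.zeros(3)
# Bearing vectors along the corner rays, lightly perturbed to mimic corner-detection noise;
# each ray is anchored at the corner it passes through.
rng = np.random.default_rng(0)
dirs = [(c - true_cam) + rng.normal(0, 0.01, 3) for c in corners]
print(intersect_rays_least_squares(corners, dirs))   # close to the true camera position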
However, in reality, the method is only effective up to a maximum distance of 1.5 meters, due to the possible background color leading to spurious histogram peaks that are hard to filter out.

Vision-IMU state estimation

In order to close the control loop, state estimation is essential, since the measurements (in our case, the relative position from vision, and the acceleration and angular velocity from the IMU) are noisy and do not directly provide all required states. A Kalman filter is commonly used in such scenarios. It first integrates the angular rates to obtain the rotation matrix from body to earth frame, E_B. Next, E_B is used to rotate the acceleration measured by the accelerometer to the earth frame. Then, the acceleration is integrated twice to obtain the position. Finally, the position measurement is used to correct the position prediction. Usually, UAVs' onboard IMUs are low-cost MEMS units which suffer severely from biases and noise. During the prediction phase, the bias of the accelerometer is integrated twice, which may cause the prediction to deviate from the real position over time. If the position measurement has a relatively high frequency, the deviation of the position prediction can be corrected before it diverges. At the same time, the bias of the IMU can also be estimated as states of the system and should converge in a short time. However, in our case, position measurements come from onboard image processing, which has a low rate of around 20 Hz, and the drone may cover significant durations without vision measurements. In this case, the position prediction may deviate largely before a new position measurement comes, and the bias estimation converges slowly. In this section, we therefore adopt the drone's aerodynamic model in the prediction model of the Kalman filter, which performs better than the classic 15-state Kalman filter. The kinematics of the drone, with the velocity expressed in the body frame, can be described by

[ẋ_E, ẏ_E, ż_E]^T = E_B [v_x^B, v_y^B, v_z^B]^T                 (12)

where x_E, y_E, z_E are the drone's position in the earth frame E and v_x^B, v_y^B, v_z^B are the drone's velocity in the body frame B. One property of the onboard accelerometer is that it measures the specific force F_s in the body frame B instead of the vehicle's acceleration. The specific force in the Z_B direction is mainly caused by the thrust T, under the assumption that the thrust of the quadrotor is aligned with Z_B. The force acting on X_B and Y_B can be caused by many factors, for instance blade flapping, profile drag and translational drag, but it can be approximated as a linear function, assuming that the indoor environment has no wind:

a_x^B = -k_x v_x^B,   a_y^B = -k_y v_y^B

where k_x and k_y are drag coefficients which can be identified off-line. With this property, the accelerometer can actually provide information on the velocity of the drone through

v_x^B = -(a_x^m - b_x^a) / k_x,   v_y^B = -(a_y^m - b_y^a) / k_y     (14)

where a_x^m and a_y^m are the accelerometer measurements and b_x^a and b_y^a are the accelerometer biases. Combining equation 12 and equation 14, we have

[ẋ_E, ẏ_E, ż_E]^T = E_B [-(a_x^m - b_x^a)/k_x, -(a_y^m - b_y^a)/k_y, v_z^B]^T     (15)

In equation 15, the bias only needs to be integrated once to predict the position of the drone, instead of being integrated twice as in the original 15-state Kalman filter, which helps to decrease the prediction error. As mentioned above, the onboard AHRS system is a complementary filter, which on a low level fuses accelerometer and gyro data to estimate the attitude of the drone. It can directly provide the attitude estimate to the outer loop. An AHRS fusing only IMU data may introduce a bias into the attitude estimation; in this paper, we assume that this low-level attitude estimation bias can be neglected. Hence, the AHRS and accelerometer readings can be used as inputs to propagate the prediction model 15.
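To make the drag-model prediction step concrete, the following Python sketch propagates the position with equation 15: the lateral body velocities are obtained algebraically from the bias-corrected accelerometer readings and the drag coefficients, rotated to the earth frame, and integrated once. The drag coefficients, bias values, attitude and time step are illustrative numbers, not identified parameters from the paper.

import numpy as np

def euler_to_R(phi, theta, psi):
    # Rotation matrix E_B from body to earth frame (ZYX Euler angles).
    c, s = np.cos, np.sin
    Rz = np.array([[c(psi), -s(psi), 0], [s(psi), c(psi), 0], [0, 0, 1]])
    Ry = np.array([[c(theta), 0, s(theta)], [0, 1, 0], [-s(theta), 0, c(theta)]])
    Rx = np.array([[1, 0, 0], [0, c(phi), -s(phi)], [0, s(phi), c(phi)]])
    return Rz @ Ry @ Rx

def predict_position(pos_E, v_z_B, acc_meas, bias, attitude, kx, ky, dt):
    # One prediction step of the drag-model filter (equation 15): the lateral body
    # velocity follows directly from the accelerometer, so the accelerometer bias
    # enters the position through a single integration only.
    ax_m, ay_m = acc_meas
    bx, by = bias
    v_B = np.array([-(ax_m - bx) / kx,      # v_x^B from equation 14
                    -(ay_m - by) / ky,      # v_y^B from equation 14
                    v_z_B])                 # vertical velocity kept as a filter state
    v_E = euler_to_R(*attitude) @ v_B
    return pos_E + v_E * dt

# Illustrative values: near-hover attitude and a small forward drag deceleration.
pos = np.zeros(3)
pos = predict_position(pos, v_z_B=0.0, acc_meas=(-0.3, 0.05), bias=(0.02, -0.01),
                       attitude=(0.0, np.deg2rad(-5), 0.0), kx=0.5, ky=0.5, dt=0.02)
print(pos)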
According to Newton's laws of motion, the motion of the drone can be described by the body-frame dynamics in equation 16, where g is the gravitational acceleration and p, q, r are the angular velocities in body frame B measured by the gyro. Since in equation 15 the body velocity already has measurements from the accelerometer, in equation 16 we omit the first two equations and keep only the last one, which, combined with equation 14, results in equation 17. With the assumptions that the gyro bias is small enough to be neglected and that the accelerometer bias changes slowly, and with the states and inputs defined accordingly, a standard EKF prediction/update procedure is performed to estimate the states; it can be found in the Appendix.

To evaluate the performance of the visual navigation method described in this section, a flight test is done on a simplified two-gate track where the drone flies through two gates cyclically (Figure 3). A first experiment aims to gather onboard data to be analyzed off-line. Hence, the Opti-track system is used to provide accurate position measurements to close the loop. It should be noted that the gate is in the drone's field of view only in the straight parts, where the snake gate detection algorithm runs onboard, while the pose estimation and EKF are run off-board. The outer-loop controller is a PD controller combined with Opti-track measurements to steer the drone to align with the center of the gate. In the arc parts, the gates are no longer available for navigation and the drone navigates along an arc purely by state prediction, without the involvement of Opti-track, which will be explained in detail in the next section. The filtering result is shown in Figure 16. During the straight part (purple vision measurements), the EKF runs the state prediction and measurement update loop, and the estimated state curves (red) coincide well with the ground-truth curves (blue). The error distributions between the estimated states and the ground-truth states are shown in Figure 17. All histograms are centered around zero error, but there are still a few estimation errors above 0.2 m in both the x and y error distributions, which explains why a few arcs end up at points more than 0.5 m from the target endpoint, as can be seen in the next section. To clarify the experimental setup and result, a 3D plot of the ground truth and the estimates can be found in Figure 18. In the straight parts, Opti-track is used to help the drone align with the gates, while vision detection runs onboard for logging. In the arc parts, feed-forward control with state prediction is employed, which explains why the arcs end up at slightly different points.

Control strategy

Like the classic control strategy for quadrotors, our control system is divided into an inner-loop controller, which stabilizes the attitude of the quadrotor, and an outer-loop controller, which steers the quadrotor along the desired trajectory. For the inner loop, an INDI controller is employed on-board. For the outer loop, we have two different control strategies for the straight parts and the arc parts respectively (Figure 19). During the straight part, where the drone faces the gate and the gate is available for visual navigation, a PD controller commands a roll maneuver to steer the drone to align with the center of the gate, while the pitch angle is fixed to a certain value θ_0 and the heading is fixed to the same direction as the gate (equation 22). Here the subscript c denotes a command, and the position y is defined in a local frame whose origin is fixed at the center of the gate.
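The straight-part outer-loop command of equation 22 can be written compactly. The sketch below is illustrative only: it uses the PD form quoted for the feasibility simulation later in the paper (k_p = 1, k_v = 2), and the fixed pitch, heading and function name are placeholders rather than the paper's flight values:

import math

def straight_part_commands(y, v_y, theta_0=math.radians(-5.0), psi_gate=0.0, k_p=1.0, k_v=2.0):
    # y, v_y: lateral position and velocity in the gate-centred local frame
    phi_c = k_v * (k_p * (0.0 - y) - v_y)   # roll command steering towards the gate centre
    theta_c = theta_0                        # fixed pitch sets the forward speed
    psi_c = psi_gate                         # heading locked to the gate direction
    return phi_c, theta_c, psi_c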
Figure 19: Two control strategies used in the experiment. When the drone faces the gates (straight parts), a PD controller combined with the Kalman filter steers the drone to align with the gate; after passing through the gate, the drone switches to a feed-forward controller to fly an arc which ends in front of the next gate.

At the point where the drone flies through the gate, no position measurement is available. Thus, the outer-loop controller has to be switched to a pure feed-forward controller, relying on state prediction, to fly a coordinated arc which ends in front of the next gate. To derive the control law in the arc, we first introduce a body-fixed earth frame F (Figure 20), whose origin O_F is at the center of mass of the drone, with X_F along the heading of the drone and Z_F pointing towards the earth. In other words, the only non-zero Euler angle from E to F is the yaw, which equals the drone's yaw angle, so the rotation matrix from E to F is a pure yaw rotation. Expressing Newton's second law in F, we have F = m(∂v/∂t|_F + Ω × v) (equation 23), where ∂v/∂t|_F is the derivative of v in F, F is the force acting on the drone and Ω is the angular velocity of frame F with respect to earth frame E. During the arc, the drone's heading should stay tangent to the arc to maintain a zero-sideslip turn, so the angular velocity of F with respect to E equals the turn rate of the arc (equation 24). Expressing equation 23 in scalar form gives equation 25, where T is the thrust of the drone. During the arc we would like to keep the altitude unchanged, i.e. at the same height as at the start of the arc; setting ∂v^F_z/∂t = 0 in equation 25 yields the thrust and roll angle required for the turn (equations 26-28). Similar to the straight part, the pitch command θ_c is fixed to a certain value. To conclude, during the arc maneuver the control inputs are given by equation 29.

The flight test result can be found in Figure 21. The drone enters the arc at the red points and starts feed-forward control with the control strategy of equation 29. In a feed-forward arc maneuver, θ_c = −5°, r = 1.5 m and each arc takes around 2 s. Before entering the arc, the drone is steered by the feedback control strategy of equation 22, while visual navigation estimates the states of the drone and thereby tells it where to start the arc. Thus, in each lap the red points differ slightly from each other, which is caused by filtering error. It can also be seen that the endpoints (yellow points) of the arc maneuver have a distribution with a larger variance than that of the entry points. This is mainly because state prediction is in principle an integration-based method, which is strongly affected by the accuracy of the initial states. Table 2 shows that the error at the entry points is much smaller in the x direction than in the y direction; as a result, the error at the endpoints is larger in the y axis than in the x axis. This error can also be caused by model inaccuracies and disturbances during the arcs. Thus, the pure feed-forward control strategy is only effective for short durations. In our case, 2 s is enough to steer the drone to the next gate, where visual navigation becomes available and the feedback control strategy can be switched on again. After the arc, the drone detects the gate again and the detection corrects the filtering error, so there is a jump in the filtering result (Figure 22).
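For the arc itself, the text derives the feed-forward thrust and roll from the coordinated-turn condition (equations 26-29), but the extracted equations are not legible here, so the sketch below falls back on the standard coordinated-turn relations as an assumption; the mass, radius and speed values are placeholders:

import math

G = 9.81  # m/s^2

def arc_feedforward(v, r, m, theta_c=math.radians(-5.0)):
    # v: forward speed (m/s), r: arc radius (m), m: vehicle mass (kg)
    yaw_rate = v / r                                          # heading stays tangent to the arc
    phi_c = math.atan2(v * v, G * r)                          # bank angle of a level coordinated turn
    thrust = m * G / (math.cos(phi_c) * math.cos(theta_c))    # vertical force balance at constant altitude
    return phi_c, theta_c, yaw_rate, thrust

In the reported flights, θ_c = −5°, r = 1.5 m and each arc lasts about 2 s; with v ≈ 1.5 m/s this would correspond to a yaw rate of roughly 1 rad/s under these relations.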
For the feedback controller, the control target is to steer the drone to y = 0; in essence, this is a simple step-signal (way-point) tracking problem. Simulations are done to check that the proposed controller can steer the drone through the gate. (Figure 22: during the arc maneuver the drone does not detect the gate, so the state estimate is based purely on prediction (red arc). Due to model inaccuracy and sensor biases, the predicted trajectory diverges from the ground-truth trajectory (blue curve). After the turn, the drone detects the gate again and the estimated position jumps to the ground-truth position; although there is a jump in the state estimate (red curve), the real-world trajectory remains continuous (blue curve).) The simplified drone model is a planar model in which x and y are the position of the drone and v_y is the velocity of the drone in the y direction. We neglect z because in the real-world flight the altitude is controlled by a separate controller that keeps it constant. v_x is an input of the model because, in our real-world experimental setup, θ is set to a constant, which leads to a constant velocity in the x direction; φ is the other input of the model. A PD controller is employed to steer the drone to y = 0 by φ = k_v (k_p (0 − y) − v_y), with k_p = 1 and k_v = 2. Figure 23 shows the simulation results for forward speeds v_x = 1.5 m/s and v_x = 2 m/s. In each figure, 10,000 trajectories are simulated from their own initial points (x_0, y_0). The points to the left of the black curves are the initial points from which the drone can pass through the gate. It can be seen that when the drone's speed gets higher, the number of feasible initial points gets smaller; in other words, the drone needs more distance to adjust its position in order to pass through the gate. In our real-world experimental setup, for example, the forward speed is around 1.5 m/s and the position error in the y axis is 0.8 m, as shown in Table 2, so the drone needs a margin of 2 m in the x direction to steer itself safely through the gate.

Full track experiment setup and result

In the previous sections, we discussed the proposed visual navigation method and control strategies, and the results of the experiments designed to verify our method in a laboratory environment. In this section, we integrate all subsystems and move to a more challenging and realistic environment: a showroom in the basement of the Faculty of Aerospace Engineering, TU Delft, where many aircraft components are displayed. In this showroom, we placed five 1 m × 1 m gates in a corridor surrounded by dense showcases and aircraft components such as flaps, rudders and yokes. The five gates are shown in Figure 24. Compared to the IROS 2017 autonomous drone race, this track has smaller gates, much denser obstacles and a complex background behind the gates, all of which makes it challenging for the drone to fly the whole track fully autonomously. On this track, the drone takes off from the ground and flies through the whole track with θ = −5° or θ = −7°, which leads to forward speeds of around 1.5 m/s and 1.8 m/s respectively. This is faster than the winner of the 2016 autonomous drone race, who flew through 10 gates in 86 s, i.e. at a velocity of around 0.5 m/s. The onboard images and the flight result can be found in Figure 24 and Figure 25.
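Returning to the gate-passing feasibility simulation discussed earlier in this section, a minimal closed-loop integration of the simplified planar model can reproduce the kind of pass/no-pass map shown in Figure 23. This is a sketch under our own assumptions: the lateral dynamics are modelled as v̇_y = g·tan(φ) with a saturated roll command, which is not necessarily the exact simplified model used by the authors:

import math

G, DT = 9.81, 0.01
KP, KV = 1.0, 2.0                      # gains quoted for the simulation

def simulate(x0, y0, vx=1.5, t_end=5.0):
    x, y, vy = x0, y0, 0.0
    traj = [(x, y)]
    for _ in range(int(t_end / DT)):
        phi = KV * (KP * (0.0 - y) - vy)                           # PD roll command towards y = 0
        phi = max(-math.radians(40), min(math.radians(40), phi))   # saturate the command
        vy += G * math.tan(phi) * DT
        x += vx * DT
        y += vy * DT
        traj.append((x, y))
    return traj

Sweeping (x0, y0) over a grid and checking whether |y| is small enough when x reaches the gate plane gives the feasible-initial-point regions discussed in the text.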
The environment is not equipped with a ground-truth positioning system, so only estimated data are available. However, analyzing the estimated trajectory does give an insight into the flight and estimation performance in general. It can be observed that during some parts of the track rapid changes in position occur. These jumps in the position estimate occur once the next gate is first detected after a long period without seeing a gate. During this period the position estimate relies only on the integration of the drag-based velocity; errors in this prediction introduce an accumulating drift in the position estimate, which is corrected when a gate detection becomes available again. After the correction, the lateral position controller has enough time to steer the drone through the gate. Although in most cases the drone passes through the gate, there are still some failure cases in which the drone crashes into the gate. They are caused by non-detection of the gates, or by very late detection when the drone is already very close to the gate; in these two scenarios, the drone has to control itself purely based on prediction, or has no time to adjust its position. In our basement experiment, the poor quality of the onboard images leads to these non-detection problems. In terms of the open-loop control strategy, with the estimated linear aerodynamic model we find that the control performance is very accurate over short durations. For example, after the second gate there is a pole close to the arc (Figure 25), but the drone never crashed into this pole.

Conclusion and future work

In this paper, we present a systematic scheme to accomplish the task of autonomous drone racing, as held by IROS in 2017. In our work, a novel and computationally efficient gate detection method is implemented onboard a Parrot Bebop 1 drone, with all algorithms executed at a frequency of 20 Hz. With the detected gates, we employ a pose estimation scheme combining the detections with onboard AHRS estimation, which has higher accuracy than the commonly used P3P method. A more efficient Kalman filter is then implemented onboard, which converges faster than a traditional 15-state Kalman filter. In terms of the control strategy, a prediction-based feed-forward control strategy is used to control the drone during the short intervals without position measurements. Finally, the whole system is tested in a showroom with dense showcases and aircraft components. In this flight test, the average speed reached 1.5 m/s, which is higher than the speeds exhibited at the autonomous drone races in 2016 and 2017. There are multiple directions for future work. For instance, the visual processing is essentially based on color detection; higher robustness may be reached by employing machine learning methods in computer vision. Also, a PD controller is used to steer the drone through the gate, which makes the trajectory sub-optimal and can in the long term lead to overshoot; this can be improved, e.g., by utilizing optimal control methods. We hope that such future improvements will allow further increasing the flight speed, hopefully approaching human pilot performance.

(From the Appendix: R_k is the sensor noise covariance matrix; step (5) updates the covariance matrix of the state estimation error.)
from modules.world import World, Landmark, Map, Goal
from modules.grid_map_2d import GridMap2D
from modules.robot import IdealRobot
from modules.sensor import IdealCamera, Camera
from modules.agent import Agent, EstimationAgent, GradientAgent
from modules.gradient_pfc import GradientPfc
from modules.mcl import Particle, Mcl
import math
import numpy as np

if __name__ == '__main__':
    time_interval = 0.1
    world = World(200, time_interval, debug=False, recording_file_name='std(0.3_0.3)_回避行動0秒-1秒', playback_speed=3)
    # world = World(150, time_interval, debug=False)
    m = Map()

    ### Add the occupancy grid map ###
    grid_map = GridMap2D('CorridorGimp_200x200', origin=[-5.0, -5.0])
    # world.append(grid_map)

    ## Add the goal ##
    goal = Goal(1.75, 3.0)  # keep the goal in a variable
    world.append(goal)

    ### Create the robot ###
    # Initial pose
    init_pose = np.array([-4.5, 0.5, 0])
    # Standard deviations of the initial pose estimate
    init_pose_stds = np.array([0.3, 0.3, 0.01])
    # Standard deviations of the motion update noise
    # motion_noise_stds = {"nn": 0.19, "no": 0.001, "on": 0.13, "oo": 0.2}
    motion_noise_stds = {"nn": 0.01, "no": 0.01, "on": 0.01, "oo": 0.01}
    # Estimator
    estimator = Mcl(m, init_pose, 300, motion_noise_stds=motion_noise_stds, init_pose_stds=init_pose_stds)
    # Agent
    agent = GradientPfc(time_interval, 0.1, 0.5, np.deg2rad(90), estimator, grid_map, goal, magnitude=2, draw_direction=False, draw_p_gradient=False)
    # Robot
    robot = IdealRobot(init_pose, sensor=Camera(m), agent=agent)
    world.append(robot)

    world.draw()
// Add adds an identifier to the namespace. If the name is already taken, // an error is returned. func (ns Namespace) Add(id string) error { if ns[id] { return fmt.Errorf("identifier redeclared: %v", id) } ns[id] = true return nil }
/** editable property representing a primitive that is directly editable */ class EditablePrimitiveProperty extends EditableProperty { /** property's units */ final private String UNITS; /** Constructor */ protected EditablePrimitiveProperty( final String pathPrefix, final Object target, final PropertyDescriptor descriptor ) { super( pathPrefix, target, descriptor ); UNITS = fetchUnits(); } /** fetch the units */ private String fetchUnits() { // first check to see if there is a Units annotation (ideal when known at compile time) on the accessor method and use it otherwise fallback to fetching by unit property methods final Method readMethod = PROPERTY_DESCRIPTOR.getReadMethod(); final Units units = readMethod != null ? readMethod.getAnnotation( Units.class ) : null; if ( units != null ) { return units.value(); } else { // unit property methods allow for dynamic units (i.e. units not known at runtime) // form the accessor as get<PropertyName>Units() replacing <PropertyName> with the property's name whose first character is upper case final char[] nameChars = getName().toCharArray(); nameChars[0] = Character.toUpperCase( nameChars[0] ); // capitalize the first character of the name final String propertyName = String.valueOf( nameChars ); // property name whose first character is upper case // first look for a method of the form get<PropertyName>Units() taking no arguments and returning a String final String unitsAccessorName = "get" + propertyName + "Units"; try { final Method unitsAccessor = TARGET.getClass().getMethod( unitsAccessorName ); if ( unitsAccessor.getReturnType() == String.class ) { return (String)unitsAccessor.invoke( TARGET ); } } catch ( NoSuchMethodException exception ) { // fallback look for a method of the form getUnitsForProperty( String name ) returning a String try { final Method unitsAccessor = TARGET.getClass().getMethod( "getUnitsForProperty", String.class ); if ( unitsAccessor.getReturnType() == String.class ) { return (String)unitsAccessor.invoke( TARGET, getName() ); } return ""; } catch( Exception fallbackException ) { return ""; } } catch( Exception exception ) { System.out.println( exception ); return ""; } return ""; } } /** determine whether the property is a container */ public boolean isContainer() { return false; } /** determine whether the property is a primitive */ public boolean isPrimitive() { return true; } /** Set the value for this property */ public void setValue( final Object value ) { if ( TARGET != null && PROPERTY_DESCRIPTOR != null ) { final Method setter = PROPERTY_DESCRIPTOR.getWriteMethod(); try { setter.invoke( TARGET, value ); } catch( Exception exception ) { throw new RuntimeException( "Cannot set value " + value + " on target: " + TARGET + " with descriptor: " + PROPERTY_DESCRIPTOR.getName(), exception ); } } else { if ( TARGET == null && PROPERTY_DESCRIPTOR == null ) { throw new RuntimeException( "Cannot set value " + value + " on target because both the target and descriptor are null." ); } else if ( TARGET == null ) { throw new RuntimeException( "Cannot set value " + value + " on target with descriptor: " + PROPERTY_DESCRIPTOR.getName() + " because the target is null." ); } else if ( PROPERTY_DESCRIPTOR == null ) { throw new RuntimeException( "Cannot set value " + value + " on target: " + TARGET + " because the property descriptor is null." 
);
			}
		}
	}


	/** Get the units */
	public String getUnits() {
		return UNITS;
	}


	/** Get a string representation of this property */
	public String toString() {
		return getPath() + ": " + getValue() + " " + getUnits();
	}
}
<gh_stars>1-10 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Nbdkit(AutotoolsPackage): """NBD(Network Block Device) is a protocol for accessing Block Devices (hard disks and disk-like things) over a Network. nbdkit is a toolkit for creating NBD servers.""" homepage = "https://github.com/libguestfs/nbdkit" url = "https://github.com/libguestfs/nbdkit/archive/v1.23.7.tar.gz" version('1.23.7', sha256='70909721f60f06abadfac8646b37f942ceeaf73ce88909ab48402175ae1b6391') version('1.23.6', sha256='5a62cbcc41143a90c204d4a48ebe13225f21776fbc4e8fe8ca59531bb1c751fc') version('1.23.5', sha256='d07aa309b7d6f088a491fbbe645f23d56cd6e68995c4b73fb5bb609fc6b0de53') version('1.23.4', sha256='6581e6cc6dbcb42451abad096efd4e1016b3a0f0d1c7a1724d0a76259ab96429') version('1.23.3', sha256='78f14b00c771733047abcf882e715f62bb19820a6571cae0ccb5f965054697c6') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build')
def _set_commands(click_group: click.core.Group): config_path = _get_config_path() config = ConfigParser.from_files(config_path) option_names = config.options('fetchme') for i in option_names: func = _get_command_func(i, config) click_group.command(name=i)(click.pass_context(func))
// .scannerwork/css-bundle/node_modules/micromark/lib/parse.d.ts
import {ParseOptions, Parser} from './shared-types'

declare function createParser(options?: ParseOptions): Parser

export default createParser
def read_string(self, length_format): length = self.read(length_format) bs = self.read_farray('s', length)[0] return bs.decode('utf_8')
/** * An attribute modifier specifically for body tags. * <p> * Panels have associated markup files and if they contain <code>&lt;wicket:head&gt;</code> and * <code>&lt;body onLoad="..."&gt;</code> then the body's <code>onLoad</code> attribute will be * appended to the page's onLoad attribute. That accretion happens by means of an AttributeModifier * which the Panel adds to the body container. In the case where the Panel is removed or replaced, * then the AttributeModifier must be removed or at least disabled. That exactly is what this * special purpose AttributeModifier does, i.e. it disables itself if the owner component (Panel) is * removed or replaced. * * @author Juergen Donnerstag */ public final class BodyTagAttributeModifier extends AttributeModifier { private static final long serialVersionUID = 1L; /** * Make sure we don't keep a reference to the component longer than really needed. */ private transient WeakReference<Component> componentReference; /** * Create a new attribute modifier with the given attribute name and model to replace with. The * additional boolean flag specifies whether to add the attribute if it is not present. * * @param attribute * The attribute name to replace the value for * @param addAttributeIfNotPresent * Whether to add the attribute if it is not present * @param replaceModel * The model to replace the value with * @param behaviorOwner * The component which created (owns) the modifier */ public BodyTagAttributeModifier(final String attribute, final boolean addAttributeIfNotPresent, final IModel< ? > replaceModel, final Component behaviorOwner) { super(attribute, addAttributeIfNotPresent, replaceModel); init(behaviorOwner); } /** * Create a new attribute modifier with the given attribute name and model to replace with. The * attribute will not be added if it is not present. * * @param attribute * The attribute name to replace the value for * @param replaceModel * The model to replace the value with * @param behaviorOwner * The component which created (owns) the modifier */ public BodyTagAttributeModifier(final String attribute, final IModel< ? > replaceModel, final Component behaviorOwner) { super(attribute, replaceModel); init(behaviorOwner); } /** * Create a new attribute modifier with the given attribute name and expected pattern to match * plus the model to replace with. A null pattern will match the attribute regardless of its * value. The additional boolean flag specifies whether to add the attribute if it is not * present. * * @param attribute * The attribute name to replace the value for * @param pattern * The pattern of the current attribute value to match * @param addAttributeIfNotPresent * Whether to add the attribute if it is not present and the replacement value is not * null * @param replaceModel * The model to replace the value with * @param behaviorOwner * The component which created (owns) the modifier */ public BodyTagAttributeModifier(final String attribute, final String pattern, final boolean addAttributeIfNotPresent, final IModel< ? > replaceModel, final Component behaviorOwner) { super(attribute, pattern, addAttributeIfNotPresent, replaceModel); init(behaviorOwner); } /** * Create a new attribute modifier with the given attribute name and expected pattern to match * plus the model to replace with. A null pattern will match the attribute regardless of its * value. The attribute will not be added if it is not present. 
* * @param attribute * The attribute name to replace the value for * @param pattern * The pattern of the current attribute value to match * @param replaceModel * The model to replace the value with * @param behaviorOwner * The component which created (owns) the modifier */ public BodyTagAttributeModifier(final String attribute, final String pattern, final IModel< ? > replaceModel, final Component behaviorOwner) { super(attribute, pattern, replaceModel); init(behaviorOwner); } /** * Initialize * * @param behaviorOwner * The component which creates (owns) the modifier */ private void init(final Component behaviorOwner) { if (behaviorOwner != null) { componentReference = new WeakReference<Component>(behaviorOwner); } } /** * @see org.apache.wicket.AttributeModifier#newValue(java.lang.String, java.lang.String) */ @Override protected String newValue(final String currentValue, final String replacementValue) { // If no behavior owner has been provided, than behave as if this // were a standard normal attribute modifier if (componentReference != null) { // Get the owner of the attribute modifier (e.g. the Panel, not the // Body) final Component behaviorOwner = componentReference.get(); // If case the components memory has been GCed already, than disable // the attribute modifier and return the attribute value unchanged. if (behaviorOwner == null) { setEnabled(false); return currentValue; } // It must have a Page, otherwise one of its parents has been // removed. No Page, than disable the attribute modifier and // return the attribute value unchanged. // Component.findPage() is 'protected'. But this works as well. if (!(behaviorOwner instanceof Page) && behaviorOwner.findParent(Page.class) == null) { setEnabled(false); return currentValue; } // And the "Panel" must be visible. Wicket core tests only // that the body (the component the attribute modifier is // attached to) is visible. if (behaviorOwner.isVisibleInHierarchy() == false) { return currentValue; } } if (currentValue != null && !currentValue.trim().endsWith(";")) { return currentValue + ";" + replacementValue; } return (currentValue == null ? replacementValue : currentValue + replacementValue); } /** * AttributeModifiers must be Serialzable but WeakReferences are not. Hence, we need to * implement our read/write methods to properly support it. * * @see Serializable * * @param inputStream * The input stream to read the object from * @throws IOException * @throws ClassNotFoundException */ private void readObject(final ObjectInputStream inputStream) throws IOException, ClassNotFoundException { inputStream.defaultReadObject(); final Object object = inputStream.readObject(); if (object != null) { componentReference = new WeakReference<Component>((Component)object); } } /** * AttributeModifiers must be Serialzable but WeakReferences are not. Hence, we need to * implement our read/write methods to properly support it. * * @see Serializable * * @param outputStream * @throws IOException */ private void writeObject(final ObjectOutputStream outputStream) throws IOException { outputStream.defaultWriteObject(); if (componentReference != null) { outputStream.writeObject(componentReference.get()); } else { outputStream.writeObject(null); } } }
Effects of Red Mud Addition in the Microstructure, Durability and Mechanical Performance of Cement Mortars Recently, there has been a great effort to incorporate industrial waste into cement-based materials to reach a more sustainable cement industry. In this regard, the Bayer process of obtaining alumina from bauxite generates huge amounts of waste called red mud. Few research articles have pointed out the possibility that red mud has pozzolanic activity. In view of that, the objective of this research is to analyse the short-term effects in the pore structure, mechanical performance and durability of mortars which incorporate up to 20% of red mud as a clinker replacement. As a reference, ordinary Portland cement and fly ash Portland cement mortars were also studied. The microstructure was characterised through mercury intrusion porosimetry and non-destructive impedance spectroscopy, which has not previously been used for studying the pore network evolution of red mud cement-based materials. The possible pozzolanic activity of red mud has been checked using differential scanning calorimetry. The non-steady state chloride migration coefficient and the mechanical properties were studied too. According to the results obtained, the addition of red mud entailed a greater microstructure refinement of the mortar, did not worsen the resistance against chloride ingress and reduced the compressive strength compared to control binders.
/* * Copyright (c) "Neo4j" * Neo4j Sweden AB [http://neo4j.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.io.pagecache.stress; import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; import org.neo4j.io.pagecache.PageCursor; public class RecordFormat { private final int numberOfThreads; private final int pagePayloadSize; private final int fieldSize; private final int checksumFieldOffset; private final int recordSize; public RecordFormat(int numberOfThreads, int pagePayloadSize) { this.numberOfThreads = numberOfThreads; this.pagePayloadSize = pagePayloadSize; this.fieldSize = Long.BYTES; this.checksumFieldOffset = numberOfThreads * fieldSize; this.recordSize = checksumFieldOffset + fieldSize; // extra field for keeping the checksum. } public int getRecordSize() { return recordSize; } public int getRecordsPerPage() { return pagePayloadSize / getRecordSize(); } public int getFilePayloadSize() { return getRecordsPerPage() * getRecordSize(); } /** * Assume the given cursor is writable and has already been positioned at the record offset. */ public long incrementCounter(PageCursor cursor, int threadId) { int recordOffset = cursor.getOffset(); int fieldOffset = recordOffset + (fieldSize * threadId); int checksumOffset = recordOffset + checksumFieldOffset; long newValue = 1 + cursor.getLong(fieldOffset); cursor.putLong(fieldOffset, newValue); cursor.putLong(checksumOffset, 1 + cursor.getLong(checksumOffset)); return newValue; } /** * Sum up the fields for the given thread for all records on the given page. */ public long sumCountsForThread(PageCursor cursor, int threadId) throws IOException { int recordsPerPage = getRecordsPerPage(); int fieldOffset = fieldSize * threadId; long sum; do { sum = 0; for (int i = 0; i < recordsPerPage; i++) { sum += cursor.getLong((i * recordSize) + fieldOffset); } } while (cursor.shouldRetry()); return sum; } /** * Verify the checksums on all the records on the given page */ public void verifyCheckSums(PageCursor cursor) throws IOException { int recordsPerPage = getRecordsPerPage(); for (int i = 0; i < recordsPerPage; i++) { int recordOffset = i * recordSize; long expectedChecksum; long actualChecksum; do { actualChecksum = 0; for (int j = 0; j < numberOfThreads; j++) { actualChecksum += cursor.getLong(recordOffset + (j * fieldSize)); } expectedChecksum = cursor.getLong(recordOffset + checksumFieldOffset); } while (cursor.shouldRetry()); String msg = "Checksum for record " + i + " on page " + cursor.getCurrentPageId(); assertThat(actualChecksum).describedAs(msg).isEqualTo(expectedChecksum); } } }
def load_yaml(filename): def parse_value(value): if "#" in value: value = value[:value.index("#")] value = value.strip(" \n") if not value: return None if value.lower() == "true": return True if value.lower() == "false": return False try: return int(value) except: try: return float(value) except: return value result = {} current_key = None with open(filename) as f: for line in f.readlines(): if ":" in line: key, value = line.split(":", 1) key = key.strip() current_key = key result[key] = parse_value(value) elif line.strip().startswith("-"): value = line.strip(" -\n") if not isinstance(result[current_key], list): result[current_key] = [parse_value(value)] else: result[current_key].append(parse_value(value)) return result
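A quick usage sketch of the minimal parser above; the file name and contents are hypothetical. It only supports flat "key: value" pairs, booleans, numbers, inline "#" comments and simple dash lists:

sample = (
    "name: drone-racer   # inline comments are stripped\n"
    "debug: false\n"
    "rate: 20\n"
    "gains:\n"
    "  - 1.0\n"
    "  - 2.0\n"
)

with open("sample_config.yaml", "w") as f:
    f.write(sample)

print(load_yaml("sample_config.yaml"))
# {'name': 'drone-racer', 'debug': False, 'rate': 20, 'gains': [1.0, 2.0]}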
Socio-economic status influences the relationship between obesity and antenatal depression: Data from a prospective cohort study

Background: Obesity has been associated with increased risk of antenatal depression, but little is known about this relationship. This study tested whether socio-economic status (SES) influences the relationship between obesity and antenatal depression.

Methods: Data were taken from the Screening for Pregnancy Endpoints (SCOPE) cohort. BMI was calculated from measured height and weight at 15±1 weeks' gestation. Underweight women were excluded. SES was indicated by self-reported household income (dichotomised around the median: low SES ≤ 45,000; high SES > 45,000). Antenatal depression was defined as scoring ≥ 13 on the Edinburgh Postnatal Depression Scale at both 15±1 and 20±1 weeks' gestation, to identify persistently elevated symptoms of depression.

Results: Five thousand five hundred and twenty-two women were included in these analyses and 5.5% had persistently elevated antenatal depression symptoms. There was a significant interaction between SES and BMI on the risk of antenatal depression (p = 0.042). Among high-SES women, obese women had approximately double the odds of antenatal depression compared with normal-weight controls (AOR 2.11, 95% CI 1.16-3.83, p = 0.014, adjusted for confounders). Among low-SES women there was no association between obesity and antenatal depression. The interaction effect was robust to alternative indicators of SES in sensitivity analyses.

Limitations: 1) Antenatal depression was assessed with a self-reported screening measure; and 2) potential mediators such as stigma and poor body-image could not be examined.

Conclusions: Obesity was only associated with increased risk of antenatal depression among high-SES women in this sample. Healthcare professionals should be aware that antenatal depression is more common among low-SES women, regardless of BMI category. (http://creativecommons.org/licenses/by/4.0/)
import unittest from metaci.users.models import User class TestUser(unittest.TestCase): def setUp(self): self.user = User(username="testuser") def test__str__(self): self.assertEqual( self.user.__str__(), "testuser", ) def test_get_absolute_url(self): self.assertEqual(self.user.get_absolute_url(), "/users/testuser/")
import { FormlyFieldConfig } from '@ngx-formly/core'; import { textField } from './text.field'; import { cityField, countryField, stateField, zipCodeField } from './text.additional.field'; import { flexLayoutWrapper } from '../../wrapper/wrapper'; import { FieldConfig } from '../../field'; import { repeatArrayField } from '../array/array.field'; export const ADDRESS_LINE_MAX_LENGTH = 100; export function addressFormlyFields(): FormlyFieldConfig[] { return [ textField({ key: 'line1', label: 'Line 1', placeholder: '', required: false, autocomplete: 'address-line1', maxLength: ADDRESS_LINE_MAX_LENGTH }), textField({ key: 'line2', label: 'Line 2', placeholder: '', required: false, autocomplete: 'address-line2', maxLength: ADDRESS_LINE_MAX_LENGTH }), flexLayoutWrapper( [ { field: cityField({}) }, { field: stateField({}) }, { field: zipCodeField({}) }, { field: countryField({}) } ], { size: 1, relative: true } ) ]; } export type AddressFieldConfig = FieldConfig; export function addressField({ key = 'address', required = false }: Partial<AddressFieldConfig> = {}): FormlyFieldConfig { return { key, wrappers: ['section'], props: { label: 'Address', placeholder: '', required }, fieldGroup: addressFormlyFields() }; } export interface AddressListFieldConfig extends FieldConfig { maxAddresses?: number; } export function addressListField({ key = 'addresses', required = false, maxAddresses = 6 }: Partial<AddressListFieldConfig> = {}): FormlyFieldConfig { return repeatArrayField({ key, required, label: 'Addresses', labelForField: 'Address', addText: 'Add Address', removeText: 'Remove Address', maxLength: maxAddresses, repeatFieldGroup: addressFormlyFields() }); }
def script_batch_masking(config): core.error.ifas_info("Running the batch script for all mask scripts. " "All mask scripts will be run according to the " "configuration file.") if (len(core.config.extract_configuration( config_object=config, keys=['mask_file_name'])) != 0): core.error.ifas_warning(core.error.ConfigurationWarning, ("The configuration parameters contain a " "`mask_file_name` that is not empty. Batch " "script masking does not support a single " "file name for all masks. It will be " "ignored.")) script_functions = core.runtime.get_script_functions() script_mask_prefix = 'script_mask' for keydex, scriptdex in script_functions.items(): if (script_mask_prefix in keydex): core.error.ifas_info("Calling the script mask function: {script}" .format(script=keydex)) config['mask_file_name'] = core.strformat.remove_prefix( string=keydex, prefix='script_') __ = scriptdex(config=config) elif (script_mask_prefix not in keydex): continue else: raise core.error.BrokenLogicError return None
/** * Removes a team category.<br> * <b>Warning:</b> Removes all the teams in the category. * @param tcId id of the removed team category */ public void removeTeamCategory(int tcId) { TeamCategory cat = teamCategories.remove(tcId); for(Team t : cat.getAllTeams()) { t.removeAllContestants(); } teams.remove(cat); }
Nanometer Interconnect Test Structure for Modeling of Process Variation

With increasing interconnection density and a doubling of the number of layers in VLSI, the interconnect line width, pitch, and dielectric layer thickness vary within the same chip due to process variation, and the resulting changes in interconnect parasitics ultimately affect circuit performance and yield. IC designers therefore need an accurate BEOL corner model to support circuit design. Standard Interconnect Performance Parameters (SIPPs) are the standard way to characterize the BEOL performance of ultra-large-scale integrated circuits. We designed parallel-plate, layer-skipping parallel-plate, comb-meander, and comb-meander via-resistance test structures to extract SIPPs according to their different sensitivities, generated them automatically as CIF files with high-level Perl scripts, converted them with the Cadence layout software to the GDSII format widely used for wafers, and passed electrical rule checks. This greatly improves the efficiency of test-structure design and realization, and lays the foundation for formulating Design for Manufacturability physical design rules and for further research on interconnect statistical models under nanometer technologies with their unique physical phenomena.
import pygame from math import sqrt from random import randint class Box: SPEED = 10 def __init__(self, pos_offsets: dict, square_length: int, color: tuple, surface: pygame.Surface, debug=False): self.pos_offsets = pos_offsets self.length = square_length self.tile_num = int(sqrt(len(pos_offsets.keys()))) self.current_tile = (0, 0) self.pos_x = pos_offsets[self.current_tile][0] self.pos_y = pos_offsets[self.current_tile][1] self.color = color self.surface = surface self.detect_mode = False self.debug_mode = debug def draw(self): pygame.draw.rect(self.surface, self.color, (self.pos_x, self.pos_y, self.length, self.length)) ''' if self.debug_mode: print(f'drawn : {(self.pos_x, self.pos_y, self.pos_x + self.length, self.pos_y + self.length)}')''' def set_tile_pos(self, tile_x, tile_y): assert 0 <= tile_x < self.tile_num and 0 <= tile_y < self.tile_num self.current_tile = (tile_x, tile_y) self.pos_x, self.pos_y = self.pos_offsets[self.current_tile] if self.debug_mode: print(f'current tile : {self.current_tile}') print(f'current coordinate : {(self.pos_x, self.pos_y)}') def set_color(self, color: tuple): self.color = color def toggle_detect_mode(self): self.detect_mode = not self.detect_mode class action: @staticmethod def __get_valid_tile_move(obj: Box): length = obj.tile_num tile_x, tile_y = obj.current_tile moves = [(1, 0), (-1, 0), (0, 1), (0, -1)] next_moves = [] for move_x, move_y in moves: if 0 <= tile_x + move_x < length and 0 <= tile_y + move_y < length: next_moves.append((tile_x + move_x, tile_y + move_y)) return next_moves @staticmethod def move_left(obj: Box): tile_x, tile_y = obj.current_tile if 0 < tile_y: tile_y -= 1 obj.set_tile_pos(tile_x, tile_y) @staticmethod def move_right(obj: Box): length = obj.tile_num tile_x, tile_y = obj.current_tile if tile_y < length - 1: tile_y += 1 obj.set_tile_pos(tile_x, tile_y) @staticmethod def move_down(obj: Box): length = obj.tile_num tile_x, tile_y = obj.current_tile if tile_x < length - 1: tile_x += 1 obj.set_tile_pos(tile_x, tile_y) @staticmethod def move_up(obj: Box): tile_x, tile_y = obj.current_tile if 0 < tile_x: tile_x -= 1 obj.set_tile_pos(tile_x, tile_y) @staticmethod def move_random(obj: Box): next_moves = action.__get_valid_tile_move(obj) idx = randint(0, len(next_moves) - 1) obj.set_tile_pos(next_moves[idx][0], next_moves[idx][1]) class change_color: def __init__(self, color: tuple): self.color = color def __call__(self, obj: Box): obj.set_color(self.color) class Sequential: def __init__(self, *args): self.args = args def __call__(self, obj: Box): for arg in self.args: arg(obj)
def show_ui_console(self, show): p_geo = GXContext._get_tls_geo() return p_geo.show_ui_console(show)
<gh_stars>0 package com.example.conor.a1rmtracker; import android.content.ContentValues; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Log; import android.util.SparseArray; /** * Created by Conor on 15/11/16. */ public class DatabaseHelper extends SQLiteOpenHelper { Context context; // DB Info private static final String DATABASE_NAME = "1RM_Tracker"; private static final int DATABASE_VERSION = 1; //List of tables private static final String TABLE_FORMULA = "Formula"; private static final String TABLE_EXERCISELOG = "ExerciseLog"; //Column List private static final String KEY_ID = "_id"; private static final String KEY_NAME = "Name"; public static final String KEY_DATE = "date"; public static final String KEY_EXERCISE = "exercise"; public static final String KEY_WEIGHT = "weight"; public static final String KEY_REPS = "reps"; //Sparse array to map int -> string equivalent //SparseArray: More efficient HashMap for mapping ints -> objects. Suggested by Android Studio when I tried using a HashMap public static final SparseArray<String> numMap = new SparseArray<String>() {{ put(1, "One"); put(2, "Two"); put(3, "Three"); put(4, "Four"); put(5, "Five"); put(6, "Six"); put(7, "Seven"); put(8, "Eight"); put(9, "Nine"); put(10, "Ten"); }}; //Creates private static final String CREATE_TABLE_FORMULA = "CREATE TABLE " + TABLE_FORMULA + "(" + KEY_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " + KEY_NAME + " TEXT UNIQUE NOT NULL, " + numMap.get(1) + " REAL, " + numMap.get(2) + " REAL, " + numMap.get(3) + " REAL, " + numMap.get(4) + " REAL, " + numMap.get(5) + " REAL, " + numMap.get(6) + " REAL, " + numMap.get(7) + " REAL, " + numMap.get(8) + " REAL, " + numMap.get(9) + " REAL, " + numMap.get(10) + " REAL" + ");"; private static final String CREATE_TABLE_EXERCISELOG = "CREATE TABLE " + TABLE_EXERCISELOG + "(" + KEY_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " + KEY_DATE + " TEXT NOT NULL, " + KEY_EXERCISE + " TEXT NOT NULL, " + KEY_WEIGHT + " REAL NOT NULL, " + KEY_REPS + " INTEGER NOT NULL" + ");"; public DatabaseHelper(Context context){ super(context, DATABASE_NAME, null, DATABASE_VERSION); this.context = context; } @Override public void onCreate(SQLiteDatabase db){ db.execSQL(CREATE_TABLE_EXERCISELOG); db.execSQL(CREATE_TABLE_FORMULA); //Initial inserts into Formula table //Retrieve values from nested array. 
Ref: http://stackoverflow.com/questions/4326037/android-resource-array-of-arrays Resources res = context.getResources(); TypedArray formulae = res.obtainTypedArray(R.array.formulae); for(int i = 0; i < formulae.length(); i++){ int id = formulae.getResourceId(i, -1); if(id != -1) { String[] data = res.getStringArray(id); ContentValues vals = new ContentValues(); vals.put(KEY_NAME, data[0]); for (int j = 1; j < data.length; j++) { vals.put(numMap.get(j), data[j]); } db.insert(TABLE_FORMULA, null, vals); }else{ //TODO: Throw exception } } } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("DROP TABLE IF EXISTS " + TABLE_EXERCISELOG); db.execSQL("DROP TABLE IF EXISTS " + TABLE_FORMULA); onCreate(db); } /***** EXERCISE METHODS ******/ public long logExercise(String date, String exercise, float weight, int reps){ ContentValues vals = new ContentValues(); vals.put(KEY_DATE, date); vals.put(KEY_EXERCISE, exercise); vals.put(KEY_WEIGHT, weight); vals.put(KEY_REPS, reps); SQLiteDatabase db = this.getWritableDatabase(); return db.insert(TABLE_EXERCISELOG, null, vals); } public Cursor getAllExerciseLogs(){ String query = "SELECT * FROM " + TABLE_EXERCISELOG; SQLiteDatabase db = this.getReadableDatabase(); Cursor c = db.rawQuery(query, null); return c; } /***** FORMULA METHODS *****/ //Get the average percent for a given rep public float getAvg(int rep){ float ret = -1; SQLiteDatabase db = this.getReadableDatabase(); String query = "SELECT AVG(" + numMap.get(rep) + ") FROM " + TABLE_FORMULA; Cursor res = db.rawQuery(query, null); if(res != null){ res.moveToFirst(); ret = res.getFloat(0); }else{ //Throw exception } return ret; } }
def durationSeconds(self):
    """Return the media duration in seconds, or 0.0 if it cannot be parsed."""
    f = 0.0
    if self.isVideo() or self.isAudio():
        if self.__dict__['duration']:
            try:
                f = float(self.__dict__['duration'])
            except Exception as e:
                print "Non-numeric duration"
    return f
package com.pj.squashrestapp.dto;

import java.util.ArrayList;
import java.util.List;
import lombok.Getter;

/** */
@Getter
public class XpPointsForTable {

  private final String type;
  private final String split;
  private final int numberOfPlayers;
  private final List<XpPointsDto> xpPoints;

  public XpPointsForTable(final String type, final String split, final int numberOfPlayers) {
    this.type = type;
    this.split = split;
    this.numberOfPlayers = numberOfPlayers;
    this.xpPoints = new ArrayList<>();
  }

  public void addPoints(final XpPointsDto xpPointsDto) {
    this.xpPoints.add(xpPointsDto);
  }

  @Override
  public String toString() {
    return split;
  }
}
/** * Distributed lock depends on File system. * Usage: * <code> * Lock lock = Watcher.getInstance().acquire(); * ... * try { * if (lock != null) { * lock.lock(); * * // TODO * logger.info(Thread.currentThread().getName() + " is selling #" + (tickets--) + " with Lock#" + lock.id()); * } * } catch (ApplicationException e) { * e.printStackTrace(); * } finally { * if (lock != null) { * try { * lock.unlock(); * } catch (ApplicationException e) { * e.printStackTrace(); * } * } * } * </code> * * @author James Zhou */ public class DistributedLock implements Lock { private static final Logger logger = Logger.getLogger(Watcher.class.getName()); private String id; private final Watcher watcher = Watcher.getInstance(); public DistributedLock() { this.id = UUID.randomUUID().toString(); // Set event listener. this.watcher.addListener(new LockEventListener(this)); } public DistributedLock(byte[] idb) { this.id = new LockKey(idb).value(); // Set event listener. this.watcher.addListener(new LockEventListener(this)); } @Override public void lock() throws ApplicationException { // If try lock successfully, then the lock does exist, then don't need to lock. // And continue to work on the next steps. if (!tryLock()) { lock(); } } @Override public boolean tryLock() throws ApplicationException { return tryLock(0L, null); } @Override public boolean tryLock(long timeout, TimeUnit unit) throws ApplicationException { // If the lock is existing, then wait for it to be released. if (watcher.watch(this)) { try { if (timeout > 0) watcher.waitFor(this.id, timeout, unit); else watcher.waitFor(this.id); } catch (InterruptedException e) { throw new ApplicationException(e.getMessage(), e.getCause()); } } else { // Register the lock. this.watcher.register(this); } // If get this step, that means the lock has not been registered. and the thread can work on the next steps. return true; } @Override public void unlock() throws ApplicationException { if (watcher.watch(this)) { watcher.unregister(this); } } @Override public String id() { return this.id; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DistributedLock other = (DistributedLock) obj; if (id == null) { if (other.id != null) return false; } else if (!id.equals(other.id)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 0 : id.hashCode()); return result; } }
These women love to get high. In the ’70s, it was Avon parties, in the ’80s, Tupperware...then book clubs in the ’90s and wine parties in the 2000s. And now, in the new age of marijuana legalization sweeping the United States, women of all ages, races, and religions are getting together and getting high. Queens of the Stoned Age is a series that celebrates ladies who love weed. From grandmas to gamers, housewives to homegirls, some of the most influential women in the game, including Jessica Assaf of the Cannabis Feminist, come together to tackle a variety of issues they face today. Marriage/relationships, careers, food, politics, sex -- no topic is too taboo, no topic is off limits. Check out the Queens of the Stoned Age trailer above. The series is coming to Merry Jane this summer.
import nc from 'next-connect'
import dbConnect from '../../../config/dbConnect';
import { getSingleExpertisePost, updateSingleExpertisePost, deleteSingleExpertisePost } from '../../../controllers/expertisePostControllers'
import onError from '../../../middlewares/errors'

const handler = nc({ onError });

dbConnect();

handler.get(getSingleExpertisePost);
handler.put(updateSingleExpertisePost);
handler.delete(deleteSingleExpertisePost);

export default handler;
/* * thread_hold: * * Suspend execution of the specified thread. * This is a recursive-style suspension of the thread, a count of * suspends is maintained. */ void thread_hold( register thread_t thread) { spl_t s; s = splsched(); thread_lock(thread); thread->suspend_count++; thread->state |= TH_SUSP; thread_unlock(thread); (void) splx(s); }
from common_tasks import print_error, set_data_mysql, get_device_id, check_ip, get_data import re # Workflows from int_down import interface_down_check def parse_message(message): """ Author - <NAME> Function - Takes in a message and runs checks for each message Inputs - message - String returns - string - ip address of device sending message issue - the issue that was found (link down for example) acted - if the automation acted on the message. """ ip = identify_ip(message) if not ip: return ("", "No valid ip detected in message", False) device_RID = identify_device_RID_from_int_ip(ip) state, issue, acted = interface_down_check(message, device_RID) if state: return (device_RID, issue, acted) # Add in extra checks here if not issue: issue = "Following message was recieved but didn't match any filters\n{}".format(message) return (device_RID, issue, acted) def identify_device_RID_from_int_ip(ip): """ Author - <NAME> Function - finds the device_id based on interface ip Inputs - String - Ip address of interface returns - int - ID of related device """ command = """ SELECT device_id from `FYP Data`.interfaces WHERE ip_address = '{}';""".format(ip) result = get_data(command) print "device_id for {} ip is {}".format(ip, result) if not result: print_error("something wrong getting device information") return(False, "") if len(result) > 1: print_error("more than one device record, can't carry out automation") # email admin return (False, "") result = result[0][0] print "device id is {}".format(result) command = """ SELECT ip from `FYP Data`.device_table WHERE device_id = '{}';""".format(result) result = get_data(command) print "device RID is {}".format(result[0][0]) return result[0][0] def identify_ip(message): """ Author - <NAME> Function - From a messagem parse for the ip address Inputs - message - string returns - ip - string - if failed returns a blank string """ ip_regex = "(?P<host>\d+\.\d+\.\d+\.\d+).*" ip = re.match(ip_regex, message) if not ip: print_error("Syslog message not valid!: \n{}".format(message)) return "" ip = ip.group("host") ip_state = check_ip(ip) if not ip_state: return "" return ip def record_actions(host, message, issue, acted): """ Author - <NAME> Function - sends details to db about event and what happened Inputs - host - string message - string issue - string acted - bool """ device_id = get_device_id(host) command = """ INSERT INTO `FYP Data`.`syslog_events` (`syslog detail`, `device_id`, `acted_on`, `message`) VALUES ("{}", "{}", "{}", "{}"); """.format(issue, device_id, acted, message) if "License" in message: pass elif "bgp" in message: pass else: print_error("sending following command to log event\n{}".format(command)) set_data_mysql(command) def parse(message): """ Author - <NAME> Function - Function called from the monitor calls all other functions. Inputs - message - string """ print "recieved message:\n {}".format(message) device_RID, issue, acted = parse_message(message) print_error("{}, {}\n Was it acted on: {}".format(device_RID, issue, acted)) record_actions(device_RID, message, issue, acted)
<reponame>jugrinovskiy/spark-design-system import { storyWrapper } from '../../../../../../.storybook/helpers/storyWrapper'; import { SprkTabsModule } from './sprk-tabs.module'; import { SprkTabsPanelModule } from '../../directives/sprk-tabs/sprk-tabs-panel/sprk-tabs-panel.module'; import { SprkTabsButtonModule } from '../../directives/sprk-tabs/sprk-tabs-button/sprk-tabs-button.module'; import { SprkTabsComponent } from './sprk-tabs.component'; import { markdownDocumentationLinkBuilder } from '../../../../../../../storybook-utilities/markdownDocumentationLinkBuilder'; import { SprkTabsPanelDirective } from '../../directives/sprk-tabs/sprk-tabs-panel/sprk-tabs-panel.directive'; import { SprkTabsButtonDirective } from '../../directives/sprk-tabs/sprk-tabs-button/sprk-tabs-button.directive'; import { SprkTabbedNavigationModule } from '../sprk-tabbed-navigation/sprk-tabbed-navigation.module'; import { SprkTabbedNavigationPanelModule } from '../../directives/tabbed-navigation/sprk-tabbed-navigation-panel/sprk-tabbed-navigation-panel.module'; import { SprkTabbedNavigationTabModule } from '../../directives/tabbed-navigation/sprk-tabbed-navigation-tab/sprk-tabbed-navigation-tab.module'; export default { title: 'Components/Tabs', component: SprkTabsComponent, decorators: [ storyWrapper( (storyContent) => `<div class="sprk-o-Box sprk-u-JavaScript">${storyContent}<div>`, ), ], parameters: { subcomponents: { SprkTabsPanelDirective, SprkTabsButtonDirective, }, info: ` ${markdownDocumentationLinkBuilder('tabs')} - The Tabs component makes use of the \`sprk-u-JavaScript\` class to provide a graceful degradation experience in environments where JavaScript is not enabled. If \`sprk-u-JavaScript\` is not found on the \`<html>\` element of the page, The content of all Tabs panels will be visible. If \`sprk-u-JavaScript\` is present, only one content panel will be visible at a time. `, docs: { iframeHeight: 300 }, }, }; const modules = { imports: [SprkTabsModule, SprkTabsPanelModule, SprkTabsButtonModule], }; export const defaultStory = () => ({ moduleMetadata: modules, template: ` <sprk-tabs idString="tabs-1"> <button sprkTabsButton analyticsString="Tab: 1" idString="tab-1" > Tab 1 </button> <button sprkTabsButton idString="tab-2" [isDefaultActive]="true" > Tab 2 </button> <button sprkTabsButton idString="tab-3" > Tab 3 </button> <div sprkTabsPanel> <p sprkText variant="bodyTwo">Tab 1 Content Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p> </div> <div [isDefaultActive]="true" sprkTabsPanel> <p sprkText variant="bodyTwo">Tab 2 Content Sed quis rhoncus ipsum. Nulla euismod nisi est, vel consequat ante consectetur in.</p> </div> <div sprkTabsPanel> <p sprkText variant="bodyTwo">Tab 3 Content Ut interdum dictum est at ornare. Nam nec dapibus nibh. 
Integer venenatis ex eu mi euismod, non ultricies lacus venenatis.</p> </div> </sprk-tabs> `, }); defaultStory.story = { name: 'Default', parameters: { jest: [ 'sprk-tabs.component', 'sprk-tabs-panel.directive', 'sprk-tabs-button.directive', ], }, }; const modules_deprecated = { imports: [ SprkTabbedNavigationModule, SprkTabbedNavigationPanelModule, SprkTabbedNavigationTabModule, ], }; export const deprecated = () => ({ moduleMetadata: modules_deprecated, template: ` <sprk-tabbed-navigation idString="tabs-1"> <button sprkTabbedNavigationTab analyticsString="Tab: 1" idString="tab-1" > Tab 1 </button> <button sprkTabbedNavigationTab idString="tab-2" [defaultActive]="true" > Tab 2 </button> <button sprkTabbedNavigationTab idString="tab-3" > Tab 3 </button> <div sprkTabbedNavigationPanel> <p sprkText variant="bodyTwo">Tab 1 Content Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed quis rhoncus ipsum. Nulla euismod nisi est, vel consequat ante consectetur in. Ut interdum dictum est at ornare. Nam nec dapibus nibh. Integer venenatis ex eu mi euismod, non ultricies lacus venenatis.</p> </div> <div [defaultActive]="true" sprkTabbedNavigationPanel> <p sprkText variant="bodyTwo">Tab 2 Content Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed quis rhoncus ipsum. Nulla euismod nisi est, vel consequat ante consectetur in. Ut interdum dictum est at ornare. Nam nec dapibus nibh. Integer venenatis ex eu mi euismod, non ultricies lacus venenatis.</p> </div> <div sprkTabbedNavigationPanel> <p sprkText variant="bodyTwo">Tab 3 Content Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed quis rhoncus ipsum. Nulla euismod nisi est, vel consequat ante consectetur in. Ut interdum dictum est at ornare. Nam nec dapibus nibh. Integer venenatis ex eu mi euismod, non ultricies lacus venenatis.</p> </div> </sprk-tabbed-navigation> `, }); deprecated.story = { name: 'Legacy (Deprecated)', parameters: { jest: [ 'sprk-tabbed-navigation.component', 'sprk-tabbed-navigation-panel.directive', 'sprk-tabbed-navigation-tab.directive', ], }, };
package cmd

import (
    "github.com/dictyBase-docker/github-actions/internal/app/chart"
    "github.com/urfave/cli"
)

func DeployChartCmd() cli.Command {
    return cli.Command{
        Name:    "deploy-chart",
        Usage:   "deploy helm chart",
        Aliases: []string{"dc"},
        Action:  chart.DeployChart,
        Flags: []cli.Flag{
            cli.StringFlag{
                Name:     "name",
                Usage:    "Name of the chart",
                Required: true,
            },
            cli.StringFlag{
                Name:     "namespace",
                Usage:    "Kubernetes namespace",
                Required: true,
            },
            cli.StringFlag{
                Name:     "image-tag",
                Usage:    "Docker image tag",
                Required: true,
            },
            cli.StringFlag{
                Name:     "path",
                Usage:    "Relative chart path from the root of the repo",
                Required: true,
            },
        },
    }
}
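// Hedged usage sketch, not part of the original repository: one way such a
// command could be wired into a urfave/cli application. The import path for
// the cmd package and the app name are assumptions for illustration only.
package main

import (
    "log"
    "os"

    "github.com/dictyBase-docker/github-actions/internal/cmd"
    "github.com/urfave/cli"
)

func main() {
    app := cli.NewApp()
    app.Name = "github-actions"
    // register the deploy-chart command defined above
    app.Commands = []cli.Command{cmd.DeployChartCmd()}
    if err := app.Run(os.Args); err != nil {
        log.Fatal(err)
    }
}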
/**
 * Returns the type of the innermost enclosing instance, or null if there is none. This is the
 * same as {@link DeclaredType#getEnclosingType()} except that it returns null rather than
 * NoType for a static type. We need this because of
 * <a href="https://bugs.eclipse.org/bugs/show_bug.cgi?id=508222">this bug</a> whereby
 * the Eclipse compiler returns a value for static classes that is not NoType.
 */
private static TypeMirror enclosingType(DeclaredType t) {
  TypeMirror enclosing = t.getEnclosingType();
  if (enclosing.getKind().equals(TypeKind.NONE)
      || t.asElement().getModifiers().contains(Modifier.STATIC)) {
    return null;
  }
  return enclosing;
}
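// Hedged illustration, not from the original source: because enclosingType() returns
// null rather than NoType for static types, callers can walk the chain of enclosing
// *instance* types with a plain null check. The method name and counting logic below
// are assumptions used purely for illustration.
private static int countEnclosingInstances(DeclaredType type) {
  int count = 0;
  TypeMirror enclosing = enclosingType(type);
  while (enclosing != null && enclosing.getKind() == TypeKind.DECLARED) {
    count++; // one more outer instance is captured by the nested type
    enclosing = enclosingType((DeclaredType) enclosing);
  }
  return count;
}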
# Vasya_and_Socks.py
# a = pairs of socks Vasya starts with, b = mom buys one new pair every b-th day
a, b = map(int, input().split())

# ans = days survived so far, adder = extra pairs already counted into ans
adder, temp, ans = 0, a, a
while temp != 0:
    # every full b days of wear earn one new pair; add only the newly earned ones
    temp = ans // b
    temp = temp - adder
    ans = ans + temp
    adder = adder + temp
print(ans)
import { Model } from './model';
import { Categoria } from './categoria';
import { Servico } from './servico';
import { Produto } from './produto';

export class TabelaPreco extends Model {
  nome: string;
  ativo: boolean;
  itens: ItemTabelaPreco[];
  servico: Servico;
}

export class ItemTabelaPreco {
  precoProduto: number;
  precoServico: number;
  produto: Produto;
}
<gh_stars>0 package it.unisannio.studenti.franco.raffaele.communityandroidclient; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import androidx.appcompat.app.AppCompatActivity; import com.google.android.material.snackbar.Snackbar; import com.google.gson.Gson; import org.restlet.resource.ClientResource; import org.restlet.resource.ResourceException; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import it.unisannio.studenti.franco.raffaele.communityandroidclient.commons.ErrorCodes; import it.unisannio.studenti.franco.raffaele.communityandroidclient.commons.InvalidKeyException; import it.unisannio.studenti.franco.raffaele.communityandroidclient.commons.Item; import it.unisannio.studenti.franco.raffaele.communityandroidclient.commons.Question; import it.unisannio.studenti.franco.raffaele.communityandroidclient.commons.Response; import static it.unisannio.studenti.franco.raffaele.communityandroidclient.LoginActivity.prefName; public class ViewOpenedRequestActivity extends AppCompatActivity { private final String TAG = "Community"; private String baseURI = "http://10.0.2.2:8182/CommunityApplication/"; private SharedPreferences preferences; private ListView list; private Question question; private ArrayList<Item> responses; private TextView title; private TextView text; private TextView username_req; private CustomAdapter adapter; private Button answer; private Button close; private Button menu; private String param; private Set<String> titles; private TextView score; private Button up; private Button down; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_view_opened_user_request); preferences = getSharedPreferences(prefName, MODE_PRIVATE); title = findViewById(R.id.textView_title); text = findViewById(R.id.textView_text); username_req = findViewById(R.id.textView_username_req); list = findViewById(R.id.response_list); answer = findViewById(R.id.answer); close = findViewById(R.id.close); menu = findViewById(R.id.menu_button); score = findViewById(R.id.textView_score); up = findViewById(R.id.up); down = findViewById(R.id.down); Intent fromCaller = getIntent(); param = fromCaller.getStringExtra(getResources().getString(R.string.key)); new ViewOpenedRequestActivity.GetOpenedReqRestTask().execute(param); answer.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(ViewOpenedRequestActivity.this, AnswerActivity.class); intent.putExtra(getResources().getString(R.string.key), param); startActivity(intent); } }); close.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { new ViewOpenedRequestActivity.CloseReqRestTask().execute(param); } }); menu.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(ViewOpenedRequestActivity.this, MenuActivity.class); startActivity(intent); } }); list.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void 
onItemClick(AdapterView<?> parent, View view, int position, long id) { Intent intent = new Intent(ViewOpenedRequestActivity.this, ViewResponseActivity.class); intent.putExtra(getResources().getString(R.string.key_response), responses.get(position).getDescription()); intent.putExtra(getResources().getString(R.string.key_response1), question.searchResponseByUserEText(responses.get(position).getActivity(), responses.get(position).getDescription())); startActivity(intent); } }); up.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { new ViewOpenedRequestActivity.ScoreQuestionTask().execute(param, "5"); int score1 = question.getScore() + 5; score.setText(Integer.toString(score1)); up.setClickable(false); down.setClickable(false); } }); down.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { new ViewOpenedRequestActivity.ScoreQuestionTask().execute(param, "2"); int score1 = question.getScore() - 2; score.setText(Integer.toString(score1)); up.setClickable(false); down.setClickable(false); } }); } public class GetOpenedReqRestTask extends AsyncTask<String, Void, String> { protected String doInBackground(String... params) { ClientResource cr; Gson gson = new Gson(); String username = preferences.getString("username", null); if (username == null) { username = preferences.getString("user", null); } String URI = baseURI + "opened_requests_title/" + params[0]; String jsonResponse = null; cr = new ClientResource(URI); try { jsonResponse = cr.get().getText(); if (cr.getStatus().getCode() == ErrorCodes.INVALID_KEY_CODE) throw gson.fromJson(jsonResponse, InvalidKeyException.class); question = gson.fromJson(jsonResponse, Question.class); } catch (ResourceException | IOException e1) { if (org.restlet.data.Status.CLIENT_ERROR_UNAUTHORIZED.equals(cr.getStatus())) { jsonResponse = "Access unauthorized by the server, check your credentials"; Log.e(TAG, jsonResponse); } else { jsonResponse = "Error: " + cr.getStatus().getCode() + " - " + cr.getStatus().getDescription() + " - " + cr.getStatus().getReasonPhrase(); Log.e(TAG, jsonResponse); } } catch (InvalidKeyException e2) { String error2 = "Error: " + cr.getStatus().getCode() + " - " + e2.getMessage(); Log.e(TAG, error2); } return jsonResponse; } @Override protected void onPostExecute(String res) { if (res != null) { title.setText(question.getTitle()); text.setText(question.getText()); username_req.setText(question.getUser()); String scoreString = Integer.toString(question.getScore()); score.setText(scoreString); responses = new ArrayList<Item>(); for (Response r : question.getResponses()) responses.add(new Item(r.getUser(), r.getText())); if (responses.size() == 0) { responses.add(new Item(getString(R.string.no_responses_available), "")); } adapter = new CustomAdapter(getApplicationContext(), R.layout.list_item_responses, responses); list.setAdapter(adapter); } } } public class CloseReqRestTask extends AsyncTask<String, Void, String> { protected String doInBackground(String... 
params) { ClientResource cr; Gson gson = new Gson(); String username = preferences.getString("username", null); if (username == null) { username = preferences.getString("user", null); } String URI = baseURI + "closed_requests/status/" + username; String jsonResponse = null; cr = new ClientResource(URI); try { jsonResponse = cr.post(gson.toJson(params[0], String.class)).getText(); if (cr.getStatus().getCode() == ErrorCodes.INVALID_KEY_CODE) throw gson.fromJson(jsonResponse, InvalidKeyException.class); } catch (ResourceException | IOException e1) { if (org.restlet.data.Status.CLIENT_ERROR_UNAUTHORIZED.equals(cr.getStatus())) { jsonResponse = "Access unauthorized by the server, check your credentials"; Log.e(TAG, jsonResponse); } else { jsonResponse = "Error: " + cr.getStatus().getCode() + " - " + cr.getStatus().getDescription() + " - " + cr.getStatus().getReasonPhrase(); Log.e(TAG, jsonResponse); } } catch (InvalidKeyException e2) { String error2 = "Error: " + cr.getStatus().getCode() + " - " + e2.getMessage(); Log.e(TAG, error2); } return jsonResponse; } @Override protected void onPostExecute(String res) { preferences.getStringSet("titles", titles); Snackbar.make(title, res, Snackbar.LENGTH_LONG).show(); titles = preferences.getStringSet("titles", titles); if (titles == null) { titles = new HashSet<>(); } titles.add(String.valueOf(title.getText())); SharedPreferences.Editor edit = preferences.edit(); edit.putStringSet("titles", titles).apply(); } } public class ScoreQuestionTask extends AsyncTask<String, Void, String> { protected String doInBackground(String... params) { ClientResource cr; Gson gson = new Gson(); String username = preferences.getString("username", null); if (username == null) { username = preferences.getString("user", null); } String URI = baseURI + "score_requests/" + username; String jsonResponse = null; cr = new ClientResource(URI); try { jsonResponse = cr.post(gson.toJson(params[0] + ";" + params[1], String.class)).getText(); if (cr.getStatus().getCode() == ErrorCodes.INVALID_KEY_CODE) throw gson.fromJson(jsonResponse, InvalidKeyException.class); } catch (ResourceException | IOException e1) { if (org.restlet.data.Status.CLIENT_ERROR_UNAUTHORIZED.equals(cr.getStatus())) { jsonResponse = "Access unauthorized by the server, check your credentials"; Log.e(TAG, jsonResponse); } else { jsonResponse = "Error: " + cr.getStatus().getCode() + " - " + cr.getStatus().getDescription() + " - " + cr.getStatus().getReasonPhrase(); Log.e(TAG, jsonResponse); } } catch (InvalidKeyException e2) { String error2 = "Error: " + cr.getStatus().getCode() + " - " + e2.getMessage(); Log.e(TAG, error2); } return jsonResponse; } @Override protected void onPostExecute(String res) { preferences.getStringSet("titles", titles); //Toast.makeText(getApplicationContext(), res, Toast.LENGTH_SHORT).show(); ; Snackbar snackbar = Snackbar.make(answer, res, Snackbar.LENGTH_LONG); snackbar.show(); titles = preferences.getStringSet("titles", titles); if (titles == null) { titles = new HashSet<>(); } titles.add(String.valueOf(title.getText())); SharedPreferences.Editor edit = preferences.edit(); edit.putStringSet("titles", titles).apply(); } } public class CustomAdapter extends ArrayAdapter<Item> { public CustomAdapter(Context context, int textViewResourceId, List<Item> objects) { super(context, textViewResourceId, objects); } @Override public View getView(int position, View convertView, ViewGroup parent) { LayoutInflater inflater = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE); 
convertView = inflater.inflate(R.layout.list_item_responses, null); TextView activity = convertView.findViewById(R.id.username_resp); TextView description = convertView.findViewById(R.id.response); Item item = getItem(position); activity.setText(item.getActivity()); description.setText(item.getDescription()); return convertView; } } }
Cannabis legalization is emerging at a time when consumers expect more from retail. The impact of online shopping has resulted in everyone from tiny boutiques to traditional retail establishments understanding that customers want to be lured by a pleasant shopping experience. Whether a cannabis store is opening in a fresh legal market or expanding its consumer base from medical to recreational clients, retail owners must be intentional about creating environments that are more than getting people to buy something. In this post, we describe the 3 key areas in which dispensary owners should focus their efforts when creating a memorable cannabis shopping experience for customers. Tip 1: Atmosphere is Key Long gone are the days of the classic headshop where a little incense and some Bob Marley were all that was needed to create a vibe. Now, every decision a cannabis retail owner makes – from the way the shop is decorated and organized, to how staff engages with customers – ultimately helps shape an atmosphere that must be able to appeal to clients while also staying authentic to your brand. When working toward developing a branded dispensary experience, business owners must be prepared to make decisions that are consistent with their store’s mission. On the most basic level, the brick and mortar retail location must be in alignment with the shop’s website and social media presence. Not only that, your staff is a reflection of your brand, so you want to hire budtenders and managers that support the shop’s vision and purpose. Retail owners will want to also consider the flow of store traffic, especially when business is booming. Putting into place tools like a solid cannabis dispensary POS, and integrating systems like a separate line for online order pick-up can improve the ambiance by making customer transactions quicker and smoother, especially if you’re dealing with limited space. As important to the physical store environment is keeping your target customers in mind. For example, a modern aesthetic and stark lighting might resonate more with a high-end professional clientele, whereas a medical patient community might appreciate casual couches and warm wooden finishes. When it comes to choosing music for your retail operation, do you want clients to hear questionable lyrics and commercial interruptions as they shop? Music is supposed to enhance the retail experience, not serve as a distraction, which is why being intentional about everything from music genre to streaming method can make a huge impact. Tip 2: Use Technology to Streamline Your Operations A poor performing POS system can be a liability to a cannabis retail business. It creates more manual work for your staff and subsequently, an increased likelihood of human error, which could be highly problematic for a business that is required to report sales and inventory data to the government. Slower processes also mean your clients are spending more time in line than they are perusing the merchandise or being served by staff. But a good quality POS system helps nurture a pleasant shopping experience by enabling your shop to run more smoothly. The right cannabis POS solution puts product and consumer information at every budtender’s fingertips – no need to rely on a single terminal or overstuffed binder to access strain details or a client’s order history. 
Activating features like a dispensary touchscreen menu can help customers to be more interactive with your store while also making budtenders’ lives easier, enabling them to focus less on temperamental tech and more on engaging with clients. And your dispensary’s POS software is only the beginning; there are plenty of tools that can help your staff address customers’ needs quickly and accurately. Linking POS wirelessly with tablets gives your budtenders access to consumer and product information quickly from anywhere in the store and at every stage of a customer’s shopping experience, not just behind the cashwrap when clients are ready to check out. Plus, the explosive growth of the cannabis industry means more retail tools are becoming available every day. From strain info to HR solutions to third party delivery, there’s no shortage of options for retailers seeking ways to tighten up operations and offer the best possible experience for their customers. Tip 3: Provide Stellar Customer Service This goes without saying, but customer service is critical to the success of any retail operation. And in this area, cannabis retailers have a little more to contend with than operators in traditional industries because there’s a huge educational component. Not only that, but the law requires budtenders to be able to provide advice without actually making recommendations. Implementing customer service best practices can help create guidelines for your staff to properly address clients’ needs. A framework that specifies hiring personnel with real cannabis experience, encourages budtenders to refer to regular clients by their first names, and positions the dispensary as a community resource for educational workshops helps set the tone and makes it easier for managers and staff to provide the level of service your clients deserve. Retail owners must also remember the consumer population that is visiting their first dispensary. Maybe they’re new to cannabis or haven’t tried it in years, even decades – either way, cannabis newbies are looking to you and your staff to tell them everything: from personal experiences with products, to what’s legal, to whether a myth they’ve heard is fact or fiction. Even the most experienced budtender must be able to educate beginner and expert consumers alike with the facts, address their questions and ease their concerns all the same time. When it comes to customer service, retail owners also have to be prepared for anything. Dispensaries are already major focal points in their communities because of the industry’s prohibition history and the nature of the products being sold. Making sure staff is skilled enough to clarify misinformation, serve impatient customers, or deal with intoxicated clients as professionally as possible helps demonstrate your shop’s ongoing commitment to customer service. Looking for more ideas and tips to help your cannabis dispensary’s operations? Subscribe to our blog today!
from django.test import Client
import json

client = Client()


def test_react_url() -> None:
    response = client.get('')
    assert response.status_code == 200
    assert 'React will load' in response.content.decode()


def test_existing_user() -> None:
    response = client.post(
        '/predict/',
        dict(platform='YouTube', username='pewdiepie'),
        content_type='application/json'
    )
    assert response.status_code == 200
    ajax_response = json.loads(response.content)
    assert ajax_response['username'] == 'PewDiePie'
    assert not ajax_response['insignificant']
    assert ajax_response['doesExist']
    assert ajax_response['predicted']


def test_non_existing_user() -> None:
    response = client.post(
        '/predict/',
        dict(platform='YouTube', username='akjsdklajsdlajswq'),
        content_type='application/json'
    )
    assert response.status_code == 200
    ajax_response = json.loads(response.content)
    assert not ajax_response.get('username')
    assert not ajax_response['doesExist']
    assert not ajax_response.get('predicted')


def test_insignificant_user() -> None:
    response = client.post(
        '/predict/',
        dict(platform='Twitter', username='DariuszDzionek'),
        content_type='application/json'
    )
    assert response.status_code == 200
    ajax_response = json.loads(response.content)
    assert not ajax_response.get('username')
    assert ajax_response['insignificant']
    assert not ajax_response.get('predicted')


def test_details_not_given() -> None:
    response = client.post(
        '/predict/',
        dict(username='pewdiepie'),
        content_type='application/json'
    )
    assert response.status_code == 200
    ajax_response = json.loads(response.content)
    assert ajax_response['error']
export const flatten = <T>(array: T[]) => ([] as T[]).concat(...array)

export const random = (min: number, max: number) =>
  Math.floor(Math.random() * (max - min) + min)

export const range = (start: number, end: number) =>
  Array.from(' '.repeat(end - start), (_, index) => start + index)

export const shuffle = <T>(values: T[]) =>
  values
    .map((value) => [Math.random(), value] as const)
    .sort(([left], [right]) => left - right)
    .map(([, value]) => value)
Showdown in Arkansas over “Ad Orientem”

SEVERAL PEOPLE have sent me copies of a letter dated 14 July 2016. This shocking letter—sent to all priests and deacons in Little Rock by BISHOP ANTHONY B. TAYLOR—orders that Mass “will always be celebrated facing the people in our diocese.”

When the 2000 (2002) Missal was promulgated, the Vatican’s CDW was asked whether bishops have authority to forbid “ad orientem” celebration. Dated 10 April 2000, the CDW response was unequivocal:

    THIS DICASTERY [i.e. the Vatican’s Congregation for Divine Worship] wishes to state that Holy Mass may be celebrated versus populum or versus apsidem. Both positions are in accord with liturgical law; both are to be considered correct. There is no preference expressed in the liturgical legislation for either position. As both positions enjoy the favor of law, the legislation may not be invoked to say that one position or the other accords more closely with the mind of the Church.

This letter (PROTOCOL NO. 564/00/L) specifically addresses whether a bishop can forbid “ad orientem.” They stated that, while exercising his rightful role as “moderator of the Sacred Liturgy in the particular Church entrusted to his pastoral care,” the Diocesan Bishop can neither “exclude nor mandate the use of a legitimate option.” [Those italics are by the CDW.] This letter was sent by the same congregation responsible for drafting the 2000 (2002) Missal and GIRM, which was approved by Pope St. John Paul II. The letter was signed by Cardinal Medina, CDW Prefect, and Archbishop Tamburrino, CDW Secretary. I have no idea why so many people commenting on this issue refuse to make reference to it.

BISHOP TAYLOR MADE REFERENCE to a letter dated 12 July 2016 from Most Rev’d ARTHUR J. SERRATELLI, chairman of the USCCB liturgy committee. Specifically, Bishop Serratelli said the 2000 (2002) GIRM shows “a preference for the celebrant’s facing the people.” To support this claim, Bishop Serratelli cited paragraph 299 of the GIRM … but the English translation he used was defective. Here’s the correct translation of GIRM paragraph 299:

    2000 (2002) Latin: 299. Altare exstruatur a pariete seiunctum, ut facile circumiri et in eo celebratio versus populum peragi possit, quod expedit ubicumque possibile sit.

    Correct Translation: 299. Wherever possible, the altar should be built separated from the wall, leaving enough space for the priest to walk around it and making it possible to celebrate facing the people.

But the translation Bishop Serratelli cites in his letter is grammatically impossible. Specifically, the “QUOD” phrase cannot modify “celebratio versus populum” since “quod” is neuter while “celebratio” is feminine—as was explained by Dr. J. W. Hunwicke of Lancing College (Sussex, England) in a 2001 article. Many others agree, such as Dr. Richard Cipolla, Chair Emeritus of the Classics Department of Brunswick School, who specifically endorsed this view on 14 July 2016. Fr. Reginald Foster, formerly the Pope’s Latinist, also specifically endorsed Hunwicke’s view in April of 2006. 1

Bishop Serratelli correctly affirms that the 2000 (2002) Missal tells priests when to turn toward the people and when to turn back toward the altar. Surely instructions contradicting such rubrics would not be found in the section dedicated to building (and consecrating) new churches and altars—but that’s where paragraph 299 occurs. 2

Some will struggle to believe an approved English translation could be incorrect. Those people should examine how the rubric “quando celebratur Baptisma” was translated in the 1970 and 1998 editions of the Lectionary. The 1970 got it right, while the 1998 mangled it horribly. A more famous example was an American GIRM adaptation for paragraph 48, which was so mutilated by the USCCB it became incomprehensible, as Dr. Christoph Tietze explained in 2006.

I WILL RETURN TO THE ARKANSAS SITUATION in a moment, but first let me say that Mr. Andrew Leung’s article makes a salient point. When “ad orientem” is used in the Ordinary Form, the priest only faces away from the congregation for about five minutes total. I would also like to say that this so-called “controversy” began owing to various communications made by Cardinal Sarah, the CDW Prefect appointed in 2014 by Pope Francis. I consider these communications to be quite powerful, yet able to be understood by everyone:

    * “Reform of the Reform” (July 2016)
    * “A Letter on the Liturgy” (June 2015)
    * “Silent Action of the Heart” (June 2015)

Regarding Fr. Lombardi’s 11 July 2016 statement, I believe Fr. Mark Drew was probably correct to label it as “inept.” For example, Fr. Lombardi “clarifies” that no new legislation on AD ORIENTEM is coming in Advent; yet, Cardinal Sarah had said absolutely nothing—not one word—about new legislation.

SOME HAVE ALREADY ASCRIBED bad intentions to Bishop Taylor, but I disagree. I suspect he sent his letter without knowing the CDW had specifically said the diocesan bishop cannot outlaw “ad orientem.” I believe that once Bishop Taylor becomes aware of that statement, he will issue a retraction. Furthermore, I strongly suspect Bishop Serratelli will retract his letter when the correct translation of paragraph 299 is brought to his attention.

The reality is, everyone has deficiencies, no matter how brilliant they may otherwise be. Bishop Taylor has a doctorate in biblical theology, which has very little to do with liturgical items. That might help explain why he misspells “ad orientem.” To make matters worse, there has been excessive liturgical legislation since the 1960s: thousands of pages!

If bishops want their priests to be faithful to the GIRM, why do they consistently ignore certain sections? For example, the current GIRM requires approval by the local bishop for any song replacing the Introit, Offertory, or Communion antiphon. Yet, this requirement is almost always ignored. Indeed, 85% of Catholic churches replace the assigned propers with all kinds of songs lacking approval by the local bishop—and many contain lyrics written by non-Catholics!

A discussion about this post is underway.

NOTES FROM THIS ARTICLE:
/*
EncodeErrorRequest will extract the string message from the request error
and add it to the body
*/
func EncodeErrorRequest(_ context.Context, r *http.Request, request interface{}) error {
    req := request.(error)
    r.Body = ioutil.NopCloser(bytes.NewReader([]byte(req.Error())))
    return nil
}
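// Hedged usage sketch, not part of the original file: calling the encoder directly
// against a plain *http.Request. The function's signature matches go-kit's
// transport/http EncodeRequestFunc, but that wiring is an assumption, as are the
// URL and error value used here.
func exampleEncodeErrorRequest() error {
    r, err := http.NewRequest(http.MethodPost, "http://example.invalid/err", nil)
    if err != nil {
        return err
    }
    // The "request" payload is an error value; its message becomes the request body.
    return EncodeErrorRequest(context.Background(), r, errors.New("upstream failure"))
}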
""" Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions from ambari_commons.os_check import OSCheck from resource_management.libraries.functions.default import default from resource_management import * import status_params import utils import os import itertools import re config = Script.get_config() tmp_dir = Script.get_tmp_dir() stack_name = default("/hostLevelParams/stack_name", None) upgrade_direction = default("/commandParams/upgrade_direction", None) stack_version_unformatted = str(config['hostLevelParams']['stack_version']) hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade version = default("/commandParams/version", None) security_enabled = config['configurations']['cluster-env']['security_enabled'] hdfs_user = status_params.hdfs_user hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix # Some datanode settings dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None) dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None) dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None) dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None) dfs_dn_ipc_address = config['configurations']['hdfs-site']['dfs.datanode.ipc.address'] secure_dn_ports_are_in_use = False #hadoop params if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0: mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*" hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec" hadoop_bin = "/usr/hdp/current/hadoop-client/sbin" hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin" hadoop_home = "/usr/hdp/current/hadoop-client" if not security_enabled: hadoop_secure_dn_user = '""' else: dfs_dn_port = utils.get_port(dfs_dn_addr) dfs_dn_http_port = utils.get_port(dfs_dn_http_addr) dfs_dn_https_port = utils.get_port(dfs_dn_https_addr) # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports if dfs_http_policy == "HTTPS_ONLY": secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port) elif dfs_http_policy == "HTTP_AND_HTTPS": secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port) else: # params.dfs_http_policy == "HTTP_ONLY" or not defined: secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) if secure_dn_ports_are_in_use: hadoop_secure_dn_user = hdfs_user else: hadoop_secure_dn_user = '""' else: mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*" hadoop_libexec_dir 
= "/usr/lib/hadoop/libexec" hadoop_bin = "/usr/lib/hadoop/sbin" hadoop_bin_dir = "/usr/bin" hadoop_home = "/usr/lib/hadoop" hadoop_secure_dn_user = hdfs_user hadoop_conf_dir = "/etc/hadoop/conf" hadoop_conf_empty_dir = "/etc/hadoop/conf.empty" limits_conf_dir = "/etc/security/limits.d" execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir ulimit_cmd = "ulimit -c unlimited ; " #security params smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab'] hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] falcon_user = config['configurations']['falcon-env']['falcon_user'] #exclude file hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", []) exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude'] update_exclude_file_only = default("/commandParams/update_exclude_file_only",False) klist_path_local = functions.get_klist_path() kinit_path_local = functions.get_kinit_path() #hosts hostname = config["hostname"] rm_host = default("/clusterHostInfo/rm_host", []) slave_hosts = default("/clusterHostInfo/slave_hosts", []) oozie_servers = default("/clusterHostInfo/oozie_server", []) hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", []) hive_server_host = default("/clusterHostInfo/hive_server_host", []) hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", []) hs_host = default("/clusterHostInfo/hs_host", []) jtnode_host = default("/clusterHostInfo/jtnode_host", []) namenode_host = default("/clusterHostInfo/namenode_host", []) nm_host = default("/clusterHostInfo/nm_host", []) ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", []) journalnode_hosts = default("/clusterHostInfo/journalnode_hosts", []) zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", []) falcon_host = default("/clusterHostInfo/falcon_server_hosts", []) has_ganglia_server = not len(ganglia_server_hosts) == 0 has_namenodes = not len(namenode_host) == 0 has_jobtracker = not len(jtnode_host) == 0 has_resourcemanager = not len(rm_host) == 0 has_histroryserver = not len(hs_host) == 0 has_hbase_masters = not len(hbase_master_hosts) == 0 has_slaves = not len(slave_hosts) == 0 has_oozie_server = not len(oozie_servers) == 0 has_hcat_server_host = not len(hcat_server_hosts) == 0 has_hive_server_host = not len(hive_server_host) == 0 has_journalnode_hosts = not len(journalnode_hosts) == 0 has_zkfc_hosts = not len(zkfc_hosts) == 0 has_falcon_host = not len(falcon_host) == 0 is_namenode_master = hostname in namenode_host is_jtnode_master = hostname in jtnode_host is_rmnode_master = hostname in rm_host is_hsnode_master = hostname in hs_host is_hbase_master = hostname in hbase_master_hosts is_slave = hostname in slave_hosts if has_ganglia_server: ganglia_server_host = ganglia_server_hosts[0] #users and groups yarn_user = config['configurations']['yarn-env']['yarn_user'] hbase_user = config['configurations']['hbase-env']['hbase_user'] oozie_user = config['configurations']['oozie-env']['oozie_user'] webhcat_user = config['configurations']['hive-env']['hcat_user'] hcat_user = config['configurations']['hive-env']['hcat_user'] hive_user = config['configurations']['hive-env']['hive_user'] smoke_user = config['configurations']['cluster-env']['smokeuser'] smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name'] mapred_user = config['configurations']['mapred-env']['mapred_user'] hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None) user_group = 
config['configurations']['cluster-env']['user_group'] proxyuser_group = config['configurations']['hadoop-env']['proxyuser_group'] #hadoop params hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix'] hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger'] dfs_domain_socket_path = config['configurations']['hdfs-site']['dfs.domain.socket.path'] dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path) jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir'] dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir'] namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}") namenode_dirs_stub_filename = "namenode_dirs_created" smoke_hdfs_user_dir = format("/user/{smoke_user}") smoke_hdfs_user_mode = 0770 hdfs_namenode_formatted_mark_suffix = "/namenode-formatted/" namenode_formatted_old_mark_dirs = ["/var/run/hadoop/hdfs/namenode-formatted", format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted"), "/var/lib/hdfs/namenode/formatted"] dfs_name_dirs = dfs_name_dir.split(",") namenode_formatted_mark_dirs = [] for dn_dir in dfs_name_dirs: tmp_mark_dir = format("{dn_dir}{hdfs_namenode_formatted_mark_suffix}") namenode_formatted_mark_dirs.append(tmp_mark_dir) # Use the namenode RPC address if configured, otherwise, fallback to the default file system namenode_address = None if 'dfs.namenode.rpc-address' in config['configurations']['hdfs-site']: namenode_rpcaddress = config['configurations']['hdfs-site']['dfs.namenode.rpc-address'] namenode_address = format("hdfs://{namenode_rpcaddress}") else: namenode_address = config['configurations']['core-site']['fs.defaultFS'] fs_checkpoint_dirs = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'].split(',') dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir'] dfs_data_dir = ",".join([re.sub(r'^\[.+\]', '', dfs_dir.strip()) for dfs_dir in dfs_data_dir.split(",")]) data_dir_mount_file = config['configurations']['hadoop-env']['dfs.datanode.data.dir.mount.file'] # HDFS High Availability properties dfs_ha_enabled = False dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None) dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None) dfs_ha_automatic_failover_enabled = default("/configurations/hdfs-site/dfs.ha.automatic-failover.enabled", False) # hostname of the active HDFS HA Namenode (only used when HA is enabled) dfs_ha_namenode_active = default("/configurations/hadoop-env/dfs_ha_initial_namenode_active", None) # hostname of the standby HDFS HA Namenode (only used when HA is enabled) dfs_ha_namenode_standby = default("/configurations/hadoop-env/dfs_ha_initial_namenode_standby", None) namenode_id = None namenode_rpc = None if dfs_ha_namenode_ids: dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",") dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list) if dfs_ha_namenode_ids_array_len > 1: dfs_ha_enabled = True if dfs_ha_enabled: for nn_id in dfs_ha_namemodes_ids_list: nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')] if hostname in nn_host: namenode_id = nn_id namenode_rpc = nn_host # With HA enabled namenode_address is recomputed namenode_address = format('hdfs://{dfs_ha_nameservices}') if dfs_http_policy is not None and dfs_http_policy.upper() == "HTTPS_ONLY": https_only = True journalnode_address = 
default('/configurations/hdfs-site/dfs.journalnode.https-address', None) else: https_only = False journalnode_address = default('/configurations/hdfs-site/dfs.journalnode.http-address', None) if journalnode_address: journalnode_port = journalnode_address.split(":")[1] if security_enabled: _dn_principal_name = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal'] _dn_keytab = config['configurations']['hdfs-site']['dfs.datanode.keytab.file'] _dn_principal_name = _dn_principal_name.replace('_HOST',hostname.lower()) dn_kinit_cmd = format("{kinit_path_local} -kt {_dn_keytab} {_dn_principal_name};") _nn_principal_name = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal'] _nn_keytab = config['configurations']['hdfs-site']['dfs.namenode.keytab.file'] _nn_principal_name = _nn_principal_name.replace('_HOST',hostname.lower()) nn_kinit_cmd = format("{kinit_path_local} -kt {_nn_keytab} {_nn_principal_name};") _jn_principal_name = default("/configurations/hdfs-site/dfs.journalnode.kerberos.principal", None) if _jn_principal_name: _jn_principal_name = _jn_principal_name.replace('_HOST', hostname.lower()) _jn_keytab = default("/configurations/hdfs-site/dfs.journalnode.keytab.file", None) jn_kinit_cmd = format("{kinit_path_local} -kt {_jn_keytab} {_jn_principal_name};") else: dn_kinit_cmd = "" nn_kinit_cmd = "" jn_kinit_cmd = "" import functools #create partial functions with common arguments for every HdfsDirectory call #to create hdfs directory we need to call params.HdfsDirectory in code HdfsDirectory = functools.partial( HdfsDirectory, conf_dir=hadoop_conf_dir, hdfs_user=hdfs_user, security_enabled = security_enabled, keytab = hdfs_user_keytab, kinit_path_local = kinit_path_local, bin_dir = hadoop_bin_dir ) # The logic for LZO also exists in OOZIE's params.py io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None) lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower() lzo_packages = get_lzo_packages(stack_version_unformatted) exclude_packages = [] if not lzo_enabled: exclude_packages += lzo_packages name_node_params = default("/commandParams/namenode", None) #hadoop params hadoop_env_sh_template = config['configurations']['hadoop-env']['content'] #hadoop-env.sh java_home = config['hostLevelParams']['java_home'] java_version = int(config['hostLevelParams']['java_version']) if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and not OSCheck.is_suse_family(): # deprecated rhel jsvc_path jsvc_path = "/usr/libexec/bigtop-utils" else: jsvc_path = "/usr/lib/bigtop-utils" hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize'] namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize'] namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize'] namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize'] namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m") namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m") jtnode_opt_newsize = "200m" jtnode_opt_maxnewsize = "200m" jtnode_heapsize = "1024m" ttnode_heapsize = "1024m" dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize'] nfsgateway_heapsize = config['configurations']['hadoop-env']['nfsgateway_heapsize'] mapred_pid_dir_prefix = 
default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce") mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce") # ranger host ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", []) has_ranger_admin = not len(ranger_admin_hosts) == 0 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0: # setting flag value for ranger hdfs plugin enable_ranger_hdfs = False ranger_plugin_enable = default("/configurations/ranger-hdfs-plugin-properties/ranger-hdfs-plugin-enabled", "no") if ranger_plugin_enable.lower() == 'yes': enable_ranger_hdfs = True elif ranger_plugin_enable.lower() == 'no': enable_ranger_hdfs = False ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0] #ranger hdfs properties policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080") sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar") xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL") xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit") xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger") xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "<PASSWORD>") xa_db_host = default("/configurations/admin-properties/db_host", "localhost") repo_name = str(config['clusterName']) + '_hadoop' db_enabled = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false") hdfs_enabled = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false") hdfs_dest_dir = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd") hdfs_buffer_dir = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit") hdfs_archive_dir = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive") hdfs_dest_file = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log") hdfs_dest_flush_int_sec = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900") hdfs_dest_rollover_int_sec = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400") hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60") hdfs_buffer_file = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log") hdfs_buffer_flush_int_sec = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60") hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600") hdfs_archive_max_file_count = default("/configurations/ranger-hdfs-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10") ssl_keystore_file = default("/configurations/ranger-hdfs-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks") 
ssl_keystore_password = default("/configurations/ranger-hdfs-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword") ssl_truststore_file = default("/configurations/ranger-hdfs-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks") ssl_truststore_password = default("/configurations/ranger-hdfs-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "<PASSWORD>") hadoop_security_authentication = config['configurations']['core-site']['hadoop.security.authentication'] hadoop_security_authorization = config['configurations']['core-site']['hadoop.security.authorization'] fs_default_name = config['configurations']['core-site']['fs.defaultFS'] hadoop_security_auth_to_local = config['configurations']['core-site']['hadoop.security.auth_to_local'] hadoop_rpc_protection = default("/configurations/ranger-hdfs-plugin-properties/hadoop.rpc.protection", "-") common_name_for_certificate = default("/configurations/ranger-hdfs-plugin-properties/common.name.for.certificate", "-") repo_config_username = default("/configurations/ranger-hdfs-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hadoop") repo_config_password = default("/configurations/ranger-hdfs-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "<PASSWORD>") if security_enabled: _sn_principal_name = default("/configurations/hdfs-site/dfs.secondary.namenode.kerberos.principal", "nn/_<EMAIL>") _sn_principal_name = _sn_principal_name.replace('_HOST',hostname.lower()) admin_uname = default("/configurations/ranger-env/admin_username", "admin") admin_password = default("/configurations/ranger-env/admin_password", "<PASSWORD>") admin_uname_password = format("{admin_uname}:{admin_password}") ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin") ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "<PASSWORD>") policy_user = default("/configurations/ranger-hdfs-plugin-properties/policy_user", "ambari-qa") #For curl command in ranger plugin to get db connector jdk_location = config['hostLevelParams']['jdk_location'] java_share_dir = '/usr/share/java' if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql': jdbc_symlink_name = "mysql-jdbc-driver.jar" jdbc_jar_name = "mysql-connector-java.jar" elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle': jdbc_jar_name = "ojdbc6.jar" jdbc_symlink_name = "oracle-jdbc-driver.jar" downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}") driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}") driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
package com.example.springtrial.simple;

import org.springframework.stereotype.Component;

@Component
public class FactoryObject {

    /**
     * The applicationContext is configured so that, to create an instance of
     * FactoryGeneratedObject,<br/>
     * it has to call this {@link FactoryObject#generateFactoryGeneratedObject} method.<br/><br/>
     * You can also declare this method with the @Bean annotation, and then the aforementioned xml config is not required.<br/>
     * @Scope and @Qualifier("name") can also be used together with @Bean.
     */
    public FactoryGeneratedObject generateFactoryGeneratedObject() {
        FactoryGeneratedObject obj = new FactoryGeneratedObject();
        obj.setId(88.99);
        return obj;
    }
}
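// Hedged sketch, not part of the original project: the @Bean alternative that the
// Javadoc above mentions, shown as a separate configuration class. The class name,
// bean name, and qualifier value are assumptions used for illustration.
package com.example.springtrial.simple;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;

@Configuration
public class FactoryGeneratedObjectConfig {

    @Bean
    @Scope("prototype")            // optional: a new instance per injection point
    @Qualifier("factoryGenerated") // optional: qualifier usable at injection sites
    public FactoryGeneratedObject factoryGeneratedObject() {
        FactoryGeneratedObject obj = new FactoryGeneratedObject();
        obj.setId(88.99);
        return obj;
    }
}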
import { Meteor } from 'meteor/meteor';
import { Wishlist } from '../../../both/collections/wishlist.collections';

Meteor.publish('wishlist', () => Wishlist.find());
use super::{Material, Shape};
use crate::{Matrix4x4, Point, Vector};
use std::f64::consts::PI;

fn round(v: f64) -> f64 {
    const SIG_FIGS: f64 = 100000.0;
    (v * SIG_FIGS).round() / SIG_FIGS
}

#[test]
fn the_normal_on_different_axis() {
    let s = Shape::default();
    let n1 = s.normal_at(Point::new(1.0, 0.0, 0.0));
    assert_eq!(n1, Vector::new(1.0, 0.0, 0.0));
    let n2 = s.normal_at(Point::new(0.0, 1.0, 0.0));
    assert_eq!(n2, Vector::new(0.0, 1.0, 0.0));
    let n3 = s.normal_at(Point::new(0.0, 0.0, 1.0));
    assert_eq!(n3, Vector::new(0.0, 0.0, 1.0));
    let p = 3f64.sqrt() / 3.0;
    let n4 = s.normal_at(Point::new(p, p, p));
    assert_eq!(n4, Vector::new(p, p, p));
}

#[test]
fn the_normal_is_a_normalized_vector() {
    let s = Shape::default();
    let p = 3f64.sqrt() / 3.0;
    let n = s.normal_at(Point::new(p, p, p));
    assert_eq!(n, n.normalize());
}

#[test]
fn computing_the_normal_on_a_translated_sphere() {
    let s = Shape::sphere_from_transformation(Matrix4x4::translation(0.0, 1.0, 0.0));
    let n = s.normal_at(Point::new(0.0, 1.70711, -0.70711));
    assert_eq!(round(n.x()), 0.0);
    assert_eq!(round(n.y()), 0.70711);
    assert_eq!(round(n.z()), -0.70711);
    assert_eq!(round(n.w()), 0.0);
}

#[test]
fn computing_the_normal_on_a_transformed_sphere() {
    //let m = Matrix4x4::scaling(1.0, 0.5, 1.0).rotate_z(PI / 5.0);
    let m = Matrix4x4::rotation_z(PI / 5.0).scale(1.0, 0.5, 1.0);
    let s = Shape::sphere_from_transformation(m);
    let p = 2f64.sqrt() / 2.0;
    let n = s.normal_at(Point::new(0.0, p, -p));
    assert_eq!(round(n.x()), 0.0);
    assert_eq!(round(n.y()), 0.97014);
    assert_eq!(round(n.z()), -0.24254);
    assert_eq!(round(n.w()), 0.0);
}

#[test]
fn a_sphere_has_a_default_material() {
    let s = Shape::default();
    assert_eq!(s.material(), Material::default());
}

#[test]
fn a_sphere_may_be_assigned_a_material() {
    let mut m = Material::default();
    m.ambient = 1.0;
    let s = Shape::sphere(Matrix4x4::identity(), m);
    assert_eq!(s.material(), m);
}

#[test]
fn the_normal_of_a_plane_is_constant_everywhere() {
    let p = Shape::plane_default();
    let n1 = p.normal_at(Point::new(0.0, 0.0, 0.0));
    let n2 = p.normal_at(Point::new(10.0, 0.0, -10.0));
    let n3 = p.normal_at(Point::new(-5.0, 0.0, 150.0));
    assert_eq!(n1, Vector::new(0.0, 1.0, 0.0));
    assert_eq!(n2, Vector::new(0.0, 1.0, 0.0));
    assert_eq!(n3, Vector::new(0.0, 1.0, 0.0));
}
/**
 * <p>Base class for JBoss WildFly security management that uses the administration Java API for managing the command line interface.</p>
 * <p>Based on JBoss WildFly administration API & Util classes.</p>
 */
public abstract class BaseWildflyCLIManager {

    protected static final String DEFAULT_HOST = "localhost";
    protected static final int DEFAULT_PORT = 9990;
    protected static final String DEFAULT_ADMIN_USER = null;
    protected static final String DEFAULT_ADMIN_PASSWORD = null;
    protected static final String DEFAULT_REALM = "ApplicationRealm";

    protected String host;
    protected int port;
    protected String adminUser;
    protected String adminPassword;
    protected String realm;

    protected void loadConfig(final ConfigProperties config) {
        final ConfigProperties.ConfigProperty host = config.get("org.uberfire.ext.security.management.wildfly.cli.host", DEFAULT_HOST);
        final ConfigProperties.ConfigProperty port = config.get("org.uberfire.ext.security.management.wildfly.cli.port", Integer.toString(DEFAULT_PORT));
        final ConfigProperties.ConfigProperty user = config.get("org.uberfire.ext.security.management.wildfly.cli.user", DEFAULT_ADMIN_USER);
        final ConfigProperties.ConfigProperty password = config.get("org.uberfire.ext.security.management.wildfly.cli.password", DEFAULT_ADMIN_PASSWORD);
        final ConfigProperties.ConfigProperty realm = config.get("org.uberfire.ext.security.management.wildfly.cli.realm", DEFAULT_REALM);
        this.host = host.getValue();
        this.port = Integer.decode(port.getValue());
        this.adminUser = user.getValue();
        this.adminPassword = password.getValue();
        this.realm = realm.getValue();
    }

    protected abstract String getPropertiesFilePath(final String context) throws Exception;

    protected static boolean isConfigPropertySet(ConfigProperties.ConfigProperty property) {
        if (property == null) return false;
        String value = property.getValue();
        return !isEmpty(value);
    }

    protected static boolean isEmpty(String s) {
        return s == null || s.trim().length() == 0;
    }
}
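// Hedged configuration sketch, not part of the original class: the property keys
// read by loadConfig() above, shown as JVM system properties. The example values
// are placeholders, not taken from any real environment.
//
//   -Dorg.uberfire.ext.security.management.wildfly.cli.host=localhost
//   -Dorg.uberfire.ext.security.management.wildfly.cli.port=9990
//   -Dorg.uberfire.ext.security.management.wildfly.cli.user=admin
//   -Dorg.uberfire.ext.security.management.wildfly.cli.password=changeit
//   -Dorg.uberfire.ext.security.management.wildfly.cli.realm=ApplicationRealm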
// src/test/script_standard_tests.cpp
// Copyright (c) 2017-2018 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#include <key.h>
#include <keystore.h>
#include <script/ismine.h>
#include <script/script.h>
#include <script/script_error.h>
#include <script/standard.h>
#include <test/test_bitcoin.h>

#include <boost/test/unit_test.hpp>

BOOST_FIXTURE_TEST_SUITE(script_standard_tests, BasicTestingSetup)

BOOST_AUTO_TEST_CASE(script_standard_Solver_success)
{
    CKey keys[3];
    CPubKey pubkeys[3];
    for (int i = 0; i < 3; i++) {
        keys[i].MakeNewKey(true);
        pubkeys[i] = keys[i].GetPubKey();
    }

    CScript s;
    std::vector<std::vector<unsigned char> > solutions;

    // TX_PUBKEY
    s.clear();
    s << ToByteVector(pubkeys[0]) << OP_CHECKSIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_PUBKEY);
    BOOST_CHECK_EQUAL(solutions.size(), 1U);
    BOOST_CHECK(solutions[0] == ToByteVector(pubkeys[0]));

    // TX_PUBKEYHASH
    s.clear();
    s << OP_DUP << OP_HASH160 << ToByteVector(pubkeys[0].GetID()) << OP_EQUALVERIFY << OP_CHECKSIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_PUBKEYHASH);
    BOOST_CHECK_EQUAL(solutions.size(), 1U);
    BOOST_CHECK(solutions[0] == ToByteVector(pubkeys[0].GetID()));

    // TX_SCRIPTHASH
    CScript redeemScript(s); // initialize with leftover P2PKH script
    s.clear();
    s << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_SCRIPTHASH);
    BOOST_CHECK_EQUAL(solutions.size(), 1U);
    BOOST_CHECK(solutions[0] == ToByteVector(CScriptID(redeemScript)));

    // TX_MULTISIG
    s.clear();
    s << OP_1 << ToByteVector(pubkeys[0]) << ToByteVector(pubkeys[1]) << OP_2 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_MULTISIG);
    BOOST_CHECK_EQUAL(solutions.size(), 4U);
    BOOST_CHECK(solutions[0] == std::vector<unsigned char>({1}));
    BOOST_CHECK(solutions[1] == ToByteVector(pubkeys[0]));
    BOOST_CHECK(solutions[2] == ToByteVector(pubkeys[1]));
    BOOST_CHECK(solutions[3] == std::vector<unsigned char>({2}));

    s.clear();
    s << OP_2 << ToByteVector(pubkeys[0]) << ToByteVector(pubkeys[1]) << ToByteVector(pubkeys[2]) << OP_3 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_MULTISIG);
    BOOST_CHECK_EQUAL(solutions.size(), 5U);
    BOOST_CHECK(solutions[0] == std::vector<unsigned char>({2}));
    BOOST_CHECK(solutions[1] == ToByteVector(pubkeys[0]));
    BOOST_CHECK(solutions[2] == ToByteVector(pubkeys[1]));
    BOOST_CHECK(solutions[3] == ToByteVector(pubkeys[2]));
    BOOST_CHECK(solutions[4] == std::vector<unsigned char>({3}));

    // TX_NULL_DATA
    s.clear();
    s << OP_RETURN << std::vector<unsigned char>({0}) << std::vector<unsigned char>({75}) << std::vector<unsigned char>({255});
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NULL_DATA);
    BOOST_CHECK_EQUAL(solutions.size(), 0U);

    // TX_WITNESS_V0_KEYHASH
    s.clear();
    s << OP_0 << ToByteVector(pubkeys[0].GetID());
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_WITNESS_V0_KEYHASH);
    BOOST_CHECK_EQUAL(solutions.size(), 1U);
    BOOST_CHECK(solutions[0] == ToByteVector(pubkeys[0].GetID()));

    // TX_WITNESS_V0_SCRIPTHASH
    uint256 scriptHash;
    CSHA256().Write(&redeemScript[0], redeemScript.size())
        .Finalize(scriptHash.begin());

    s.clear();
    s << OP_0 << ToByteVector(scriptHash);
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_WITNESS_V0_SCRIPTHASH);
    BOOST_CHECK_EQUAL(solutions.size(), 1U);
    BOOST_CHECK(solutions[0] == ToByteVector(scriptHash));

    // TX_NONSTANDARD
    s.clear();
    s << OP_9 << OP_ADD << OP_11 << OP_EQUAL;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);
}

BOOST_AUTO_TEST_CASE(script_standard_Solver_failure)
{
    CKey key;
    CPubKey pubkey;
    key.MakeNewKey(true);
    pubkey = key.GetPubKey();

    CScript s;
    std::vector<std::vector<unsigned char> > solutions;

    // TX_PUBKEY with incorrectly sized pubkey
    s.clear();
    s << std::vector<unsigned char>(30, 0x01) << OP_CHECKSIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_PUBKEYHASH with incorrectly sized key hash
    s.clear();
    s << OP_DUP << OP_HASH160 << ToByteVector(pubkey) << OP_EQUALVERIFY << OP_CHECKSIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_SCRIPTHASH with incorrectly sized script hash
    s.clear();
    s << OP_HASH160 << std::vector<unsigned char>(21, 0x01) << OP_EQUAL;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_MULTISIG 0/2
    s.clear();
    s << OP_0 << ToByteVector(pubkey) << OP_1 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_MULTISIG 2/1
    s.clear();
    s << OP_2 << ToByteVector(pubkey) << OP_1 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_MULTISIG n = 2 with 1 pubkey
    s.clear();
    s << OP_1 << ToByteVector(pubkey) << OP_2 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_MULTISIG n = 1 with 0 pubkeys
    s.clear();
    s << OP_1 << OP_1 << OP_CHECKMULTISIG;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_NULL_DATA with other opcodes
    s.clear();
    s << OP_RETURN << std::vector<unsigned char>({75}) << OP_ADD;
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);

    // TX_WITNESS with incorrect program size
    s.clear();
    s << OP_0 << std::vector<unsigned char>(19, 0x01);
    BOOST_CHECK_EQUAL(Solver(s, solutions), TX_NONSTANDARD);
}

BOOST_AUTO_TEST_CASE(script_standard_ExtractDestination)
{
    CKey key;
    CPubKey pubkey;
    key.MakeNewKey(true);
    pubkey = key.GetPubKey();

    CScript s;
    CTxDestination address;

    // TX_PUBKEY
    s.clear();
    s << ToByteVector(pubkey) << OP_CHECKSIG;
    BOOST_CHECK(ExtractDestination(s, address));
    BOOST_CHECK(boost::get<CKeyID>(&address) &&
                *boost::get<CKeyID>(&address) == pubkey.GetID());

    // TX_PUBKEYHASH
    s.clear();
    s << OP_DUP << OP_HASH160 << ToByteVector(pubkey.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG;
    BOOST_CHECK(ExtractDestination(s, address));
    BOOST_CHECK(boost::get<CKeyID>(&address) &&
                *boost::get<CKeyID>(&address) == pubkey.GetID());

    // TX_SCRIPTHASH
    CScript redeemScript(s); // initialize with leftover P2PKH script
    s.clear();
    s << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL;
    BOOST_CHECK(ExtractDestination(s, address));
    BOOST_CHECK(boost::get<CScriptID>(&address) &&
                *boost::get<CScriptID>(&address) == CScriptID(redeemScript));

    // TX_MULTISIG
    s.clear();
    s << OP_1 << ToByteVector(pubkey) << OP_1 << OP_CHECKMULTISIG;
    BOOST_CHECK(!ExtractDestination(s, address));

    // TX_NULL_DATA
    s.clear();
    s << OP_RETURN << std::vector<unsigned char>({75});
    BOOST_CHECK(!ExtractDestination(s, address));

    // TX_WITNESS_V0_KEYHASH
    s.clear();
    s << OP_0 << ToByteVector(pubkey.GetID());
    BOOST_CHECK(ExtractDestination(s, address));
    WitnessV0KeyHash keyhash;
    CHash160().Write(pubkey.begin(), pubkey.size()).Finalize(keyhash.begin());
    BOOST_CHECK(boost::get<WitnessV0KeyHash>(&address) && *boost::get<WitnessV0KeyHash>(&address) == keyhash);

    // TX_WITNESS_V0_SCRIPTHASH
    s.clear();
    WitnessV0ScriptHash scripthash;
    CSHA256().Write(redeemScript.data(), redeemScript.size()).Finalize(scripthash.begin());
    s << OP_0 << ToByteVector(scripthash);
    BOOST_CHECK(ExtractDestination(s, address));
    BOOST_CHECK(boost::get<WitnessV0ScriptHash>(&address) && *boost::get<WitnessV0ScriptHash>(&address) == scripthash);

    // TX_WITNESS with unknown version
    s.clear();
    s << OP_1 << ToByteVector(pubkey);
    BOOST_CHECK(ExtractDestination(s, address));
    WitnessUnknown unk;
    unk.length = 33;
    unk.version = 1;
    std::copy(pubkey.begin(), pubkey.end(), unk.program);
    BOOST_CHECK(boost::get<WitnessUnknown>(&address) && *boost::get<WitnessUnknown>(&address) == unk);
}

BOOST_AUTO_TEST_CASE(script_standard_ExtractDestinations)
{
    CKey keys[3];
    CPubKey pubkeys[3];
    for (int i = 0; i < 3; i++) {
        keys[i].MakeNewKey(true);
        pubkeys[i] = keys[i].GetPubKey();
    }

    CScript s;
    txnouttype whichType;
    std::vector<CTxDestination> addresses;
    int nRequired;

    // TX_PUBKEY
    s.clear();
    s << ToByteVector(pubkeys[0]) << OP_CHECKSIG;
    BOOST_CHECK(ExtractDestinations(s, whichType, addresses, nRequired));
    BOOST_CHECK_EQUAL(whichType, TX_PUBKEY);
    BOOST_CHECK_EQUAL(addresses.size(), 1U);
    BOOST_CHECK_EQUAL(nRequired, 1);
    BOOST_CHECK(boost::get<CKeyID>(&addresses[0]) &&
                *boost::get<CKeyID>(&addresses[0]) == pubkeys[0].GetID());

    // TX_PUBKEYHASH
    s.clear();
    s << OP_DUP << OP_HASH160 << ToByteVector(pubkeys[0].GetID()) << OP_EQUALVERIFY << OP_CHECKSIG;
    BOOST_CHECK(ExtractDestinations(s, whichType, addresses, nRequired));
    BOOST_CHECK_EQUAL(whichType, TX_PUBKEYHASH);
    BOOST_CHECK_EQUAL(addresses.size(), 1U);
    BOOST_CHECK_EQUAL(nRequired, 1);
    BOOST_CHECK(boost::get<CKeyID>(&addresses[0]) &&
                *boost::get<CKeyID>(&addresses[0]) == pubkeys[0].GetID());

    // TX_SCRIPTHASH
    CScript redeemScript(s); // initialize with leftover P2PKH script
    s.clear();
    s << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL;
    BOOST_CHECK(ExtractDestinations(s, whichType, addresses, nRequired));
    BOOST_CHECK_EQUAL(whichType, TX_SCRIPTHASH);
    BOOST_CHECK_EQUAL(addresses.size(), 1U);
    BOOST_CHECK_EQUAL(nRequired, 1);
    BOOST_CHECK(boost::get<CScriptID>(&addresses[0]) &&
                *boost::get<CScriptID>(&addresses[0]) == CScriptID(redeemScript));

    // TX_MULTISIG
    s.clear();
    s << OP_2 << ToByteVector(pubkeys[0]) << ToByteVector(pubkeys[1]) << OP_2 << OP_CHECKMULTISIG;
    BOOST_CHECK(ExtractDestinations(s, whichType, addresses, nRequired));
    BOOST_CHECK_EQUAL(whichType, TX_MULTISIG);
    BOOST_CHECK_EQUAL(addresses.size(), 2U);
    BOOST_CHECK_EQUAL(nRequired, 2);
    BOOST_CHECK(boost::get<CKeyID>(&addresses[0]) &&
                *boost::get<CKeyID>(&addresses[0]) == pubkeys[0].GetID());
    BOOST_CHECK(boost::get<CKeyID>(&addresses[1]) &&
                *boost::get<CKeyID>(&addresses[1]) == pubkeys[1].GetID());

    // TX_NULL_DATA
    s.clear();
    s << OP_RETURN << std::vector<unsigned char>({75});
    BOOST_CHECK(!ExtractDestinations(s, whichType, addresses, nRequired));
}

BOOST_AUTO_TEST_CASE(script_standard_GetScriptFor_)
{
    CKey keys[3];
    CPubKey pubkeys[3];
    for (int i = 0; i < 3; i++) {
        keys[i].MakeNewKey(true);
        pubkeys[i] = keys[i].GetPubKey();
    }

    CScript expected, result;

    // CKeyID
    expected.clear();
    expected << OP_DUP << OP_HASH160 << ToByteVector(pubkeys[0].GetID()) << OP_EQUALVERIFY << OP_CHECKSIG;
    result = GetScriptForDestination(pubkeys[0].GetID());
    BOOST_CHECK(result == expected);

    // CScriptID
    CScript redeemScript(result);
    expected.clear();
    expected << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL;
    result = GetScriptForDestination(CScriptID(redeemScript));
    BOOST_CHECK(result == expected);

    // CNoDestination
    expected.clear();
    result = GetScriptForDestination(CNoDestination());
    BOOST_CHECK(result == expected);

    // GetScriptForRawPubKey
    expected.clear();
    expected << ToByteVector(pubkeys[0]) << OP_CHECKSIG;
    result = GetScriptForRawPubKey(pubkeys[0]);
    BOOST_CHECK(result == expected);

    // GetScriptForMultisig
    expected.clear();
    expected << OP_2 << ToByteVector(pubkeys[0]) << ToByteVector(pubkeys[1]) << ToByteVector(pubkeys[2]) << OP_3 << OP_CHECKMULTISIG;
    result = GetScriptForMultisig(2, std::vector<CPubKey>(pubkeys, pubkeys + 3));
    BOOST_CHECK(result == expected);

    // GetScriptForWitness
    CScript witnessScript;

    witnessScript << ToByteVector(pubkeys[0]) << OP_CHECKSIG;
    expected.clear();
    expected << OP_0 << ToByteVector(pubkeys[0].GetID());
    result = GetScriptForWitness(witnessScript);
    BOOST_CHECK(result == expected);

    witnessScript.clear();
    witnessScript << OP_DUP << OP_HASH160 << ToByteVector(pubkeys[0].GetID()) << OP_EQUALVERIFY << OP_CHECKSIG;
    result = GetScriptForWitness(witnessScript);
    BOOST_CHECK(result == expected);

    witnessScript.clear();
    witnessScript << OP_1 << ToByteVector(pubkeys[0]) << OP_1 << OP_CHECKMULTISIG;

    uint256 scriptHash;
    CSHA256().Write(&witnessScript[0], witnessScript.size())
        .Finalize(scriptHash.begin());

    expected.clear();
    expected << OP_0 << ToByteVector(scriptHash);
    result = GetScriptForWitness(witnessScript);
    BOOST_CHECK(result == expected);
}

BOOST_AUTO_TEST_CASE(script_standard_IsMine)
{
    CKey keys[2];
    CPubKey pubkeys[2];
    for (int i = 0; i < 2; i++) {
        keys[i].MakeNewKey(true);
        pubkeys[i] = keys[i].GetPubKey();
    }

    CKey uncompressedKey;
    uncompressedKey.MakeNewKey(false);
    CPubKey uncompressedPubkey = uncompressedKey.GetPubKey();

    CScript scriptPubKey;
    isminetype result;

    // P2PK compressed
    {
        CBasicKeyStore keystore;
        scriptPubKey = GetScriptForRawPubKey(pubkeys[0]);

        // keystore does not have key
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has key
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2PK uncompressed
    {
        CBasicKeyStore keystore;
        scriptPubKey = GetScriptForRawPubKey(uncompressedPubkey);

        // keystore does not have key
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has key
        BOOST_CHECK(keystore.AddKey(uncompressedKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2PKH compressed
    {
        CBasicKeyStore keystore;
        scriptPubKey = GetScriptForDestination(pubkeys[0].GetID());

        // keystore does not have key
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has key
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2PKH uncompressed
    {
        CBasicKeyStore keystore;
        scriptPubKey = GetScriptForDestination(uncompressedPubkey.GetID());

        // keystore does not have key
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has key
        BOOST_CHECK(keystore.AddKey(uncompressedKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2SH
    {
        CBasicKeyStore keystore;

        CScript redeemScript = GetScriptForDestination(pubkeys[0].GetID());
        scriptPubKey = GetScriptForDestination(CScriptID(redeemScript));

        // keystore does not have redeemScript or key
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has redeemScript but no key
        BOOST_CHECK(keystore.AddCScript(redeemScript));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has redeemScript and key
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // (P2PKH inside) P2SH inside P2SH (invalid)
    {
        CBasicKeyStore keystore;

        CScript redeemscript_inner = GetScriptForDestination(pubkeys[0].GetID());
        CScript redeemscript = GetScriptForDestination(CScriptID(redeemscript_inner));
        scriptPubKey = GetScriptForDestination(CScriptID(redeemscript));

        BOOST_CHECK(keystore.AddCScript(redeemscript));
        BOOST_CHECK(keystore.AddCScript(redeemscript_inner));
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // (P2PKH inside) P2SH inside P2WSH (invalid)
    {
        CBasicKeyStore keystore;

        CScript redeemscript = GetScriptForDestination(pubkeys[0].GetID());
        CScript witnessscript = GetScriptForDestination(CScriptID(redeemscript));
        scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash(witnessscript));

        BOOST_CHECK(keystore.AddCScript(witnessscript));
        BOOST_CHECK(keystore.AddCScript(redeemscript));
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // P2WPKH inside P2WSH (invalid)
    {
        CBasicKeyStore keystore;

        CScript witnessscript = GetScriptForDestination(WitnessV0KeyHash(pubkeys[0].GetID()));
        scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash(witnessscript));

        BOOST_CHECK(keystore.AddCScript(witnessscript));
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // (P2PKH inside) P2WSH inside P2WSH (invalid)
    {
        CBasicKeyStore keystore;

        CScript witnessscript_inner = GetScriptForDestination(pubkeys[0].GetID());
        CScript witnessscript = GetScriptForDestination(WitnessV0ScriptHash(witnessscript_inner));
        scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash(witnessscript));

        BOOST_CHECK(keystore.AddCScript(witnessscript_inner));
        BOOST_CHECK(keystore.AddCScript(witnessscript));
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        BOOST_CHECK(keystore.AddKey(keys[0]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // P2WPKH compressed
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));

        scriptPubKey = GetScriptForDestination(WitnessV0KeyHash(pubkeys[0].GetID()));

        // keystore implicitly has key and P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2WPKH uncompressed
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(uncompressedKey));

        scriptPubKey = GetScriptForDestination(WitnessV0KeyHash(uncompressedPubkey.GetID()));

        // keystore has key, but no P2SH redeemScript
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has key and P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // scriptPubKey multisig
    {
        CBasicKeyStore keystore;

        scriptPubKey = GetScriptForMultisig(2, {uncompressedPubkey, pubkeys[1]});

        // keystore does not have any keys
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has 1/2 keys
        BOOST_CHECK(keystore.AddKey(uncompressedKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has 2/2 keys
        BOOST_CHECK(keystore.AddKey(keys[1]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has 2/2 keys and the script
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // P2SH multisig
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(uncompressedKey));
        BOOST_CHECK(keystore.AddKey(keys[1]));

        CScript redeemScript = GetScriptForMultisig(2, {uncompressedPubkey, pubkeys[1]});
        scriptPubKey = GetScriptForDestination(CScriptID(redeemScript));

        // keystore does not have redeemScript
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has redeemScript
        BOOST_CHECK(keystore.AddCScript(redeemScript));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2WSH multisig with compressed keys
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));
        BOOST_CHECK(keystore.AddKey(keys[1]));

        CScript witnessScript = GetScriptForMultisig(2, {pubkeys[0], pubkeys[1]});
        scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash(witnessScript));

        // keystore has keys, but no witnessScript or P2SH redeemScript
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has keys and witnessScript, but no P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(witnessScript));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has keys, witnessScript, P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // P2WSH multisig with uncompressed key
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(uncompressedKey));
        BOOST_CHECK(keystore.AddKey(keys[1]));

        CScript witnessScript = GetScriptForMultisig(2, {uncompressedPubkey, pubkeys[1]});
        scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash(witnessScript));

        // keystore has keys, but no witnessScript or P2SH redeemScript
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has keys and witnessScript, but no P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(witnessScript));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has keys, witnessScript, P2SH redeemScript
        BOOST_CHECK(keystore.AddCScript(scriptPubKey));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // P2WSH multisig wrapped in P2SH
    {
        CBasicKeyStore keystore;

        CScript witnessScript = GetScriptForMultisig(2, {pubkeys[0], pubkeys[1]});
        CScript redeemScript = GetScriptForDestination(WitnessV0ScriptHash(witnessScript));
        scriptPubKey = GetScriptForDestination(CScriptID(redeemScript));

        // keystore has no witnessScript, P2SH redeemScript, or keys
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has witnessScript and P2SH redeemScript, but no keys
        BOOST_CHECK(keystore.AddCScript(redeemScript));
        BOOST_CHECK(keystore.AddCScript(witnessScript));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);

        // keystore has keys, witnessScript, P2SH redeemScript
        BOOST_CHECK(keystore.AddKey(keys[0]));
        BOOST_CHECK(keystore.AddKey(keys[1]));
        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_SPENDABLE);
    }

    // OP_RETURN
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));

        scriptPubKey.clear();
        scriptPubKey << OP_RETURN << ToByteVector(pubkeys[0]);

        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // witness unspendable
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));

        scriptPubKey.clear();
        scriptPubKey << OP_0 << ToByteVector(ParseHex("aabb"));

        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // witness unknown
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));

        scriptPubKey.clear();
        scriptPubKey << OP_16 << ToByteVector(ParseHex("aabb"));

        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }

    // Nonstandard
    {
        CBasicKeyStore keystore;
        BOOST_CHECK(keystore.AddKey(keys[0]));

        scriptPubKey.clear();
        scriptPubKey << OP_9 << OP_ADD << OP_11 << OP_EQUAL;

        result = IsMine(keystore, scriptPubKey);
        BOOST_CHECK_EQUAL(result, ISMINE_NO);
    }
}

BOOST_AUTO_TEST_SUITE_END()
#include "help.h" /**************************************************************************************************************/ /*Constructs the Help object*/ Help::Help(StateManager * inStateManager, SDL_Renderer* inRenderer, int inWidth, int inHeight) : State(inStateManager, inRenderer, inWidth, inHeight) { /*initialise the text*/ text.push_back(new Text("Hit Delete to Quit", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("Hit Escape to Return", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("Commands:", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Press enter to apply all rules.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Press 1 to apply rule 1 only.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Press 2 to apply rule 2 only.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Press 3 to apply rule 3 only.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Hold Space to scatter the flock.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("Rules:", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Rule 1 = Boids try to fly towards the centre of mass", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("of neighbouring boids.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Rule 2 = Boids try to keep a small distance away from other", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("objects (including other boids).", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); text.push_back(new Text("-Rule 3 = Boids try to match velocity with near boids.", "font/Underdog_tt_hinted.ttf", renderer, 102, 51, 153)); } /**************************************************************************************************************/ /*Destructs the Help object*/ Help::~Help() { /*delete text*/ for (unsigned int i = 0; i < text.size(); i++) { delete text[i]; } } /**************************************************************************************************************/ /*handles inputs*/ bool Help::input() { /*Check for user input*/ SDL_Event incomingEvent; while (SDL_PollEvent(&incomingEvent)) { switch (incomingEvent.type) { case SDL_QUIT: /*If player closes the window, end the game loop*/ return false; break; case SDL_KEYDOWN: switch (incomingEvent.key.keysym.sym) { case SDLK_DELETE: /*If Delete is pressed, end the game loop*/ return false; break; case SDLK_ESCAPE: /*If Escape is pressed, return to game*/ /*return to the game*/ stateManager->changeState(new Game(stateManager, renderer, screenWidth, screenHeight)); return true; break; } break; } } return true; } /**************************************************************************************************************/ /*updates the Help*/ void Help::update(float dt) { } /**************************************************************************************************************/ /*draws the Help*/ void Help::draw() { /*set draw colour to purple*/ SDL_SetRenderDrawColor(renderer, 0xFF, 0xFF, 0xFF, 0xFF); /*Clear the entire screen to the set colour*/ SDL_RenderClear(renderer); /*display text*/ text[0]->pushToScreen(renderer, screenWidth - 210, 10, 200, 20); text[1]->pushToScreen(renderer, 10, 10, 200, 20); 
text[2]->pushToScreen(renderer, 10, 70, 100, 20); text[3]->pushToScreen(renderer, 10, 100, 300, 20); text[4]->pushToScreen(renderer, 10, 130, 280, 20); text[5]->pushToScreen(renderer, 10, 160, 280, 20); text[6]->pushToScreen(renderer, 10, 190, 280, 20); text[7]->pushToScreen(renderer, 10, 220, 320, 20); text[8]->pushToScreen(renderer, 10, 280, 60, 20); text[9]->pushToScreen(renderer, 10, 310, 550, 20); text[10]->pushToScreen(renderer, 110, 340, 220, 20); text[11]->pushToScreen(renderer, 10, 370, 580, 20); text[12]->pushToScreen(renderer, 110, 400, 260, 20); text[13]->pushToScreen(renderer, 10, 430, 500, 20); /*display renderer*/ SDL_RenderPresent(renderer); }
<reponame>dornerworks/camkes<filename>apps/aeroplage/components/Switch/switch.c /* * Copyright 2017, Data61 * Commonwealth Scientific and Industrial Research Organisation (CSIRO) * ABN 41 687 119 230. * * This software may be distributed and modified according to the terms of * the BSD 2-Clause license. Note that NO WARRANTY is provided. * See "LICENSE_BSD2.txt" for details. * * @TAG(DATA61_BSD) */ /* CAmkES provides a generated header that prototypes all the relevant * generated symbols. */ #include <camkes.h> #include <ringbuffer/ringbuffer.h> #include <stdlib.h> /* If we receive this character, interpret it as a request to switch outputs. */ #define SWITCH_CHAR '\t' /* The current output we're sending data to. */ static enum { LOW, HIGH, } selected = LOW; /* This function is invoked by the main CAmkES thread in this component. */ int run(void) { ringbuffer_t *low = rb_new((void*)low_output, sizeof(*low_output)); if (low == NULL) { abort(); } ringbuffer_t *high = rb_new((void*)high_output, sizeof(*high_output)); if (high == NULL) { abort(); } ringbuffer_t *input = rb_new((void*)char_in, sizeof(*char_in)); if (input == NULL) { abort(); } while (true) { char c = (char)rb_receive_byte(input); // printf("Switch received: %x\n", c); if (c == SWITCH_CHAR) { // printf("Switching!\n"); /* Swap which output we see as active. */ if (selected == LOW) { selected = HIGH; } else { selected = LOW; } } else { /* Send the character to the active output. */ if (selected == LOW) { rb_transmit_byte(low, (unsigned char)c); } else { rb_transmit_byte(high, (unsigned char)c); } } } return 0; }
// PostNewOutfit saves a new outfit into database func PostNewOutfit(request *http.Request, userID string) { outfit, err := model.NewOutfitForm(request.R()) if err != nil { request.BadRequest(err.Error()) return } outfit.ID = uuid.Must(uuid.NewV4()).String() outfit.UserID = userID outfit.CreatedAt = time.Now() if err := db.InsertOutfit(*outfit); err != nil { request.InternalServerError(errors.Annotate(err, "inserting outfit into database failed")) return } request.Redirect("/outfits/" + outfit.ID) }
Unicystic ameloblastoma - A case report

Ameloblastoma is one of the most common benign odontogenic tumors and is typically encountered in the molar-ramus area of the mandible. Because this tumor shows some similarities with dentigerous cysts, both clinically and radiographically, the biologic behaviour of this tumor group was reviewed. Recurrence is a concern in unicystic ameloblastoma, and patients require long-term follow-up. Here we present a case of unicystic ameloblastoma in a 55-year-old female with an edentulous ridge.

Introduction
Ameloblastomas are benign tumors whose importance lies in their potential to grow to an enormous size, with resulting bone deformity. They are typically classified into unicystic, multicystic, peripheral and malignant subtypes. 1 Ameloblastomas are tumors of odontogenic epithelial origin. Theoretically, they may arise from rests of the dental lamina, from a developing enamel organ, from the epithelial lining of an odontogenic cyst, or from the basal cells of the oral mucosa. Unicystic ameloblastoma is a less common variant of ameloblastoma and refers to cystic lesions that show the clinical and radiographic signs of an odontogenic cyst but on histological examination reveal a typical ameloblastomatous epithelial lining of the cyst cavity, with or without tumor proliferation. 2 Ameloblastomas are slow-growing, locally invasive tumors that follow a benign course in most cases.

Case Report
A 55-year-old female patient presented to the Department of Oral Medicine and Radiology with the chief complaint of a swelling in the right posterior mandibular region that had been increasing in size for 2 to 3 months. The patient had no history of smoking, alcohol use or betel-nut chewing. There was no associated pain, and she had no difficulty with mouth opening or chewing. The past medical history was unremarkable; the patient was otherwise asymptomatic, with no associated lymphadenopathy of the head and neck. On extraoral examination there was a swelling measuring 5.0 x 3.0 x 2.0 cm, extending from the corner of the lip to the angle of the mandible. An extraoral radiograph, an orthopantomogram (OPG), revealed an osteolytic lesion of approximately 5.0 x 3.0 x 2.0 cm localized in the right retromolar area, distal to the lower right first molar region, displacing the third molar distally. Intraoral clinical examination revealed a vestibular swelling in the molar region that was non-erythematous and non-tender. FNAC was performed: fluid was aspirated from the swelling and stained, and the findings revealed a fibrous cyst wall with a lining consisting totally or partially of ameloblastic epithelium. The lining demonstrates a basal layer of columnar or cuboidal cells with hyperchromatic nuclei that show reversed polarity and basilar cytoplasmic vacuolization. The overlying epithelial cells are loosely cohesive and resemble stellate reticulum.

Discussion
Unicystic ameloblastoma, a variant of ameloblastoma, was first described by Robinson and Martinez. 3 Unicystic ameloblastomas are most often seen in younger patients, with about 50% of tumors diagnosed during the second decade of life. 4 Males are affected more often than females. More than 90% of unicystic ameloblastomas are found in the mandible, usually in the posterior regions. The lesion is often asymptomatic, although large lesions may cause a painless swelling of the jaws. 5 In many patients, the lesion typically appears as a circumscribed radiolucency surrounding the crown of an unerupted mandibular third molar, clinically resembling a dentigerous cyst. 6 In some cases the radiolucent area has scalloped margins but is still a unicystic ameloblastoma. 7 There has been much debate as to whether unicystic ameloblastoma develops de novo or arises in a pre-existing cyst. 8 In the present case the lesion was seen in the edentulous area of the posterior region, with the third molar tilted distally. The possibility of misdiagnosing such cases as dentigerous cysts poses a problem; repeat and deeper biopsies are advisable to reveal the underlying tumorous structure. The biologic behaviour must nevertheless be monitored, as this lesion can grow quickly. Histologically it is of three types, namely luminal, intraluminal and mural. These tumors are usually treated by enucleation. Recurrence rates of 10% to 20% have been reported after enucleation and curettage of unicystic ameloblastoma. 9 This range is considerably less than the 50% to 90% recurrence rates noted after curettage of conventional solid and multicystic intraosseous ameloblastomas. 10

Conclusion
The diagnosis of unicystic ameloblastoma is based mainly on the clinical, radiological and histological findings. The ability to predict its behaviour helps in treating the disease properly. The pathologist should also be able to differentiate it from other ameloblastomas, and it has to be properly differentiated from a dentigerous cyst, with which it is most often confused.

Source of funding
None.
<filename>scripts/cognito/scenarios/adminResetUserPassword.scenario.ts import assert from "assert"; import AWS from "aws-sdk"; import { createUserPoolClient, randomUsername, } from "../utils"; import { Scenario } from "./scenario"; const cognito = new AWS.CognitoIdentityServiceProvider(); const scenario: Scenario = { name: "AdminResetUserPassword", stubs: { CustomMessage_ForgotPassword: {}, CustomMessage_AdminCreateUser: {}, PreSignUp_AdminCreateUser: {} }, async setup(userPoolId) { const clientId = await createUserPoolClient(userPoolId); const username = randomUsername(); const password = "<PASSWORD>!"; await cognito .adminCreateUser({ UserPoolId: userPoolId, Username: username, TemporaryPassword: password, UserAttributes: [ { Name: "email", Value: "<EMAIL>" }, { Name: "email_verified", Value: "true" }, ], }) .promise(); await cognito .adminSetUserPassword({ UserPoolId: userPoolId, Username: username, Password: password, Permanent: true, }) .promise(); return { clientId, user: { username, password } }; }, async exec(args) { assert.ok(args.user); await cognito .adminResetUserPassword({ UserPoolId: args.userPoolId, Username: args.user.username, }) .promise(); }, }; export default scenario;
<filename>dtc.go package phash // copy pasta from https://ironchef-team21.googlecode.com/git-history/75856e07bb89645d0e56820d6e79f8219a06bfb7/ironchef_team21/src/ImagePHash.java import ( "image" "math" "sort" "github.com/disintegration/imaging" ) var ( dtcSizeBig = 32 dtcSize = 8 ) // DTC computes perceptual hash for image // using phash dtc image technique. // // 1. Reduce size to 32x32 // 2. Reduce color to greyscale // 3. Compute the DCT. // 4. Reduce the DCT to 8x8 in order to keep high frequencies. // 5. Compute the median value of 8x8 dtc. // 6. Further reduce the DCT into an uint64. func DTC(img image.Image) (phash uint64) { if img == nil { return } size := dtcSizeBig smallerSize := dtcSize /* 1. Reduce size. * Like Average Hash, pHash starts with a small image. * However, the image is larger than 8x8; 32x32 is a good size. * This is really done to simplify the DCT computation and not * because it is needed to reduce the high frequencies. */ im := imaging.Resize(img, size, size, imaging.Lanczos) /* 2. Reduce color. * The image is reduced to a grayscale just to further simplify * the number of computations. */ vals := make([]float64, size*size) for i := 0; i < size; i++ { for j := 0; j < size; j++ { vals[size*i+j] = colorToGreyScaleFloat64(im.At(i, j)) } } /* 3. Compute the DCT. * The DCT separates the image into a collection of frequencies * and scalars. While JPEG uses an 8x8 DCT, this algorithm uses * a 32x32 DCT. */ applyDCT2 := func(N int, f []float64) []float64 { // initialize coefficients c := make([]float64, N) c[0] = 1 / math.Sqrt(2) for i := 1; i < N; i++ { c[i] = 1 } // output goes here F := make([]float64, N*N) // construct a lookup table, because it's O(n^4) entries := (2 * N) * (N - 1) COS := make([]float64, entries) for i := 0; i < entries; i++ { COS[i] = math.Cos(float64(i) / float64(2*N) * math.Pi) } // the core loop inside a loop inside a loop... for u := 0; u < N; u++ { for v := 0; v < N; v++ { var sum float64 for i := 0; i < N; i++ { for j := 0; j < N; j++ { sum += COS[(2*i+1)*u] * COS[(2*j+1)*v] * f[N*i+j] } } sum *= ((c[u] * c[v]) / 4) F[N*u+v] = sum } } return F } dctVals := applyDCT2(size, vals) /* 4. Reduce the DCT. * This is the magic step. While the DCT is 32x32, just keep the * top-left 8x8. Those represent the lowest frequencies in the * picture. */ vals = make([]float64, 0, smallerSize*smallerSize) for x := 1; x <= smallerSize; x++ { for y := 1; y <= smallerSize; y++ { vals = append(vals, dctVals[size*x+y]) } } /* 5. Compute the median value. * Like the Average Hash, compute the mean DCT value (using only * the 8x8 DCT low-frequency values and excluding the first term * since the DC coefficient can be significantly different from * the other values and will throw off the average). */ sortedVals := make([]float64, smallerSize*smallerSize) copy(sortedVals, vals) sort.Float64s(sortedVals) median := sortedVals[smallerSize*smallerSize/2] /* 6. Further reduce the DCT. * This is the magic step. Set the 64 hash bits to 0 or 1 * depending on whether each of the 64 DCT values is above or * below the average value. The result doesn't tell us the * actual low frequencies; it just tells us the very-rough * relative scale of the frequencies to the mean. The result * will not vary as long as the overall structure of the image * remains the same; this will survive gamma and color histogram * adjustments without a problem. */ for n, e := range vals { if e > median { // when frequency is higher than median phash ^= (1 << uint64(n)) // set nth bit to one } } return phash }
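A minimal usage sketch for the DTC hash above, assumed to live in the same phash package: two images are decoded with the standard library and their 64-bit hashes compared by Hamming distance, the usual similarity test for perceptual hashes. The helper name, file paths, and the 10-bit rule of thumb mentioned in the comment are illustrative assumptions, not part of the original code.

// dtc_example.go - usage sketch only (assumed to sit alongside DTC in package phash).
package phash

import (
	"image"
	_ "image/jpeg" // register decoders for image.Decode
	_ "image/png"
	"math/bits"
	"os"
)

// HammingDTC loads two images and returns the Hamming distance between their
// DTC hashes; small distances (often <= 10 bits) suggest similar images.
func HammingDTC(pathA, pathB string) (int, error) {
	load := func(path string) (image.Image, error) {
		f, err := os.Open(path)
		if err != nil {
			return nil, err
		}
		defer f.Close()
		img, _, err := image.Decode(f)
		return img, err
	}
	a, err := load(pathA)
	if err != nil {
		return 0, err
	}
	b, err := load(pathB)
	if err != nil {
		return 0, err
	}
	// XOR the two 64-bit hashes and count the differing bits.
	return bits.OnesCount64(DTC(a) ^ DTC(b)), nil
}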
// SignData implements the KeyManager RPC of the same name. func (m *Base) SignData(ctx context.Context, req *keymanagerv0.SignDataRequest) (*keymanagerv0.SignDataResponse, error) { if req.KeyId == "" { return nil, status.Error(codes.InvalidArgument, "key id is required") } if req.SignerOpts == nil { return nil, status.Error(codes.InvalidArgument, "signer opts is required") } var signerOpts crypto.SignerOpts switch opts := req.SignerOpts.(type) { case *keymanagerv0.SignDataRequest_HashAlgorithm: if opts.HashAlgorithm == keymanagerv0.HashAlgorithm_UNSPECIFIED_HASH_ALGORITHM { return nil, status.Error(codes.InvalidArgument, "hash algorithm is required") } signerOpts = crypto.Hash(opts.HashAlgorithm) case *keymanagerv0.SignDataRequest_PssOptions: if opts.PssOptions == nil { return nil, status.Error(codes.InvalidArgument, "PSS options are nil") } if opts.PssOptions.HashAlgorithm == keymanagerv0.HashAlgorithm_UNSPECIFIED_HASH_ALGORITHM { return nil, status.Error(codes.InvalidArgument, "hash algorithm is required") } signerOpts = &rsa.PSSOptions{ SaltLength: int(opts.PssOptions.SaltLength), Hash: crypto.Hash(opts.PssOptions.HashAlgorithm), } default: return nil, status.Errorf(codes.InvalidArgument, "unsupported signer opts type %T", opts) } privateKey := m.getPrivateKey(req.KeyId) if privateKey == nil { return nil, status.Errorf(codes.NotFound, "no such key %q", req.KeyId) } signature, err := privateKey.Sign(rand.Reader, req.Data, signerOpts) if err != nil { return nil, status.Errorf(codes.Internal, "keypair %q signing operation failed: %v", req.KeyId, err) } return &keymanagerv0.SignDataResponse{ Signature: signature, }, nil }
def densityFactor(self, value):
    if value < 130:
        densfactor = 0
    elif 130 <= value <= 199:
        densfactor = 1
    elif 199 < value <= 299:
        densfactor = 2
    elif 299 < value <= 399:
        densfactor = 3
    else:
        densfactor = 4
    return densfactor
/* * Copyright 2014-2015 Groupon, Inc * Copyright 2014-2015 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.invoice.model; import java.math.BigDecimal; import java.util.LinkedHashMap; import java.util.List; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.LocalDate; import org.killbill.billing.catalog.api.BillingMode; import org.killbill.billing.catalog.api.BillingPeriod; import org.killbill.billing.invoice.InvoiceTestSuiteNoDB; import org.testng.Assert; import org.testng.annotations.Test; public class TestRecurringInArrear extends InvoiceTestSuiteNoDB { private static final DateTimeZone TIMEZONE = DateTimeZone.forID("Pacific/Pitcairn"); private static final BillingPeriod BILLING_PERIOD = BillingPeriod.MONTHLY; @Test(groups = "fast") public void testItemShouldNotStartInThePast() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = new LocalDate(2012, 7, 16); final LocalDate targetDate = new LocalDate(2012, 7, 16); final int billingCycleDayLocal = 15; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithNoEndDate() throws Exception { final LocalDate startDate = new LocalDate(new DateTime("2012-07-17T02:25:33.000Z", DateTimeZone.UTC), TIMEZONE); final LocalDate endDate = null; final LocalDate targetDate = new LocalDate(2012, 7, 16); final int billingCycleDayLocal = 15; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDBeforeStartDay() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = new LocalDate(2012, 8, 16); final LocalDate targetDate = new LocalDate(2012, 7, 16); final int billingCycleDayLocal = 15; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); final LocalDate targetDate2 = new LocalDate(2012, 8, 15); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 8, 15)); verifyInvoiceItems(startDate, endDate, targetDate2, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDEqualsStartDay() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = new LocalDate(2012, 8, 16); final LocalDate targetDate = new LocalDate(2012, 7, 16); final int billingCycleDayLocal = 16; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, 
LocalDate>(); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 8, 16)); final LocalDate targetDate2 = new LocalDate(2012, 8, 16); verifyInvoiceItems(startDate, endDate, targetDate2, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDAfterStartDay() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = new LocalDate(2012, 8, 16); final LocalDate targetDate = new LocalDate(2012, 7, 16); final int billingCycleDayLocal = 17; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); final LocalDate targetDate2 = new LocalDate(2012, 7, 17); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 7, 17)); verifyInvoiceItems(startDate, endDate, targetDate2, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDBeforeStartDayWithTargetDateIn3Months() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = null; final LocalDate targetDate = new LocalDate(2012, 10, 16); final int billingCycleDayLocal = 15; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 8, 15)); expectedDates.put(new LocalDate(2012, 8, 15), new LocalDate(2012, 9, 15)); expectedDates.put(new LocalDate(2012, 9, 15), new LocalDate(2012, 10, 15)); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDEqualsStartDayWithTargetDateIn3Months() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = null; final LocalDate targetDate = new LocalDate(2012, 10, 16); final int billingCycleDayLocal = 16; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 8, 16)); expectedDates.put(new LocalDate(2012, 8, 16), new LocalDate(2012, 9, 16)); expectedDates.put(new LocalDate(2012, 9, 16), new LocalDate(2012, 10, 16)); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } @Test(groups = "fast") public void testCalculateSimpleInvoiceItemWithBCDAfterStartDayWithTargetDateIn3Months() throws Exception { final LocalDate startDate = new LocalDate(2012, 7, 16); final LocalDate endDate = null; final LocalDate targetDate = new LocalDate(2012, 10, 16); final int billingCycleDayLocal = 17; final LinkedHashMap<LocalDate, LocalDate> expectedDates = new LinkedHashMap<LocalDate, LocalDate>(); expectedDates.put(new LocalDate(2012, 7, 16), new LocalDate(2012, 7, 17)); expectedDates.put(new LocalDate(2012, 7, 17), new LocalDate(2012, 8, 17)); expectedDates.put(new LocalDate(2012, 8, 17), new LocalDate(2012, 9, 17)); verifyInvoiceItems(startDate, endDate, targetDate, billingCycleDayLocal, BILLING_PERIOD, expectedDates); } private void verifyInvoiceItems(final LocalDate startDate, final LocalDate endDate, final LocalDate targetDate, final int billingCycleDayLocal, final BillingPeriod billingPeriod, final 
LinkedHashMap<LocalDate, LocalDate> expectedDates) throws InvalidDateSequenceException { final RecurringInvoiceItemDataWithNextBillingCycleDate invoiceItemsWithDates = fixedAndRecurringInvoiceItemGenerator.generateInvoiceItemData(startDate, endDate, targetDate, billingCycleDayLocal, billingPeriod, BillingMode.IN_ARREAR); final List<RecurringInvoiceItemData> invoiceItems = invoiceItemsWithDates.getItemData(); int i = 0; for (final LocalDate periodStartDate : expectedDates.keySet()) { Assert.assertEquals(invoiceItems.get(i).getStartDate(), periodStartDate); Assert.assertEquals(invoiceItems.get(i).getEndDate(), expectedDates.get(periodStartDate)); Assert.assertTrue(invoiceItems.get(0).getNumberOfCycles().compareTo(BigDecimal.ONE) <= 0); i++; } Assert.assertEquals(invoiceItems.size(), i); } }
import * as AWS from 'aws-sdk' import { DocumentClient } from 'aws-sdk/lib/dynamodb/document_client' import { NoSQLDatabase } from './NoSQLDatabase' import { NotImplementedErr } from '../../error/Error' import { Misc } from '../../collections/Misc' export class AwsDynamodb<T extends { id: string }> implements NoSQLDatabase<T> { ddb: DocumentClient table: string constructor(region: string, table: string) { AWS.config.update({ region }) this.ddb = new AWS.DynamoDB.DocumentClient() this.table = table } async select(id: string): Promise<T> { const params = { TableName: this.table, Key: { id } } return new Promise((resolve, reject) => { this.ddb.get(params, (err: any, data: any) => { if (err) { reject(err) } else if (!data || !data.Item) { reject(new Error(`No items retrieved from: ${this.table} : {id:${id}}`)) } else { resolve(Misc.convertISOToDateObj<T>(data.Item)) } }) }) } // FIXME: Sanitize keys with empty/null/undefined values async insert(data: T): Promise<T> { const params = { TableName: this.table, Item: Misc.convertDateObjToISO<T>(data) } return new Promise((resolve, reject) => { this.ddb.put(params, (err: any, _data: any) => { if (err) { reject(err) } else { resolve(data) } }) }) } async scan(): Promise<T[]> { return new Promise((resolve, reject) => { this.ddb.scan({ TableName: this.table }, (err: any, data: any) => { if (err) { reject(err) } else if (!data || !data.Items || data.Items.length <= 0) { resolve([]) } else { resolve(Misc.convertISOToDateObj<T[]>(data.Items)) } }) }) } async update(data: { id: string } & Partial<T>): Promise<T> { const current_data: T = await this.select(data.id) const new_data = { ...current_data, ...data } await this.insert(new_data) return new_data } async delete(id: string): Promise<void> { // console.log({ TableName: this.table, Key: { id: id } }) return new Promise((resolve, reject) => { this.ddb.delete({ TableName: this.table, Key: { id } }, (err: any, _data: any) => { if (err) { reject(err) } else { resolve() } }) }) } async deleteAll(): Promise<void> { throw new NotImplementedErr('AwsDynamodb.deleteAll() has not yet been implemented') } }
Effects of Effective-Mass Hamiltonian Forms on Valence Band Structures of Quantum Wells Calculations have been made of the valence band structures of two typical quantum well films, GaAs/Al0.3Ga0.7As and In0.53Ga0.47As/InP, using two kinds of effective-mass Hamiltonian forms (characterized by β=0 and β=-1). While the two methods produce no significant difference for GaAs/Al0.3Ga0.7As, a great difference is revealed for In0.53Ga0.47As/InP. The method using β=0 appears to be a better choice for the latter system.
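The abstract does not reproduce the Hamiltonians themselves, so as a purely illustrative note (an assumption, not the paper's actual multiband valence-band operator): an ordering parameter β of this kind conventionally enters through the von Roos form of the position-dependent effective-mass kinetic-energy operator, written here for the one-band case:

% One-band von Roos kinetic-energy operator with ordering (ambiguity) parameters
% satisfying \alpha + \beta + \gamma = -1; \beta = -1 with \alpha = \gamma = 0
% gives the BenDaniel-Duke form, while \beta = 0 places the mass factors outside.
T = -\frac{\hbar^{2}}{4}\Big[ m^{\alpha}(z)\,\nabla\, m^{\beta}(z)\,\nabla\, m^{\gamma}(z)
      + m^{\gamma}(z)\,\nabla\, m^{\beta}(z)\,\nabla\, m^{\alpha}(z) \Big],
\qquad \alpha + \beta + \gamma = -1 .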
#include<bits/stdc++.h> using namespace std; typedef long long ll; int main() { int n;cin>>n; vector<int> v; for(int i=1;i<=n;i++){ int tmp;scanf("%d",&tmp);v.push_back(tmp); } sort(v.begin(),v.end()); set<int> st; int l=0; for(int i=0;i<v.size();i++){ if(v[i]>l+1){ v[i]--;st.insert(v[i]);l=v[i]; } else if(v[i]==l+1){ l=v[i];st.insert(v[i]); }else if(v[i]==l){ v[i]++;l=v[i];st.insert(v[i]); } } cout<<st.size()<<endl; return 0; }
<reponame>pkgcraft/pkgcraft use scallop::builtins::{output_error_func, Builtin, ExecStatus}; use scallop::variables::string_value; use scallop::{source, Error, Result}; static LONG_DOC: &str = "\ Export stub functions that call the eclass's functions, thereby making them default. For example, if ECLASS=base and `EXPORT_FUNCTIONS src_unpack` is called the following function is defined: src_unpack() { base_src_unpack; }"; #[doc = stringify!(LONG_DOC)] pub(crate) fn run(args: &[&str]) -> Result<ExecStatus> { let eclass = match string_value("ECLASS") { Some(val) => val, None => return Err(Error::new("no ECLASS defined")), }; let funcs: Vec<String> = args .iter() .map(|func| { format!( "{func}() {{ {eclass}_{func} \"$@\"; }}", func = func, eclass = eclass ) }) .collect(); source::string(funcs.join("\n"))?; Ok(ExecStatus::Success) } pub static BUILTIN: Builtin = Builtin { name: "EXPORT_FUNCTIONS", func: run, help: LONG_DOC, usage: "EXPORT_FUNCTIONS src_configure src_compile", error_func: Some(output_error_func), };
# Copyright (c) OpenMMLab. All rights reserved. import torch from mmcv.parallel import MMDataParallel from .scatter_gather import scatter_kwargs class MLUDataParallel(MMDataParallel): """The MLUDataParallel module that supports DataContainer. MLUDataParallel is a class inherited from MMDataParall, which supports MLU training and inference only. The main differences with MMDataParallel: - It only supports single-card of MLU, and only use first card to run training and inference. - It uses direct host-to-device copy instead of stream-background scatter. .. warning:: MLUDataParallel only supports single MLU training, if you need to train with multiple MLUs, please use MLUDistributedDataParallel instead. If you have multiple MLUs, you can set the environment variable ``MLU_VISIBLE_DEVICES=0`` (or any other card number(s)) to specify the running device. Args: module (:class:`nn.Module`): Module to be encapsulated. dim (int): Dimension used to scatter the data. Defaults to 0. """ def __init__(self, *args, dim=0, **kwargs): super().__init__(*args, dim=dim, **kwargs) self.device_ids = [0] self.src_device_obj = torch.device('mlu:0') def scatter(self, inputs, kwargs, device_ids): return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim)
// CancelOrder closes an open order. func (c *Client) CancelOrder(orderId Id) (*CancelOrderResponse, error) { urlStr := fmt.Sprintf("/v1/accounts/%d/orders/%d", c.accountId, orderId) cor := CancelOrderResponse{} if err := requestAndDecode(c, "DELETE", urlStr, nil, &cor); err != nil { return nil, err } return &cor, nil }
class ChameleonCard: """Chameleon (fire, instant) - one point, is counted as an appropriate animal in further instant effects""" def __init__(self): self.element = 'Fire' self.type = 'instant' def __repr__(self): return "Chameleon" def get_instant_points(self, p_totems, x, y): points = 1 return points def get_eog_points(self): pass
import React, {FormEvent, useEffect, useState} from "react"; import { Button, Checkbox, Container, Dropdown, Form, Header, Message, Rating, RatingProps, TextArea } from "semantic-ui-react"; import {Book, RootState} from "./types"; import {connect, ConnectedProps} from "react-redux"; import {login} from "./actions"; import {DropdownProps} from "semantic-ui-react/dist/commonjs/modules/Dropdown/Dropdown"; import {Link} from "react-router-dom"; import {keycloakLogin} from "./KeycloakService"; const mapState = (state: RootState) => ({ isAuthenticated: state.authentication.isAuthenticated, token: state.authentication.details?.token }); const mapDispatch = { login } const connector = connect(mapState, mapDispatch); type PropsFromRedux = ConnectedProps<typeof connector> type Props = PropsFromRedux & {} const SubmitReviewContainer: React.FC<Props> = ({isAuthenticated, token}) => { const [bookOptions, setBookOptions] = useState<any>(); const [isbn, setIsbn] = useState<string | number | boolean | (string | number | boolean)[] | undefined>(""); const [reviewTitle, setReviewTitle] = useState(""); const [reviewContent, setReviewContent] = useState(""); const [rating, setRating] = useState<number | string | undefined>(0); const [confirmation, setConfirmation] = useState<boolean>(false); const [success, setSuccess] = useState<boolean>(false); const [errorMessage, setErrorMessage] = useState<string>(""); const handleSubmit = (evt: FormEvent) => { evt.preventDefault(); setSuccess(false) setErrorMessage("") fetch(`/api/books/${isbn}/reviews`, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${token}` }, body: JSON.stringify({ reviewTitle, reviewContent, rating }) }).then(result => { if (result.status === 201) { setSuccess(true) setConfirmation(false) setRating(0) setReviewTitle("") setReviewContent("") setIsbn("") } else if (result.status === 418) { setErrorMessage('Your review does not meet the quality standards, please read them carefully and submit again.') } else { setErrorMessage(`We could not store your review, please try again later: ${result.status}`) } }).catch(error => { setErrorMessage(`We could not store your review, please try again later: ${error}`) }); } useEffect(() => { fetch('/api/books') .then(result => result.json()) .then((result: Book[]) => { const formattedBooks = result.map((book: Book) => { return { "key": book.isbn, "text": `${book.title} - ${book.author}`, "value": book.isbn, "image": {"src": book.thumbnailUrl} } }); setBookOptions(formattedBooks); }) }, []); return ( <Container> <Header as='h2' textAlign='center'>Submit a new book review</Header> {isAuthenticated ? 
<Form size='large' onSubmit={(e: FormEvent) => handleSubmit(e)} success={success} error={errorMessage !== ""}> <Message success header='This was a success' content={<span>You successfully submitted a <Link to='/all-reviews'>new book review</Link>.</span>} /> <Message error header='There was an error' content={errorMessage} /> <Form.Dropdown loading={bookOptions == null} label='Select a book' id='book-selection' required control={Dropdown} placeholder='Search for a book' fluid clearable search selection value={isbn} onChange={(event: React.SyntheticEvent<HTMLElement>, data: DropdownProps) => setIsbn(data.value)} options={bookOptions || []} /> <Form.Input label='Title' placeholder='Enter the title of your review' id='review-title' value={reviewTitle} onChange={(e: any) => setReviewTitle(e.target.value)} required /> <Form.Field label='Your rating' control={Rating} id='book-rating' icon='star' size='huge' rating={rating} maxRating={5} onRate={(event: React.MouseEvent<HTMLDivElement>, data: RatingProps) => setRating(data.rating)} required clearable /> <Form.Field control={TextArea} label='Your review' id='review-content' placeholder='Enter your book review...' value={reviewContent} onChange={(e: any) => setReviewContent(e.target.value)} required /> <Message> <Message.Header>Quality standards for your review</Message.Header> <Message.List> <Message.Item>The review contains at least 10 words</Message.Item> <Message.Item>Swear words are not allowed</Message.Item> <Message.Item>Don't use 'I' or 'good' too often</Message.Item> </Message.List> <Button type="button" style={{marginTop: "5px"}} onClick={() => setReviewContent("This is an excellent book. I've learned quite a lot and can recommend it to every CS student.")}> Prefill review content</Button> </Message> <Form.Field control={Checkbox} checked={confirmation} onChange={() => setConfirmation(!confirmation)} label='I hereby affirm that I have read the book' /> <Button id='review-submit' secondary type='submit'>Submit your review </Button> </Form> : <Message icon='lock' header='Restricted area' content={<span>To submit a new book review, please <span style={{color: "rgb(30, 112, 191)", cursor: "pointer"}} onClick={() => keycloakLogin()}>login</span> first.</span>} /> } </Container> ); } export default connector(SubmitReviewContainer);
// Meta sets the Meta for the Grant request. func (b *grantBuilder) Meta(meta map[string]interface{}) *grantBuilder { b.opts.Meta = meta return b }
//******************************************************************************* // // Methods for Converting Options into strings // //******************************************************************************* Status Configurable::GetOptionString(const ConfigOptions& config_options, std::string* result) const { assert(result); result->clear(); #ifndef ROCKSDB_LITE return ConfigurableHelper::SerializeOptions(config_options, *this, "", result); #else (void)config_options; return Status::NotSupported("GetOptionString not supported in LITE mode"); #endif }
// mergeTem merges a single template file using tem format and the passed in variables
func (t *TemplMerger) mergeTem(tem []byte, env Envar) ([]byte, error) {
	content := string(tem)
	vars := t.regex.FindAll(tem, -1)
	for _, v := range vars {
		defValue := ""
		vname := strings.TrimSuffix(strings.TrimPrefix(string(v), "${"), "}")
		cut := strings.Index(vname, ":")
		if cut > 0 {
			defValue = vname[cut+1:]
			vname = vname[0:cut]
		}
		// PWD is reserved: return the error instead of discarding the fmt.Errorf result
		if vname == "PWD" {
			return nil, fmt.Errorf("environment variable cannot be PWD, choose a different name\n")
		}
		ev := env.Vars[vname]
		if len(ev) == 0 {
			if len(defValue) == 0 {
				return nil, fmt.Errorf("environment variable '%s' required and not defined, cannot merge\n", vname)
			} else {
				content = strings.Replace(content, string(v), defValue, -1)
			}
		} else {
			content = strings.Replace(content, string(v), ev, -1)
		}
	}
	return []byte(content), nil
}
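The ${NAME} / ${NAME:default} substitution rule used by mergeTem can also be exercised in isolation; the sketch below is a standalone, hypothetical illustration (its names and regex are assumptions, not part of the original TemplMerger type).

// substitution_sketch.go - standalone illustration of the ${NAME} / ${NAME:default} rule.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var token = regexp.MustCompile(`\$\{[^}]+\}`)

// substitute replaces ${NAME} tokens with values from vars, falling back to a
// ${NAME:default} default and erroring when neither is available.
func substitute(content string, vars map[string]string) (string, error) {
	var err error
	out := token.ReplaceAllStringFunc(content, func(m string) string {
		name := strings.TrimSuffix(strings.TrimPrefix(m, "${"), "}")
		def := ""
		if cut := strings.Index(name, ":"); cut > 0 {
			def = name[cut+1:]
			name = name[:cut]
		}
		if v, ok := vars[name]; ok && v != "" {
			return v
		}
		if def != "" {
			return def
		}
		err = fmt.Errorf("variable %q required and not defined", name)
		return m
	})
	return out, err
}

func main() {
	out, err := substitute("host=${HOST:localhost} user=${USER}", map[string]string{"USER": "artie"})
	fmt.Println(out, err) // host=localhost user=artie <nil>
}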
// First return the first item in the slice func (nas NodeAccounts) First() NodeAccount { if len(nas) > 0 { return nas[0] } return NodeAccount{} }
async def remove(self, ctx, member: discord.Member): rc = cf_common.user_db.remove_handle(member.id, ctx.guild.id) if not rc: raise HandleCogError(f'Handle for {member.mention} not found in database') await self.update_member_rank_role(member, role_to_assign=None, reason='Handle removed for user') await self.update_member_star_role(member, role_to_assign=None, reason='Handle removed for user') embed = discord_common.embed_success(f'Removed handle for {member.mention}') await ctx.send(embed=embed)
<gh_stars>0 # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- import logging # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys import benchbuild.utils # -- Project information ----------------------------------------------------- from pkg_resources import DistributionNotFound, get_distribution sys.path.insert(0, os.path.abspath('../../')) # pylint: skip-file try: __version__ = get_distribution("varats").version except DistributionNotFound: pass project = 'VaRA' copyright = '2020, <NAME>' author = '<NAME>' # The full version, including alpha/beta/rc tags release = __version__ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.napoleon', 'sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.autosectionlabel', 'sphinxcontrib.programoutput', 'sphinx_autodoc_typehints', ] # Add any paths that contain templates here, relative to this directory. # templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'haiku' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # -- Extension configuration ------------------------------------------------- pygments_style = 'sphinx' autodoc_member_order = "bysource" add_function_parentheses = True add_module_names = True # Import pandas without the type checking flag to avoid import errors. # The exact reason for these errors is unknown but might be related to # incompatible cython versions (https://github.com/cython/cython/issues/1953) import pandas # isort:skip # The autodocs typehints plugin does not resolve circular imports caused by type # annotations, so we have to manually break the circles. import rich.console # isort:skip import click # isort:skip import typing as tp # isort:skip tp.TYPE_CHECKING = True import varats.mapping.commit_map # isort:skip import varats.plot.plot # isort:skip tp.TYPE_CHECKING = False # set the type checking flag so all types can be resolved in the docs set_type_checking_flag = True # -- Prevent import warnings ------------------------------------------------- benchbuild.utils.LOG.setLevel(logging.ERROR)
/** * %clear selected macro sets either globally or in contexts */ static void do_clear(enum clear_what what, bool context) { if (context) { if (what & CLEAR_ALLDEFINE) { Context *ctx; list_for_each(ctx, cstk) clear_smacro_table(&ctx->localmac, what); } } else { if (what & CLEAR_ALLDEFINE) clear_smacro_table(&smacros, what); if (what & CLEAR_MMACRO) free_mmacro_table(&mmacros); } }
/** * Manages the lifecycle of a feed surface represented by {@link FeedSurfaceCoordinator} associated * with an Activity. */ public class FeedSurfaceLifecycleManager implements ApplicationStatus.ActivityStateListener { /** The different states that the Stream can be in its lifecycle. */ // TODO(chili): Clean up unused SHOWN/HIDDEN states. @IntDef({SurfaceState.NOT_SPECIFIED, SurfaceState.CREATED, SurfaceState.SHOWN, SurfaceState.ACTIVE, SurfaceState.INACTIVE, SurfaceState.HIDDEN, SurfaceState.DESTROYED}) @Retention(RetentionPolicy.SOURCE) protected @interface SurfaceState { int NOT_SPECIFIED = -1; int CREATED = 0; int SHOWN = 1; int ACTIVE = 2; int INACTIVE = 3; int HIDDEN = 4; int DESTROYED = 5; } /** The {@link FeedSurfaceCoordinator} that this class updates. */ protected final FeedSurfaceCoordinator mCoordinator; /** The current state the feed is in its lifecycle. */ protected @SurfaceState int mSurfaceState = SurfaceState.NOT_SPECIFIED; /** The {@link Activity} that {@link #mCoordinator} is attached to. */ private final Activity mActivity; /** * @param activity The {@link Activity} that the {@link FeedSurfaceCoordinator} is attached to. * @param coordinator The coordinator managing the feed surface. */ public FeedSurfaceLifecycleManager(Activity activity, FeedSurfaceCoordinator coordinator) { mActivity = activity; mCoordinator = coordinator; } /** * Notifies the feed that it should show if it can. */ protected void start() { mSurfaceState = SurfaceState.CREATED; show(); ApplicationStatus.registerStateListenerForActivity(this, mActivity); } @Override public void onActivityStateChange(Activity activity, int newState) { switch (newState) { case ActivityState.STARTED: case ActivityState.RESUMED: show(); break; case ActivityState.STOPPED: hide(); break; case ActivityState.DESTROYED: destroy(); break; case ActivityState.PAUSED: // Do nothing for pause. break; case ActivityState.CREATED: default: assert false : "Unhandled activity state change: " + newState; } } /** @return Whether the {@link FeedSurfaceCoordinator} can be shown. */ protected boolean canShow() { final int state = ApplicationStatus.getStateForActivity(mActivity); return (mSurfaceState == SurfaceState.CREATED || mSurfaceState == SurfaceState.HIDDEN) && (state == ActivityState.STARTED || state == ActivityState.RESUMED); } /** Calls {@link FeedSurfaceCoordinator#onSurfaceOpened()} ()}. */ protected void show() { if (!canShow()) return; mSurfaceState = SurfaceState.SHOWN; mCoordinator.restoreInstanceState(restoreInstanceState()); mCoordinator.onSurfaceOpened(); } /** Calls {@link FeedSurfaceCoordinator#onSurfaceClosed()} ()}. */ protected void hide() { if (mSurfaceState == SurfaceState.HIDDEN || mSurfaceState == SurfaceState.CREATED || mSurfaceState == SurfaceState.DESTROYED) { return; } // Make sure the feed is inactive before setting it to hidden state. mSurfaceState = SurfaceState.HIDDEN; // Save instance state as the feed begins to hide. This matches the activity lifecycle // that instance state is saved as the activity begins to stop. saveInstanceState(); mCoordinator.onSurfaceClosed(); } /** * Clears any dependencies. The coordinator will be destroyed by its owner. */ protected void destroy() { if (mSurfaceState == SurfaceState.DESTROYED) return; // Make sure the feed is hidden before setting it to destroyed state. hide(); mSurfaceState = SurfaceState.DESTROYED; ApplicationStatus.unregisterActivityStateListener(this); } /** Save the feed instance state if necessary. 
*/ protected void saveInstanceState() {} /** * @return The saved feed instance state, or null if it is not previously * saved. */ @Nullable protected String restoreInstanceState() { return null; } }
/** * @author Heiko Braun */ @RunWith(Arquillian.class) public class OpenshiftIT { @RouteURL("${app.name}") private URL url; @Before public void setup() throws Exception { await().atMost(5, TimeUnit.MINUTES).until(() -> { try { return get(url).getStatusCode() == 200; } catch (Exception e) { return false; } }); RestAssured.baseURI = url + "api"; } @Test public void testServiceInvocation() { when() .get("/greeting") .then() .assertThat().statusCode(200) .assertThat().body(containsString("Hello, World!")); } @Test public void testServiceStoppedAndRestarted() throws Exception { when() .get("/greeting") .then() .assertThat().statusCode(200) .assertThat().body(containsString("Hello, World!")); // suspend service when() .get("/stop") .then() .assertThat().statusCode(200); awaitStatus(503, Duration.ofSeconds(30)); long begin = System.currentTimeMillis(); awaitStatus(200, Duration.ofMinutes(3)); long end = System.currentTimeMillis(); System.out.println("Failure recovered in " + (end - begin) + " ms"); } private void awaitStatus(int status, Duration duration) { await().atMost(duration.getSeconds(), TimeUnit.SECONDS).until(() -> { try { Response response = get("/greeting"); return response.getStatusCode() == status; } catch (Exception e) { return false; } }); } }
# Copyright (c) OpenMMLab. All rights reserved. import argparse import logging from mmdeploy.apis.snpe import from_onnx from mmdeploy.utils import get_root_logger def parse_args(): parser = argparse.ArgumentParser( description='Convert ONNX to snpe dlc format.') parser.add_argument('onnx_path', help='ONNX model path') parser.add_argument('output_prefix', help='output snpe dlc model path') parser.add_argument( '--log-level', help='set log level', default='INFO', choices=list(logging._nameToLevel.keys())) args = parser.parse_args() return args def main(): args = parse_args() logger = get_root_logger(log_level=args.log_level) onnx_path = args.onnx_path output_prefix = args.output_prefix logger.info(f'onnx2dlc: \n\tonnx_path: {onnx_path} ') from_onnx(onnx_path, output_prefix) logger.info('onnx2dlc success.') if __name__ == '__main__': main()
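# Hedged usage sketch (separate from the CLI above): the same conversion driven
# programmatically from another script. Both paths are placeholders; the output
# prefix is expected to yield `work_dir/end2end.dlc`.
def convert_example():
    from mmdeploy.apis.snpe import from_onnx
    from_onnx('end2end.onnx', 'work_dir/end2end')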
/**
 * Receives the server's answer: 0x03 followed by the 1536 bytes from the previous step,
 * followed by 1536 random bytes from the server.
 * @throws IOException if the connection was closed during the handshake
 */
public void receiveAnswer ( ) throws IOException {
    int bytes = socket.read( replyBuffer );
    if ( bytes == -1 )
        throw new IOException( "Disconnected at handshake" );
    if ( !replyBuffer.hasRemaining( ) ) {
        // Skip the 0x03 version byte and keep the server's 1536-byte chunk for the next step.
        replyBuffer.position( 1 );
        byte [ ] temp = new byte[1536];
        replyBuffer.get( temp );
        firstBuffer.put( temp );
        firstBuffer.rewind( );
        state = 2;
    }
}
/** * Provides cached access to XML and XSL data. Events Cache instances publish two types of events for every operation that modifies the contents of the cache. The schemas of the two event types are subject - Cache.CHANGE id or ids - the ID or array of IDs of the modified documents action - Cache.ADD, Cache.CHANGE or Cache.REMOVE and subject - the cache ID of the modified document action - Cache.ADD, Cache.CHANGE or Cache.REMOVE Asynchronous Loading Cache documents can be loaded asychronously with the getOrOpenAsync() method. This method returns the corresponding document synchronously if it already exists in the cache. If the document does not exist in the cache, then it is loaded asynchronously and the method returns a placeholder document. The namespace URI of this placeholder document is Cache.XSDNS and its root node name is "loading". Since the cache stores this placeholder document until the document finishes loading, subsequent calls to synchronous APIs (getDocument(), getOrOpenDocument(), etc) may also return the placeholder document. It is therefore important to check the namespace of the returned document when any code uses the asynchronous APIs. Once a document finishes loading asynchronously the placeholder document is replaced with the loaded document. This change in value causes the cache to publish a pair of events of action Cache.CHANGE. If loading the document fails or times out the placeholder document is instead replaced with another placeholder document. This document also has a URI namespace of Cache.XSDNS. Its root node name may be either "error" or "timeout". If the root node name is "error" then the root node has an attribute, also named "error", which contains the XML error message. * @author Joe Walker [joe at getahead dot org] * @author DRAPGEN - Dwr Reverse Ajax Proxy GENerator */ public class Cache extends jsx3.lang.Object { /** * All reverse ajax proxies need context to work from * @param context The script that got us to where we are now */ public Cache(Context context, String extension) { super(context, extension); } /** * Creates a new instance of this class. */ public Cache() { super((Context) null, (String) null); ScriptBuffer script = new ScriptBuffer(); script.appendCall("new Cache"); setInitScript(script); } /** * Event action. */ public static final String REMOVE = "remove"; /** * Event action. */ public static final String ADD = "add"; /** * Event subject and action. */ public static final String CHANGE = "change"; /** * The number of milliseconds before asynchronous document loads time out. */ public static final int ASYNC_TIMEOUT = 60000; /** * */ public static final String XSDNS = "http://xsd.tns.tibco.com/gi/cache"; /** * Removes the document stored in this cache under id strId. * @param strId * @return the remove document, if any. */ public jsx3.xml.CdfDocument clearById(String strId) { String extension = "clearById(\"" + strId + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Removes the document stored in this cache under id strId. * @param strId * @param returnType The expected return type * @return the remove document, if any. 
*/ public <T> T clearById(String strId, Class<T> returnType) { String extension = "clearById(\"" + strId + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Removes all documents placed in this cache before intTimestamp. * @param intTimestamp epoch seconds or a date object. * @param callback the ids of the removed documents. */ public void clearByTimestamp(java.util.Date intTimestamp, org.directwebremoting.ui.Callback<Object[]> callback) { ScriptBuffer script = new ScriptBuffer(); String callbackPrefix = ""; if (callback != null) { callbackPrefix = "var reply = "; } script.appendCall(callbackPrefix + getContextPath() + "clearByTimestamp", intTimestamp); if (callback != null) { String key = org.directwebremoting.extend.CallbackHelperFactory.get().saveCallback(callback, Object[].class); script.appendCall("__System.activateCallback", key, "reply"); } ScriptSessions.addScript(script); } /** * Removes all documents placed in this cache before intTimestamp. * @param intTimestamp epoch seconds or a date object. * @param callback the ids of the removed documents. */ public void clearByTimestamp(int intTimestamp, org.directwebremoting.ui.Callback<Object[]> callback) { ScriptBuffer script = new ScriptBuffer(); String callbackPrefix = ""; if (callback != null) { callbackPrefix = "var reply = "; } script.appendCall(callbackPrefix + getContextPath() + "clearByTimestamp", intTimestamp); if (callback != null) { String key = org.directwebremoting.extend.CallbackHelperFactory.get().saveCallback(callback, Object[].class); script.appendCall("__System.activateCallback", key, "reply"); } ScriptSessions.addScript(script); } /** * Returns the document stored in this cache under id strId. * @param strId * @return the stored document or <code>null</code> if none exists. */ public jsx3.xml.CdfDocument getDocument(String strId) { String extension = "getDocument(\"" + strId + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Returns the document stored in this cache under id strId. * @param strId * @param returnType The expected return type * @return the stored document or <code>null</code> if none exists. */ public <T> T getDocument(String strId, Class<T> returnType) { String extension = "getDocument(\"" + strId + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Retrieves a document from this cache or, if this cache contains no such document, loads the document synchronously and returns it. * @param strURL the URI of the document. * @param strId the id under which the document is/will be stored. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance if a new document is opened. 
* @return the document retrieved from the cache or loaded. */ public jsx3.xml.CdfDocument getOrOpenDocument(String strURL, String strId, Class<?> objClass) { String extension = "getOrOpenDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Retrieves a document from this cache or, if this cache contains no such document, loads the document synchronously and returns it. * @param strURL the URI of the document. * @param strId the id under which the document is/will be stored. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance if a new document is opened. * @param returnType The expected return type * @return the document retrieved from the cache or loaded. */ public <T> T getOrOpenDocument(String strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "getOrOpenDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Retrieves a document from this cache or, if this cache contains no such document, loads the document synchronously and returns it. * @param strURL the URI of the document. * @param strId the id under which the document is/will be stored. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance if a new document is opened. * @return the document retrieved from the cache or loaded. */ public jsx3.xml.CdfDocument getOrOpenDocument(java.net.URI strURL, String strId, Class<?> objClass) { String extension = "getOrOpenDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Retrieves a document from this cache or, if this cache contains no such document, loads the document synchronously and returns it. * @param strURL the URI of the document. * @param strId the id under which the document is/will be stored. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance if a new document is opened. * @param returnType The expected return type * @return the document retrieved from the cache or loaded. 
*/ public <T> T getOrOpenDocument(java.net.URI strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "getOrOpenDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Synchronously loads an xml document, stores it in this cache, and returns the loaded document. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @return the loaded document object. */ public jsx3.xml.CdfDocument openDocument(String strURL, String strId, Class<?> objClass) { String extension = "openDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Synchronously loads an xml document, stores it in this cache, and returns the loaded document. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @param returnType The expected return type * @return the loaded document object. */ public <T> T openDocument(String strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "openDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Synchronously loads an xml document, stores it in this cache, and returns the loaded document. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @return the loaded document object. 
*/ public jsx3.xml.CdfDocument openDocument(java.net.URI strURL, String strId, Class<?> objClass) { String extension = "openDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Synchronously loads an xml document, stores it in this cache, and returns the loaded document. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @param returnType The expected return type * @return the loaded document object. */ public <T> T openDocument(java.net.URI strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "openDocument(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Asynchronously loads an xml document and stores it in this cache. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @return the document retrieved from the cache or a placeholder document if the document is in the process of loading asynchronously. */ public jsx3.xml.CdfDocument getOrOpenAsync(String strURL, String strId, Class<?> objClass) { String extension = "getOrOpenAsync(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Asynchronously loads an xml document and stores it in this cache. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @param returnType The expected return type * @return the document retrieved from the cache or a placeholder document if the document is in the process of loading asynchronously. 
*/ public <T> T getOrOpenAsync(String strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "getOrOpenAsync(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Asynchronously loads an xml document and stores it in this cache. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @return the document retrieved from the cache or a placeholder document if the document is in the process of loading asynchronously. */ public jsx3.xml.CdfDocument getOrOpenAsync(java.net.URI strURL, String strId, Class<?> objClass) { String extension = "getOrOpenAsync(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<jsx3.xml.CdfDocument> ctor = jsx3.xml.CdfDocument.class.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported type: " + jsx3.xml.CdfDocument.class.getName()); } } /** * Asynchronously loads an xml document and stores it in this cache. * @param strURL url (relative or absolute) the URI of the document to open. * @param strId the id under which to store the document. If this parameter is not provided, the <code>strURL</code> parameter is used as the id. * @param objClass <code>jsx3.xml.Document</code> (default value) or one of its subclasses. The class with which to instantiate the new document instance. * @param returnType The expected return type * @return the document retrieved from the cache or a placeholder document if the document is in the process of loading asynchronously. */ public <T> T getOrOpenAsync(java.net.URI strURL, String strId, Class<?> objClass, Class<T> returnType) { String extension = "getOrOpenAsync(\"" + strURL + "\", \"" + strId + "\", \"" + objClass + "\")."; try { java.lang.reflect.Constructor<T> ctor = returnType.getConstructor(Context.class, String.class); return ctor.newInstance(this, extension); } catch (Exception ex) { throw new IllegalArgumentException("Unsupported return type: " + returnType.getName()); } } /** * Stores the document objDocument in this cache under id strId. If a document already exists in this cache under strId then that document is removed from the cache. * @param strId the id under which to store <code>objDocument</code>. * @param objDocument */ public void setDocument(String strId, jsx3.xml.CdfDocument objDocument) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "setDocument", strId, objDocument); ScriptSessions.addScript(script); } /** * Returns the timestamp from when the document stored under id strId was stored in this cache. * @param strId the id under which the document is stored. * @param callback the timestamp as an integer (epoch seconds) or <code>null</code> if no such document exists in this cache. 
*/ public void getTimestamp(String strId, org.directwebremoting.ui.Callback<Integer> callback) { ScriptBuffer script = new ScriptBuffer(); String callbackPrefix = ""; if (callback != null) { callbackPrefix = "var reply = "; } script.appendCall(callbackPrefix + getContextPath() + "getTimestamp", strId); if (callback != null) { String key = org.directwebremoting.extend.CallbackHelperFactory.get().saveCallback(callback, Integer.class); script.appendCall("__System.activateCallback", key, "reply"); } ScriptSessions.addScript(script); } /** * Returns a list of all the keys in this cache instance. */ public void keys(org.directwebremoting.ui.Callback<Object[]> callback) { ScriptBuffer script = new ScriptBuffer(); String callbackPrefix = ""; if (callback != null) { callbackPrefix = "var reply = "; } script.appendCall(callbackPrefix + getContextPath() + "keys"); if (callback != null) { String key = org.directwebremoting.extend.CallbackHelperFactory.get().saveCallback(callback, Object[].class); script.appendCall("__System.activateCallback", key, "reply"); } ScriptSessions.addScript(script); } /** * Removes all references to documents contained in this cache. This cache is no longer usable after calling this method. */ public void destroy() { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "destroy"); ScriptSessions.addScript(script); } /** * Publishes an event to all subscribed objects. * @param objEvent the event, should have at least a field 'subject' that is the event id, another common field is 'target' (target will default to this instance) * @param callback the number of listeners to which the event was broadcast */ public void publish(jsx3.lang.Object objEvent, org.directwebremoting.ui.Callback<Integer> callback) { ScriptBuffer script = new ScriptBuffer(); String callbackPrefix = ""; if (callback != null) { callbackPrefix = "var reply = "; } script.appendCall(callbackPrefix + getContextPath() + "publish", objEvent); if (callback != null) { String key = org.directwebremoting.extend.CallbackHelperFactory.get().saveCallback(callback, Integer.class); script.appendCall("__System.activateCallback", key, "reply"); } ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, jsx3.lang.Object objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). 
* @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, org.directwebremoting.ui.CodeBlock objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, jsx3.lang.Object objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, String objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. 
either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, org.directwebremoting.ui.CodeBlock objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, org.directwebremoting.ui.CodeBlock objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, org.directwebremoting.ui.CodeBlock objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, String objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). 
* @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, jsx3.lang.Object objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, jsx3.lang.Object objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. either a function or a string that is the name of a method of the instance */ public void subscribe(Object[] strEventId, String objHandler, org.directwebremoting.ui.CodeBlock objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Subscribes an object or function to a type of event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler if an object, the instance to notify of events (objFunction is required); if a string, the JSX id of the instance to notify of events (objFunction is required), must exist in the same Server; if a function, the function to call to notify of events (objFunction ignored) * @param objFunction if objHandler is a string or object then the function to call on that instance. 
either a function or a string that is the name of a method of the instance */ public void subscribe(String strEventId, String objHandler, String objFunction) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "subscribe", strEventId, objHandler, objFunction); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(String strEventId, String objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(Object[] strEventId, jsx3.lang.Object objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(Object[] strEventId, String objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(Object[] strEventId, org.directwebremoting.ui.CodeBlock objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(String strEventId, jsx3.lang.Object objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribe an object or function from an event published by this object. As of version 3.4 a string value for objHandler is deprecated. * @param strEventId the event type(s). * @param objHandler the value of objHandler passed to subscribe */ public void unsubscribe(String strEventId, org.directwebremoting.ui.CodeBlock objHandler) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribe", strEventId, objHandler); ScriptSessions.addScript(script); } /** * Unsubscribes all subscribed objects to a type of event published by this object. * @param strEventId the event type */ public void unsubscribeAll(String strEventId) { ScriptBuffer script = new ScriptBuffer(); script.appendCall(getContextPath() + "unsubscribeAll", strEventId); } }
# Neural Abstractions # Copyright (c) 2022 Alessandro Abate, Alec Edwards, Mirco Giacobbe # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import unittest import torch import z3 import numpy as np from cegis.verifier import z3_replacements, Z3Verifier from cegis.nn import ReluNet from utils import * from cegis.translator import Translator from cli import get_default_config from benchmarks import read_benchmark config = get_default_config() class testTranslatorZ3(unittest.TestCase): def setUp(self) -> None: self.width = [3,3] bench = read_benchmark('lin') self.dimension = bench.dimension torch.manual_seed(0) self.net = ReluNet(bench, self.width, 0.1, config) x0, x1 = [z3.Real("x%d" % i) for i in range(self.dimension)] self.translator = Translator((x0, x1), Z3Verifier.relu) def test_output(self): output = self.translator.translate(self.net) def test_output_similarity(self): X = torch.randn(10, self.dimension) true_output = self.net.model(X).detach() translation = self.translator.translate(self.net, dp=20) translated_output = np.array( [ [ float( ( z3_replacements( translation[0, 0], self.translator.input_vars, np.array(x.unsqueeze(1)), ) ).as_fraction() ), float( ( z3_replacements( translation[1, 0], self.translator.input_vars, np.array(x.unsqueeze(1)), ) ).as_fraction() ), ] for x in X ] ) self.assertTrue(np.allclose(true_output, translated_output)) if __name__ == "__main__": unittest.main()
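# Illustrative, standalone sketch (not the repository's helper): z3_replacements
# used in the test above substitutes concrete values for the symbolic inputs of a
# translated expression; the core z3 calls look like this.
def _substitution_sketch():
    x0, x1 = z3.Real("x0"), z3.Real("x1")
    expr = 2 * x0 + 3 * x1
    val = z3.simplify(z3.substitute(expr, (x0, z3.RealVal("0.5")), (x1, z3.RealVal(-1))))
    return float(val.as_fraction())  # -2.0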
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { DatePipe } from '@angular/common';
import { UserModel } from './Models/app.UserModel';
import { UserService } from './Services/app.UserRegistration.Service';

@Component({
    templateUrl: './app.UserRegistration.html',
    styleUrls: ['../Content/vendor/bootstrap/css/bootstrap.min.css',
        '../Content/vendor/metisMenu/metisMenu.min.css',
        '../Content/dist/css/sb-admin-2.css',
        '../Content/vendor/font-awesome/css/font-awesome.min.css'
    ]
})
export class UserRegistrationComponent implements OnInit {

    UserModel: UserModel = new UserModel();
    private _userService: UserService;
    output: any;

    ngOnInit(): void {
    }

    constructor(
        private datePipe: DatePipe,
        private _Route: Router,
        private userService: UserService
    ) {
        this._userService = userService;
    }

    onSubmit() {
        this._userService.SaveUser(this.UserModel).subscribe(
            response => {
                this.output = response;
                if (this.output.StatusCode == "409") {
                    alert('User Already Exists');
                }
                else if (this.output.StatusCode == "200") {
                    alert('User Created Successfully');
                    this._Route.navigate(['/User/All']);
                }
                else {
                    alert('Something Went Wrong');
                }
            });
    }
}
import { ErrorCallback, OpenOptions, SerialPortStream } from '@serialport/stream' import { MockBinding, MockBindingInterface } from '@serialport/binding-mock' export type SerialPortMockOpenOptions = Omit<OpenOptions<MockBindingInterface>, 'binding'> export class SerialPortMock extends SerialPortStream<MockBindingInterface> { static list = MockBinding.list static readonly binding = MockBinding constructor(options: SerialPortMockOpenOptions, openCallback?: ErrorCallback) { const opts: OpenOptions<MockBindingInterface> = { binding: MockBinding, ...options, } super(opts, openCallback) } }
import React from 'react' import { Header } from '../../components/common' import { observer } from 'mobx-react' import RemoteBooksList from './RemoteShelve' import { AddBookButton } from './common' import LocalBooksList from './LocalShelve' import SwipeableViews from 'react-swipeable-views' import { useTheme } from '@material-ui/core/styles' import BottomNavigation from '@material-ui/core/BottomNavigation' import BottomNavigationAction from '@material-ui/core/BottomNavigationAction' import Zoom from '@material-ui/core/Zoom' import AddIcon from '@material-ui/icons/Add' import styled from 'styled-components' const Contaier = styled.div` position: relative; ` const Content = styled.div` overflow: auto; ` const FabsContainer = styled.div` position: fixed; right: 0; bottom: 0; ` const BottomMenu = styled.div` position: fixed; bottom: 0; right: 0; left: 0; ` const BottomMenuHeight = styled.div`` export default observer(function Shelves() { const theme = useTheme() const [value, setValue] = React.useState(0) const transitionDuration = { enter: theme.transitions.duration.enteringScreen, exit: theme.transitions.duration.leavingScreen, } const handleChangeIndex = (index: number) => setValue(index) const handleChange = (event: any, index: number) => handleChangeIndex(index) const fabs = [ { color: 'primary', icon: <AddIcon />, label: 'Add', clickHandler: () => { console.log('hey you') }, }, ] return ( <Contaier> <Header></Header> <Content> <SwipeableViews index={value} onChangeIndex={handleChangeIndex}> <LocalBooksList /> <RemoteBooksList /> <div>favorite</div> </SwipeableViews> </Content> <FabsContainer> {fabs.map((fab, index) => ( <Zoom key={fab.color} in={value === index} timeout={transitionDuration} style={{ transitionDelay: `${ value === index ? transitionDuration.exit : 0 }ms`, }} unmountOnExit > <AddBookButton /> </Zoom> ))} </FabsContainer> <BottomMenu> <BottomMenuHeight /> <BottomNavigation value={value} onChange={handleChange} showLabels> <BottomNavigationAction label="local" /> <BottomNavigationAction label="remote" /> <BottomNavigationAction label="fav" /> </BottomNavigation> </BottomMenu> </Contaier> ) })
package web

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"

	"github.com/dgrijalva/jwt-go"
	"github.com/gorilla/context"
	"github.com/gorilla/sessions"

	"github.com/hawkwithwind/chat-bot-hub/server/dbx"
	"github.com/hawkwithwind/chat-bot-hub/server/utils"
)

const (
	SDK     string = "SDK"
	USER    string = "USER"
	SDKCODE string = "sdkbearer"
)

type User struct {
	AccountName string         `json:"accountname"`
	Password    string         `json:"password"`
	SdkCode     string         `json:"sdkcode"`
	Secret      string         `json:"secret"`
	ExpireAt    utils.JSONTime `json:"expireat"`
}

func (o *ErrorHandler) getAccountName(r *http.Request) string {
	if o.Err != nil {
		return ""
	}

	var accountName string
	if accountNameptr, ok := context.GetOk(r, "login"); !ok {
		o.Err = fmt.Errorf("context.login is null")
		return ""
	} else {
		accountName = accountNameptr.(string)
	}

	return accountName
}

func (o *ErrorHandler) register(db *dbx.Database, name string, pass string, email string, avatar string) {
	if o.Err != nil {
		return
	}

	account := o.NewAccount(name, pass)
	account.SetEmail(email)
	account.SetAvatar(avatar)
	o.SaveAccount(db.Conn, account)
}

func (o *ErrorHandler) generateToken(s string, name string, sdkcode string, secret string, expireAt time.Time) string {
	if o.Err != nil {
		return ""
	}

	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"accountname": name,
		"sdkcode":     sdkcode,
		"secret":      secret,
		"expireat":    utils.JSONTime{expireAt},
	})

	var tokenstring string
	if tokenstring, o.Err = token.SignedString([]byte(s)); o.Err == nil {
		return tokenstring
	} else {
		return ""
	}
}

func (o *ErrorHandler) authorize(s string, name string, secret string) string {
	if o.Err != nil {
		return ""
	}

	return o.generateToken(s, name, "", secret, time.Now().Add(time.Hour*24*7))
}

func (ctx *WebServer) sdkToken(w http.ResponseWriter, req *http.Request) {
	o := &ErrorHandler{}
	defer o.WebError(w)

	var accountName string
	switch login := context.Get(req, "login").(type) {
	case string:
		accountName = login
	default:
		o.Err = fmt.Errorf("context[login] should be string but [%T]%v", login, login)
	}

	account := o.GetAccountByName(ctx.db.Conn, accountName)
	tokenstring := o.generateToken(ctx.Config.SecretPhrase, account.AccountName, "sdkbearer", account.Secret, time.Now().Add(time.Hour*24*365))

	o.ok(w, "", map[string]interface{}{
		"sdkName": SDKCODE,
		"token":   tokenstring,
	})
}

func (ctx *WebServer) login(w http.ResponseWriter, req *http.Request) {
	o := &ErrorHandler{}
	defer o.WebError(w)

	var session *sessions.Session
	session, o.Err = ctx.store.Get(req, "chatbothub")

	var user User
	if o.Err == nil {
		o.Err = json.NewDecoder(req.Body).Decode(&user)
	}

	if o.AccountValidate(ctx.db.Conn, user.AccountName, user.Password) {
		tokenString := o.authorize(ctx.Config.SecretPhrase, user.AccountName, utils.PasswordCheckSum(user.Password))
		session.Values["X-AUTHORIZE"] = tokenString
		session.Save(req, w)

		if o.Err == nil {
			http.Redirect(w, req, "/", http.StatusFound)
		}
	} else {
		o.deny(w, "用户名密码不匹配") // username and password do not match
	}
}

func (ctx *WebServer) validate(next http.HandlerFunc) http.HandlerFunc {
	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		o := &ErrorHandler{}
		defer o.WebError(w)

		var session *sessions.Session
		var bearerToken string = ""
		var clientType string = ""

		session, o.Err = ctx.store.Get(req, "chatbothub")
		if o.Err == nil {
			// Prefer the session token; fall back to the request headers.
			switch tokenString := session.Values["X-AUTHORIZE"].(type) {
			case string:
				if tokenString == "" {
					bearerToken = req.Header.Get("X-AUTHORIZE")
					clientType = req.Header.Get("X-CLIENT-TYPE")
				} else {
					bearerToken = tokenString
					clientType = USER
				}
			case nil:
				bearerToken = req.Header.Get("X-AUTHORIZE")
				clientType = req.Header.Get("X-CLIENT-TYPE")
			default:
				ctx.Error(fmt.Errorf("unexpected tokenstring %T", tokenString), "unexpected token")
			}
		}

		if o.Err == nil && bearerToken != "" {
			var token *jwt.Token
			token, o.Err = jwt.Parse(bearerToken, func(token *jwt.Token) (interface{}, error) {
				if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
					return nil, fmt.Errorf("解析令牌出错") // failed to parse the token
				}
				return []byte(ctx.Config.SecretPhrase), nil
			})

			if token.Valid {
				var user User
				utils.DecodeMap(token.Claims, &user)

				if o.AccountValidateSecret(ctx.db.Conn, user.AccountName, user.Secret) {
					if user.ExpireAt.Before(time.Now()) {
						o.deny(w, "身份令牌已过期") // the token has expired
						return
					} else {
						if clientType == SDK && user.SdkCode != SDKCODE {
							o.deny(w, "不支持的用户类型") // unsupported client type
							return
						}

						// pass validate
						context.Set(req, "login", user.AccountName)
						next(w, req)
					}
				} else {
					o.deny(w, "身份令牌未验证通过") // the token failed validation
					return
				}
			} else {
				o.deny(w, "身份令牌无效") // the token is invalid
				return
			}
		} else {
			o.deny(w, "未登录用户无权限访问") // unauthenticated users may not access this resource
			return
		}
	})
}
# Imports added for completeness; swap in the `tensorflow.keras` equivalents
# when running on TensorFlow 2.x.
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dense, Activation


def make_model(n_size, n_output, n_embedding, n_vocab):
    model = Sequential()
    model.add(Embedding(n_vocab, n_embedding, mask_zero=True))
    model.add(LSTM(n_size))
    model.add(Dense(n_output))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
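# Hedged usage sketch with toy dimensions; assumes the Keras imports above.
def _train_example():
    import numpy as np
    model = make_model(n_size=128, n_output=10, n_embedding=64, n_vocab=5000)
    x = np.random.randint(1, 5000, size=(32, 20))       # batch of padded token ids (0 is the mask value)
    y = np.eye(10)[np.random.randint(0, 10, size=32)]   # one-hot targets
    model.fit(x, y, epochs=1, batch_size=8)
    return model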
Monday was chicken nugget day in the cafeteria at Tennessee’s Farragut High School.

“You can’t go wrong with nugget day at high school,” senior Carson Koller told me. “The nuggets were great.”

Carson, an Eagle Scout, was so hungry he got six nuggets instead of five.

The 17-year-old had no idea that extra nugget would result in a trip to the principal’s office and make headlines across the fruited plain.

Carson, who has a spotless academic record, was accused of stealing and was suspended for “theft of property” – all over a single chicken nugget that he had actually paid for.

“I got a text from my son saying he had been suspended for one day for taking a chicken nugget,” Carson’s mom, Carrie Koller Waller, told me. “I honestly didn’t believe it.”

She immediately checked online and discovered that not only had he paid for the extra nugget, but the school had added an additional charge.

Mrs. Waller alerted her Facebook friends to the Great Nugget Controversy of 2016 and posted a screenshot showing her son was no thief.

“My Eagle Scout, Captain of the drumline, all-around hardworking and well-rounded teenager just got suspended from a day of school (and after school band practice) for taking an extra chicken nugget from the lunch line. Maybe I'm missing parts of the story, but in the past, the cafeteria has never hesitated to charge Carson double for the amount of food he's taken/eaten. Today he gets taken to the Principal's office for this. When I questioned the need to suspend over such an offense, I was told that they have to be consistent with people who take extra food and that somewhere in the Knox County Handbook there is something to this effect. I almost don't have words here.....but aren't there kids who get free lunches? Does my son really deserve suspension over hunger, especially when they have the ability to charge his lunch account for the items (which they did!!!!)? How is it theft if he paid for it??? It's food. FOOD!!! Not weapons. Not drugs. Not alcohol. Not cheating on a test. Not inappropriate clothing or profanity. Not fighting. Not calling in threats. Not vandalism. I am shaking my head over this and not sure what to do....laugh, punish, argue, dress him up as a nugget bandit, or let it go. Does the suspension matter on his records? #justnow#cantmakethisstuffup #mylife”

“I didn’t think I’d ever be talking to the principal over a chicken nugget,” she told me. “That’s not something a parent ever imagines.”

After the school district investigated Mrs. Waller’s evidence they realized they had made a terrible mistake.

“Principal Siebe reviewed the matter and found there was some misinformation about the details, and after further investigation, corrected the situation,” a district spokesperson told USA Today.

Carson was allowed to return to school and his official school record was expunged of the Great Nugget Controversy of 2016.

“My main concern was they suspended him so quickly before getting all the facts,” Mrs. Waller said. “But I’m happy with how the principal reacted.”

By the way, Carson was charged $2.75 for that extra nugget.

That’s a mighty pricey nugget, folks.

It is unfortunate the school not only overreacted, but falsely accused a good teenager of wrongdoing. But they made things right and they deserve credit for that.

And Carson deserves a little credit, too.
So I reached out to the local Chick-fil-A in Knoxville and arranged to provide Carson with a gift card -- so he can eat as many chicken nuggets as his heart desires. “Sounds like I’m going to have to treat my principal to lunch – on me,” Carson said. That’s one mighty good kid, America.
/** * Test that the parser handles the case that build tag is specified but empty and if the build tag is * specified but does not start with a number */ @Test public void buildTagException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> PypiWheelArtifactCoordinates.parse("bcrypt-3.1.6-1--cp27-cp27m-macosx_10_6_intel.whl")); assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> PypiWheelArtifactCoordinates.parse("bcrypt-3.1.6-1-test1-cp27-cp27m-macosx_10_6_intel.whl")); }
/** * Configure {@link Docket} * <p> * The base package value: * default value is {@link AutoConfigurationPackages#get} is used * * @return The {@link Docket} instance */ @Bean public Docket docket() { String basePackage = swaggerProperties.getBasePackage(); if (StringUtils.isEmpty(basePackage)) { basePackage = AutoConfigurationPackages.get(beanFactory).get(0); } return new Docket(DocumentationType.SWAGGER_2) .apiInfo(apiInfo()) .securitySchemes(Collections.singletonList(apiKey())) .securityContexts(Collections.singletonList(securityContext())) .select() .apis(RequestHandlerSelectors.basePackage(basePackage)) .paths(PathSelectors.any()) .build(); }
def loads(text, allow_multiple=True): g = GlycoCTXML.loads(text) first = next(g) if not allow_multiple: return first second = None try: second = next(g) collection = [first, second] collection.extend(g) return collection except StopIteration: return first
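# Hedged usage sketch for loads(): the file path is a placeholder for any document
# holding one or more GlycoCT-XML structures.
def _loads_example(path="example.glycoct_xml"):
    with open(path) as fh:
        text = fh.read()
    first = loads(text, allow_multiple=False)   # always a single structure
    result = loads(text)                        # a structure, or a list when several are present
    structures = result if isinstance(result, list) else [result]
    return first, len(structures)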
/**
 * @author <a href="mailto:[email protected]">Norman Maurer</a>
 */
public class BootstrapClientWithOptionsAndAttrs {

    public void bootstrap() {
        final AttributeKey<Integer> id = new AttributeKey<Integer>("ID");

        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(new NioEventLoopGroup()).channel(NioSocketChannel.class)
                .handler(new SimpleChannelInboundHandler<ByteBuf>() {

                    @Override
                    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
                        Integer idValue = ctx.channel().attr(id).get();
                        // do something with the idValue
                    }

                    @Override
                    protected void channelRead0(ChannelHandlerContext channelHandlerContext, ByteBuf byteBuf) throws Exception {
                        System.out.println("Received data");
                        byteBuf.clear();
                    }
                });
        bootstrap.option(ChannelOption.SO_KEEPALIVE, true).option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 5000);

        ChannelFuture future = bootstrap.connect(new InetSocketAddress("www.manning.com", 80));
        future.syncUninterruptibly();
    }
}
def __VGG_Conv2DBlock(self, depth, kernelshape, activation, padding, channel_pos, x,
                      conv_amount=3, inp_shape=None):
    # Stacks two (or three, when conv_amount == 3) Conv2D -> BatchNorm -> Activation
    # layers, VGG-style; inp_shape is only needed for the first block of the network.
    bn_axis = 3
    if inp_shape is None:
        x = Conv2D(depth, kernel_size=kernelshape, padding=padding,
                   data_format=channel_pos)(x)
    else:
        x = Conv2D(depth, kernel_size=kernelshape, padding=padding,
                   input_shape=inp_shape, data_format=channel_pos)(x)
    x = BatchNormalization(axis=bn_axis)(x)
    x = Activation(activation)(x)

    x = Conv2D(depth, kernel_size=kernelshape, padding=padding, data_format=channel_pos)(x)
    x = BatchNormalization(axis=bn_axis)(x)
    x = Activation(activation)(x)

    if conv_amount == 3:
        x = Conv2D(depth, kernel_size=kernelshape, padding=padding, data_format=channel_pos)(x)
        x = BatchNormalization(axis=bn_axis)(x)
        x = Activation(activation)(x)

    return x
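# Standalone sketch (not the class's actual build code): how blocks like
# __VGG_Conv2DBlock chain into a VGG-style feature extractor. Layer sizes and the
# plain-Keras imports are assumptions; adjust to tensorflow.keras if needed.
def _vgg_sketch():
    from keras.layers import Input, Conv2D, BatchNormalization, Activation, MaxPooling2D
    from keras.models import Model

    def conv_block(x, depth, convs=2):
        for _ in range(convs):
            x = Conv2D(depth, kernel_size=(3, 3), padding='same', data_format='channels_last')(x)
            x = BatchNormalization(axis=3)(x)
            x = Activation('relu')(x)
        return x

    inp = Input(shape=(64, 64, 3))
    x = conv_block(inp, 32)
    x = MaxPooling2D()(x)
    x = conv_block(x, 64, convs=3)
    return Model(inp, x)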