from io import BytesIO

import numpy as np
import pandas as pd
from pyarrow import feather


def ndarray_to_bytes(arr: np.ndarray) -> bytes:
    # Wrap the array in a single-column DataFrame and serialize it
    # to the Arrow/Feather format via an in-memory buffer.
    df = pd.DataFrame(data={'arr': arr})
    buf = BytesIO()
    feather.write_feather(df, buf)
    buf.seek(0)
    return buf.read()
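For round-tripping, a minimal sketch of the inverse, assuming the single 'arr' column name used above:

def bytes_to_ndarray(data: bytes) -> np.ndarray:
    # Read the Feather payload back and extract the 'arr' column
    # written by ndarray_to_bytes.
    df = feather.read_feather(BytesIO(data))
    return df['arr'].to_numpy()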
// spell-checker:ignore abcdefghijklmnopqrstuvwxyz
// * This file is part of the uutils coreutils package.
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.

extern crate unindent;

use self::unindent::*;
use crate::common::util::*;
use std::env;
use std::fs::remove_file;
use std::fs::File;
use std::io::Write;
use std::path::Path;

// octal dump of 'abcdefghijklmnopqrstuvwxyz\n' // spell-checker:disable-line
static ALPHA_OUT: &str = "
        0000000 061141 062143 063145 064147 065151 066153 067155 070157
        0000020 071161 072163 073165 074167 075171 000012
        0000033
        ";

// XXX We could do a better job of ensuring that we have a fresh temp dir to ourselves,
// not a general one full of other process leftovers.

// Test that od can read one file and dump with default format
#[test]
fn test_file() {
    // TODO: Can this be replaced by AtPath?
    use std::env;

    let temp = env::temp_dir();
    let tmpdir = Path::new(&temp);
    let file = tmpdir.join("test");

    {
        let mut f = File::create(&file).unwrap();
        // spell-checker:disable-next-line
        assert!(
            f.write_all(b"abcdefghijklmnopqrstuvwxyz\n").is_ok(),
            "Test setup failed - could not write file"
        );
    }

    new_ucmd!()
        .arg("--endian=little")
        .arg(file.as_os_str())
        .succeeds()
        .no_stderr()
        .stdout_is(unindent(ALPHA_OUT));

    // Ensure that default format matches `-t o2`, and that `-t` does not absorb file argument
    new_ucmd!()
        .arg("--endian=little")
        .arg("-t")
        .arg("o2")
        .arg(file.as_os_str())
        .succeeds()
        .no_stderr()
        .stdout_is(unindent(ALPHA_OUT));

    let _ = remove_file(file);
}

// Test that od can read 2 files and concatenate the contents
#[test]
fn test_2files() {
    let temp = env::temp_dir();
    let tmpdir = Path::new(&temp);
    let file1 = tmpdir.join("test1");
    let file2 = tmpdir.join("test2");

    for (n, a) in [(1, "a"), (2, "b")] {
        println!("number: {} letter:{}", n, a);
    }

    // spell-checker:disable-next-line
    for (path, data) in [(&file1, "abcdefghijklmnop"), (&file2, "qrstuvwxyz\n")] {
        let mut f = File::create(&path).unwrap();
        assert!(
            f.write_all(data.as_bytes()).is_ok(),
            "Test setup failed - could not write file"
        );
    }

    new_ucmd!()
        .arg("--endian=little")
        .arg(file1.as_os_str())
        .arg(file2.as_os_str())
        .succeeds()
        .no_stderr()
        .stdout_is(unindent(ALPHA_OUT));

    // TODO: Handle errors?
    let _ = remove_file(file1);
    let _ = remove_file(file2);
}

// Test that od gives non-0 exit val for filename that doesn't exist.
#[test]
fn test_no_file() {
    let temp = env::temp_dir();
    let tmpdir = Path::new(&temp);
    let file = tmpdir.join("}surely'none'would'thus'a'file'name"); // spell-checker:disable-line

    new_ucmd!().arg(file.as_os_str()).fails();
}

// Test that od reads from stdin instead of a file
#[test]
fn test_from_stdin() {
    let input = "abcdefghijklmnopqrstuvwxyz\n"; // spell-checker:disable-line
    new_ucmd!()
        .arg("--endian=little")
        .run_piped_stdin(input.as_bytes())
        .success()
        .no_stderr()
        .stdout_is(unindent(ALPHA_OUT));
}

// Test that od reads from stdin and also from files
#[test]
fn test_from_mixed() {
    let temp = env::temp_dir();
    let tmpdir = Path::new(&temp);
    let file1 = tmpdir.join("test-1");
    let file3 = tmpdir.join("test-3");

    // spell-checker:disable-next-line
    let (data1, data2, data3) = ("abcdefg", "hijklmnop", "qrstuvwxyz\n");
    for (path, data) in [(&file1, data1), (&file3, data3)] {
        let mut f = File::create(&path).unwrap();
        assert!(
            f.write_all(data.as_bytes()).is_ok(),
            "Test setup failed - could not write file"
        );
    }

    new_ucmd!()
        .arg("--endian=little")
        .arg(file1.as_os_str())
        .arg("-")
        .arg(file3.as_os_str())
        .run_piped_stdin(data2.as_bytes())
        .success()
        .no_stderr()
        .stdout_is(unindent(ALPHA_OUT));
}

#[test]
fn test_multiple_formats() {
    let input = "abcdefghijklmnopqrstuvwxyz\n"; // spell-checker:disable-line
    new_ucmd!()
        .arg("-c")
        .arg("-b")
        .run_piped_stdin(input.as_bytes())
        .success()
        .no_stderr()
        .stdout_is(unindent(
            "
            0000000 a b c d e f g h i j k l m n o p
                    141 142 143 144 145 146 147 150 151 152 153 154 155 156 157 160
            0000020 q r s t u v w x y z \\n
                    161 162 163 164 165 166 167 170 171 172 012
            0000033
            ",
        ));
}

#[test]
fn test_dec() {
    // spell-checker:ignore (words) 0xffu8 xffu
    let input = [
        0u8, 0u8, 1u8, 0u8, 2u8, 0u8, 3u8, 0u8, 0xffu8, 0x7fu8, 0x00u8, 0x80u8, 0x01u8, 0x80u8,
    ];
    let expected_output = unindent(
        "
        0000000 0 1 2 3 32767 -32768 -32767
        0000016
        ",
    );
    new_ucmd!()
        .arg("--endian=little")
        .arg("-s")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_hex16() {
    let input: [u8; 9] = [0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xff];
    // spell-checker:disable
    let expected_output = unindent(
        "
        0000000 2301 6745 ab89 efcd 00ff
        0000011
        ",
    );
    // spell-checker:enable
    new_ucmd!()
        .arg("--endian=little")
        .arg("-x")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_hex32() {
    let input: [u8; 9] = [0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xff];
    let expected_output = unindent(
        "
        0000000 67452301 efcdab89 000000ff
        0000011
        ",
    );
    new_ucmd!()
        .arg("--endian=little")
        .arg("-X")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_f16() {
    let input: [u8; 14] = [
        0x00, 0x3c, // 0x3C00 1.0
        0x00, 0x00, // 0x0000 0.0
        0x00, 0x80, // 0x8000 -0.0
        0x00, 0x7c, // 0x7C00 Inf
        0x00, 0xfc, // 0xFC00 -Inf
        0x00, 0xfe, // 0xFE00 NaN
        0x00, 0x84,
    ]; // 0x8400 -6.104e-5
    let expected_output = unindent(
        "
        0000000 1.000 0 -0 inf
        0000010 -inf NaN -6.104e-5
        0000016
        ",
    );
    new_ucmd!()
        .arg("--endian=little")
        .arg("-tf2")
        .arg("-w8")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_f32() {
    let input: [u8; 28] = [
        0x52, 0x06, 0x9e, 0xbf, // 0xbf9e0652 -1.2345679
        0x4e, 0x61, 0x3c, 0x4b, // 0x4b3c614e 12345678
        0x0f, 0x9b, 0x94, 0xfe, // 0xfe949b0f -9.876543E37
        0x00, 0x00, 0x00, 0x80, // 0x80000000 -0.0
        0xff, 0xff, 0xff, 0x7f, // 0x7fffffff NaN
        0xc2, 0x16, 0x01, 0x00, // 0x000116c2 1e-40
        0x00, 0x00, 0x7f, 0x80,
    ]; // 0x807f0000 -1.1663108E-38
    let expected_output = unindent(
        "
        0000000 -1.2345679 12345678 -9.8765427e37 -0
        0000020 NaN 1e-40 -1.1663108e-38
        0000034
        ",
    );
    new_ucmd!()
        .arg("--endian=little")
        .arg("-f")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_f64() {
    let input: [u8; 40] = [
        0x27, 0x6b, 0x0a, 0x2f, 0x2a, 0xee, 0x45, 0x43, // 0x4345EE2A2F0A6B27 12345678912345678
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0000000000000000 0
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x80, // 0x8010000000000000 -2.2250738585072014e-308
        0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0000000000000001 5e-324 (subnormal)
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc0,
    ]; // 0xc000000000000000 -2
    let expected_output = unindent(
        "
        0000000 12345678912345678 0
        0000020 -2.2250738585072014e-308 5e-324
        0000040 -2.0000000000000000
        0000050
        ",
    );
    new_ucmd!()
        .arg("--endian=little")
        .arg("-F")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_multibyte() {
    new_ucmd!()
        .arg("-c")
        .arg("-w12")
        .run_piped_stdin("Universität Tübingen \u{1B000}".as_bytes()) // spell-checker:disable-line
        .success()
        .no_stderr()
        .stdout_is(unindent(
            "
            0000000 U n i v e r s i t ä ** t
            0000014 T ü ** b i n g e n \u{1B000}
            0000030 ** ** **
            0000033
            ",
        ));
}

#[test]
fn test_width() {
    let input: [u8; 8] = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
    let expected_output = unindent(
        "
        0000000 000000 000000
        0000004 000000 000000
        0000010
        ",
    );
    new_ucmd!()
        .arg("-w4")
        .arg("-v")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_invalid_width() {
    let input: [u8; 4] = [0x00, 0x00, 0x00, 0x00];
    let expected_output = unindent(
        "
        0000000 000000
        0000002 000000
        0000004
        ",
    );
    new_ucmd!()
        .arg("-w5")
        .arg("-v")
        .run_piped_stdin(&input[..])
        .success()
        .stderr_is_bytes("od: warning: invalid width 5; using 2 instead\n".as_bytes())
        .stdout_is(expected_output);
}

#[test]
fn test_zero_width() {
    let input: [u8; 4] = [0x00, 0x00, 0x00, 0x00];
    let expected_output = unindent(
        "
        0000000 000000
        0000002 000000
        0000004
        ",
    );
    new_ucmd!()
        .arg("-w0")
        .arg("-v")
        .run_piped_stdin(&input[..])
        .success()
        .stderr_is_bytes("od: warning: invalid width 0; using 2 instead\n".as_bytes())
        .stdout_is(expected_output);
}

#[test]
fn test_width_without_value() {
    let input: [u8; 40] = [0; 40];
    let expected_output = unindent(
        "
        0000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000 000000
        0000040 000000 000000 000000 000000
        0000050
        ",
    );
    new_ucmd!()
        .arg("-w")
        .run_piped_stdin(&input[..])
        .success()
        .no_stderr()
        .stdout_is(expected_output);
}

#[test]
fn test_suppress_duplicates() {
    let input: [u8; 41] = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    ];
    let expected_output = unindent(
        "
        0000000 00000000000
                0000 0000
        *
        0000020 00000000001
                0001 0000
        0000024 00000000000
                0000 0000
        *
        0000050 00000000000
                0000
        0000051
        ",
    );
    new_ucmd!()
        .arg("-w4")
        .arg("-O")
        .arg("-x")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_big_endian() {
    let input: [u8; 8] = [0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; // 0xc000000000000000 -2
    let expected_output = unindent(
        "
        0000000 -2.0000000000000000
                -2.0000000 0
                c0000000 00000000
                c000 0000 0000 0000
        0000010
        ",
    );
    new_ucmd!()
        .arg("--endian=big")
        .arg("-F")
        .arg("-f")
        .arg("-X")
        .arg("-x")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
#[allow(non_snake_case)]
fn test_alignment_Xxa() {
    let input: [u8; 8] = [0x0A, 0x0D, 0x65, 0x66, 0x67, 0x00, 0x9e, 0x9f];
    let expected_output = unindent(
        "
        0000000 66650d0a 9f9e0067
                0d0a 6665 0067 9f9e
                nl cr e f g nul rs us
        0000010
        ",
    );
    // in this case the width of the -a (8-bit) determines the alignment for the other fields
    new_ucmd!()
        .arg("--endian=little")
        .arg("-X")
        .arg("-x")
        .arg("-a")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
#[allow(non_snake_case)]
fn test_alignment_Fx() {
    let input: [u8; 8] = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0]; // 0xc000000000000000 -2
    let expected_output = unindent(
        "
        0000000 -2.0000000000000000
                0000 0000 0000 c000
        0000010
        ",
    );
    // in this case the width of the -F (64-bit) determines the alignment for the other field
    new_ucmd!()
        .arg("--endian=little")
        .arg("-F")
        .arg("-x")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_max_uint() {
    let input = [0xFFu8; 8];
    let expected_output = unindent(
        "
        0000000 1777777777777777777777
                37777777777 37777777777
                177777 177777 177777 177777
                377 377 377 377 377 377 377 377
                18446744073709551615
                4294967295 4294967295
                65535 65535 65535 65535
                255 255 255 255 255 255 255 255
        0000010
        ",
    );
    new_ucmd!()
        .arg("--format=o8")
        .arg("-Oobtu8") // spell-checker:disable-line
        .arg("-Dd")
        .arg("--format=u1")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_hex_offset() {
    let input = [0u8; 0x1F];
    let expected_output = unindent(
        "
        000000 00000000 00000000 00000000 00000000
               00000000 00000000 00000000 00000000
        000010 00000000 00000000 00000000 00000000
               00000000 00000000 00000000 00000000
        00001F
        ",
    );
    new_ucmd!()
        .arg("-Ax")
        .arg("-X")
        .arg("-X")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_dec_offset() {
    let input = [0u8; 19];
    let expected_output = unindent(
        "
        0000000 00000000 00000000 00000000 00000000
                00000000 00000000 00000000 00000000
        0000016 00000000
                00000000
        0000019
        ",
    );
    new_ucmd!()
        .arg("-Ad")
        .arg("-X")
        .arg("-X")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_no_offset() {
    let input = [0u8; 31];
    const LINE: &str = " 00000000 00000000 00000000 00000000\n";
    let expected_output = [LINE, LINE, LINE, LINE].join("");
    new_ucmd!()
        .arg("-An")
        .arg("-X")
        .arg("-X")
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(expected_output);
}

#[test]
fn test_invalid_offset() {
    new_ucmd!().arg("-Ab").fails();
}

#[test]
fn test_skip_bytes() {
    let input = "abcdefghijklmnopq"; // spell-checker:disable-line
    new_ucmd!()
        .arg("-c")
        .arg("--skip-bytes=5")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            "
            0000005 f g h i j k l m n o p q
            0000021
            ",
        ));
}

#[test]
fn test_skip_bytes_error() {
    let input = "12345";
    new_ucmd!()
        .arg("--skip-bytes=10")
        .run_piped_stdin(input.as_bytes())
        .failure();
}

#[test]
fn test_read_bytes() {
    let input = "abcdefghijklmnopqrstuvwxyz\n12345678"; // spell-checker:disable-line
    new_ucmd!()
        .arg("--endian=little")
        .arg("--read-bytes=27")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(ALPHA_OUT));
}

#[test]
fn test_ascii_dump() {
    let input: [u8; 22] = [
        0x00, 0x01, 0x0a, 0x0d, 0x10, 0x1f, 0x20, 0x61, 0x62, 0x63, 0x7d, 0x7e, 0x7f, 0x80, 0x90,
        0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0xff,
    ];
    new_ucmd!()
        .arg("-tx1zacz") // spell-checker:disable-line
        .run_piped_stdin(&input[..])
        .no_stderr()
        .success()
        .stdout_is(unindent(
            r"
            0000000 00 01 0a 0d 10 1f 20 61 62 63 7d 7e 7f 80 90 a0 >...... abc}~....<
                    nul soh nl cr dle us sp a b c } ~ del nul dle sp
                    \0 001 \n \r 020 037 a b c } ~ 177 ** ** ** >...... abc}~....<
            0000020 b0 c0 d0 e0 f0 ff >......<
                    0 @ P ` p del
                    ** 300 320 340 360 377 >......<
            0000026
            ",
        ));
}

#[test]
fn test_filename_parsing() {
    // files "a" and "x" both exists, but are no filenames in the command line below
    // "-f" must be treated as a filename, it contains the text: minus lowercase f
    // so "-f" should not be interpreted as a formatting option.
    new_ucmd!()
        .arg("--format")
        .arg("a")
        .arg("-A")
        .arg("x")
        .arg("--")
        .arg("-f")
        .succeeds()
        .no_stderr()
        .stdout_is(unindent(
            "
            000000 m i n u s sp l o w e r c a s e sp
            000010 f nl
            000012
            ",
        ));
}

#[test]
fn test_stdin_offset() {
    let input = "abcdefghijklmnopq"; // spell-checker:disable-line
    new_ucmd!()
        .arg("-c")
        .arg("+5")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            "
            0000005 f g h i j k l m n o p q
            0000021
            ",
        ));
}

#[test]
fn test_file_offset() {
    new_ucmd!()
        .arg("-c")
        .arg("--")
        .arg("-f")
        .arg("10")
        .succeeds()
        .no_stderr()
        .stdout_is(unindent(
            r"
            0000010 w e r c a s e f \n
            0000022
            ",
        ));
}

#[test]
fn test_traditional() {
    // note gnu od does not align both lines
    let input = "abcdefghijklmnopq"; // spell-checker:disable-line
    new_ucmd!()
        .arg("--traditional")
        .arg("-a")
        .arg("-c")
        .arg("-")
        .arg("10")
        .arg("0")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            r"
            0000010 (0000000) i j k l m n o p q
                              i j k l m n o p q
            0000021 (0000011)
            ",
        ));
}

#[test]
fn test_traditional_with_skip_bytes_override() {
    // --skip-bytes is ignored in this case
    let input = "abcdefghijklmnop"; // spell-checker:disable-line
    new_ucmd!()
        .arg("--traditional")
        .arg("--skip-bytes=10")
        .arg("-c")
        .arg("0")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            r"
            0000000 a b c d e f g h i j k l m n o p
            0000020
            ",
        ));
}

#[test]
fn test_traditional_with_skip_bytes_non_override() {
    // no offset specified in the traditional way, so --skip-bytes is used
    let input = "abcdefghijklmnop"; // spell-checker:disable-line
    new_ucmd!()
        .arg("--traditional")
        .arg("--skip-bytes=10")
        .arg("-c")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            r"
            0000012 k l m n o p
            0000020
            ",
        ));
}

#[test]
fn test_traditional_error() {
    // file "0" exists - don't fail on that, but --traditional only accepts a single input
    new_ucmd!()
        .arg("--traditional")
        .arg("0")
        .arg("0")
        .arg("0")
        .arg("0")
        .fails();
}

#[test]
fn test_traditional_only_label() {
    let input = "abcdefghijklmnopqrstuvwxyz"; // spell-checker:disable-line
    new_ucmd!()
        .arg("-An")
        .arg("--traditional")
        .arg("-a")
        .arg("-c")
        .arg("-")
        .arg("10")
        .arg("0x10")
        .run_piped_stdin(input.as_bytes())
        .no_stderr()
        .success()
        .stdout_is(unindent(
            r"
            (0000020) i j k l m n o p q r s t u v w x
                      i j k l m n o p q r s t u v w x
            (0000040) y z
                      y z
            (0000042)
            ",
        ));
}

#[test]
fn test_od_invalid_bytes() {
    const INVALID_SIZE: &str = "x";
    const INVALID_SUFFIX: &str = "1fb4t";
    const BIG_SIZE: &str = "1Y";

    // NOTE:
    // GNU's od (8.32) with option '--width' does not accept 'Y' as valid suffix.
    // According to the man page it should be valid in the same way it is valid for
    // '--read-bytes' and '--skip-bytes'.

    let options = [
        "--read-bytes",
        "--skip-bytes",
        "--width",
        // "--strings", // TODO: consider testing here once '--strings' is implemented
    ];
    for option in &options {
        new_ucmd!()
            .arg(format!("{}={}", option, INVALID_SIZE))
            .arg("file")
            .fails()
            .code_is(1)
            .stderr_only(format!(
                "od: invalid {} argument '{}'",
                option, INVALID_SIZE
            ));

        new_ucmd!()
            .arg(format!("{}={}", option, INVALID_SUFFIX))
            .arg("file")
            .fails()
            .code_is(1)
            .stderr_only(format!(
                "od: invalid suffix in {} argument '{}'",
                option, INVALID_SUFFIX
            ));

        #[cfg(not(target_pointer_width = "128"))]
        new_ucmd!()
            .arg(format!("{}={}", option, BIG_SIZE))
            .arg("file")
            .fails()
            .code_is(1)
            .stderr_only(format!("od: {} argument '{}' too large", option, BIG_SIZE));
    }
}
On the Approximations of the Factors of Surface Segregation in Binary Alloys

A new "Complex Calculation of Surface Segregation" (CCSS) method (presented in greater detail elsewhere) is first outlined, and the factors governing surface segregation are identified. The approximate description of these factors by successive refinements of the structure model is then discussed. General considerations, together with numerical results for the AgPd alloy, show that each such refinement further lowers the surface free enthalpy. On this basis, optimal values of the computational parameters characterizing the surface conditions can be determined.
import time


def collectRepoCounts(actionList, failOnError, queryDelay):
    # Query the repository count for each action, sleeping between
    # queries to space out requests. splitActionOwnerName, executeQuery,
    # and formatCount are helpers defined elsewhere in this module.
    countMap = {}
    for i, action in enumerate(actionList):
        owner, actionName = splitActionOwnerName(action)
        count = executeQuery(owner, actionName, failOnError)
        if count > 0:
            countMap[actionName] = formatCount(count)
        if i + 1 < len(actionList):
            time.sleep(queryDelay)
    return countMap
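A hypothetical call, assuming the helper functions are in scope; the action names and delay below are made up purely for illustration:

# Hypothetical usage; action names and delay are illustrative only.
counts = collectRepoCounts(
    ["actions/checkout", "actions/cache"],  # owner/name action strings
    failOnError=True,
    queryDelay=2,  # seconds to sleep between queries
)
print(counts)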
Clinical parameters and bacteriological evaluation of Acess® as a periodontal therapeutic toothpaste: a non-blinded randomized clinical trial. Acess® toothpaste contains herbal ingredients (latania, chamomile, and myrrh) and reportedly improves periodontal disease by exerting astringent and anti-inflammatory effects on the gingiva. Although each of these ingredients has antibacterial effects, the effects of Acess® on oral bacteria remain unknown. The present study aimed to examine the effects of brushing teeth with Acess® on periodontal disease and the oral bacterial population. This was a randomized, open-label, parallel, intergroup trial. Each participant was randomly assigned to receive either Acess® or a control toothpaste formulation. Participants brushed their teeth twice daily, once in the morning and once in the evening, for five minutes per session. We then assessed clinical parameters such as gingival index (GI), pocket depth (PD), bleeding on probing (BOP), and plaque control record (PCR), and carried out salivary bacteriological examinations at baseline and at two and four weeks post-baseline.
/**
 * Returns the maximum of two intervals.
 */
public static Interval max(
        Interval interval1,
        Interval interval2) {

    if (interval1 == null) {
        throw new InternalException("interval1 may not be null");
    }

    if (interval2 == null) {
        throw new InternalException("interval2 may not be null");
    }

    if (interval1.compareTo(interval2) >= 0) {
        return interval1;
    }

    return interval2;
}
// @ts-nocheck
import {
    RecipeInterface,
    VerifySessionOptions,
    TypeNormalisedInput,
    SessionInformation,
    KeyInfo,
    AntiCsrfType,
} from "./types";
import Session from "./sessionClass";
import { Querier } from "../../querier";

declare class HandshakeInfo {
    antiCsrf: AntiCsrfType;
    accessTokenBlacklistingEnabled: boolean;
    accessTokenValidity: number;
    refreshTokenValidity: number;
    private rawJwtSigningPublicKeyList;
    constructor(
        antiCsrf: AntiCsrfType,
        accessTokenBlacklistingEnabled: boolean,
        accessTokenValidity: number,
        refreshTokenValidity: number,
        rawJwtSigningPublicKeyList: KeyInfo[]
    );
    setJwtSigningPublicKeyList(updatedList: KeyInfo[]): void;
    getJwtSigningPublicKeyList(): KeyInfo[];
    clone(): HandshakeInfo;
}

export default class RecipeImplementation implements RecipeInterface {
    querier: Querier;
    config: TypeNormalisedInput;
    handshakeInfo: HandshakeInfo | undefined;
    isInServerlessEnv: boolean;
    constructor(querier: Querier, config: TypeNormalisedInput, isInServerlessEnv: boolean);
    createNewSession: ({
        res,
        userId,
        jwtPayload,
        sessionData,
    }: {
        res: any;
        userId: string;
        jwtPayload?: any;
        sessionData?: any;
    }) => Promise<Session>;
    getSession: ({
        req,
        res,
        options,
    }: {
        req: any;
        res: any;
        options?: VerifySessionOptions | undefined;
    }) => Promise<Session | undefined>;
    getSessionInformation: ({ sessionHandle }: { sessionHandle: string }) => Promise<SessionInformation>;
    refreshSession: ({ req, res }: { req: any; res: any }) => Promise<Session>;
    revokeAllSessionsForUser: ({ userId }: { userId: string }) => Promise<string[]>;
    getAllSessionHandlesForUser: ({ userId }: { userId: string }) => Promise<string[]>;
    revokeSession: ({ sessionHandle }: { sessionHandle: string }) => Promise<boolean>;
    revokeMultipleSessions: ({ sessionHandles }: { sessionHandles: string[] }) => Promise<string[]>;
    getSessionData: ({ sessionHandle }: { sessionHandle: string }) => Promise<any>;
    updateSessionData: ({
        sessionHandle,
        newSessionData,
    }: {
        sessionHandle: string;
        newSessionData: any;
    }) => Promise<void>;
    getJWTPayload: ({ sessionHandle }: { sessionHandle: string }) => Promise<any>;
    updateJWTPayload: ({
        sessionHandle,
        newJWTPayload,
    }: {
        sessionHandle: string;
        newJWTPayload: any;
    }) => Promise<void>;
    getHandshakeInfo: (forceRefetch?: boolean) => Promise<HandshakeInfo>;
    /**
     * Update the cached list of signing keys
     * @param keyList The list of signing keys on the response object. Before 2.9 always undefined, after it always contains at least 1 key
     * @param publicKey The public key of the latest signing key
     * @param expiryTime The expiry time of the latest signing key
     */
    updateJwtSigningPublicKeyInfo: (keyList: KeyInfo[] | undefined, publicKey: string, expiryTime: number) => void;
    getAccessTokenLifeTimeMS: () => Promise<number>;
    getRefreshTokenLifeTimeMS: () => Promise<number>;
}
export {};
import { isUndefined } from 'helpers'

export const trim = (str: string): string =>
  str && str.toString().replace(/^\s+|\s+$/g, '')

export const clean = (str: string): string =>
  (
    str &&
    str
      .toString()
      .replace(/\s+/gm, '')
      .replace(/_/gm, '')
      .replace(/-/gm, '')
      .replace(/[()]+/gm, '')
  ) || ''

export const trimmed = (str: string): string =>
  str && (!isUndefined(str) ? clean(str || '').toString() : '')
package leetcode

import "sort"

// https://leetcode-cn.com/problems/group-anagrams/
// 49. Group Anagrams
// Ways to hash a string:
// 1. sort the string
// 2. use a [26]int{} to count how often each letter occurs
func groupAnagrams(strs []string) [][]string {
	hash := map[string][]string{}
	for _, str := range strs {
		key := []byte(str)
		sort.Slice(key, func(i, j int) bool { return key[i] < key[j] })
		s := string(key)
		hash[s] = append(hash[s], str)
	}
	ret := make([][]string, 0, len(hash))
	for _, v := range hash {
		ret = append(ret, v)
	}
	return ret
}
/**
 * Add route
 * @param controllerKey A key that locates the controller
 * @param controllerClass Controller class
 * @param viewPath View path for this controller
 */
public Routes add(String controllerKey, Class<? extends Controller> controllerClass, String viewPath) {
    if (controllerKey == null)
        throw new IllegalArgumentException("The controllerKey can not be null");
    controllerKey = controllerKey.trim();
    if ("".equals(controllerKey))
        throw new IllegalArgumentException("The controllerKey can not be blank");
    if (controllerClass == null)
        throw new IllegalArgumentException("The controllerClass can not be null");

    if (!controllerKey.startsWith("/"))
        controllerKey = "/" + controllerKey;
    if (map.containsKey(controllerKey))
        throw new IllegalArgumentException("The controllerKey already exists: " + controllerKey);

    map.put(controllerKey, controllerClass);

    if (viewPath == null || "".equals(viewPath.trim()))
        viewPath = controllerKey;

    viewPath = viewPath.trim();
    if (!viewPath.startsWith("/"))
        viewPath = "/" + viewPath;
    if (!viewPath.endsWith("/"))
        viewPath = viewPath + "/";

    if (baseViewPath != null)
        viewPath = baseViewPath + viewPath;

    viewPathMap.put(controllerKey, viewPath);
    return this;
}
-- | Pretty printing with class
module GF.Text.Pretty(module GF.Text.Pretty,module PP) where
import qualified Text.PrettyPrint as PP
import Text.PrettyPrint as PP(Doc,Style(..),Mode(..),style,empty,isEmpty)

class Pretty a where
  pp :: a -> Doc
  ppList :: [a] -> Doc
  ppList = fsep . map pp -- hmm

instance Pretty Doc where pp = id
instance Pretty Int where pp = PP.int
instance Pretty Integer where pp = PP.integer
instance Pretty Float where pp = PP.float
instance Pretty Double where pp = PP.double
instance Pretty Char where pp = PP.char; ppList = PP.text

instance Pretty a => Pretty [a] where
  pp = ppList
  ppList = fsep . map pp -- hmm

render x = PP.render (pp x)
render80 x = renderStyle style{lineLength=80,ribbonsPerLine=1} x
renderStyle s x = PP.renderStyle s (pp x)

infixl 5 $$,$+$
infixl 6 <>,<+>

x $$ y = pp x PP.$$ pp y
x $+$ y = pp x PP.$+$ pp y
x <+> y = pp x PP.<+> pp y
x <> y = pp x PP.<> pp y

braces x = PP.braces (pp x)
brackets x = PP.brackets (pp x)
cat xs = PP.cat (map pp xs)
doubleQuotes x = PP.doubleQuotes (pp x)
fcat xs = PP.fcat (map pp xs)
fsep xs = PP.fsep (map pp xs)
hang x d y = PP.hang (pp x) d (pp y)
hcat xs = PP.hcat (map pp xs)
hsep xs = PP.hsep (map pp xs)
nest d x = PP.nest d (pp x)
parens x = PP.parens (pp x)
punctuate x ys = PP.punctuate (pp x) (map pp ys)
quotes x = PP.quotes (pp x)
sep xs = PP.sep (map pp xs)
vcat xs = PP.vcat (map pp xs)
def _store_data(self, data):
    # data is expected to be a (timestamp, signal-values) pair.
    self._time_buffer.append(data[0])
    self._store_signal_values(data[1])
// nettest.cc
//	Test out message delivery between two "Nachos" machines,
//	using the Post Office to coordinate delivery.
//
//	Two caveats:
//	  1. Two copies of Nachos must be running, with machine ID's 0 and 1:
//		./nachos -m 0 -o 1 &
//		./nachos -m 1 -o 0 &
//
//	  2. You need an implementation of condition variables,
//	     which is *not* provided as part of the baseline threads
//	     implementation. The Post Office won't work without
//	     a correct implementation of condition variables.
//
// Copyright (c) 1992-1993 The Regents of the University of California.
// All rights reserved. See copyright.h for copyright notice and limitation
// of liability and disclaimer of warranty provisions.

#include "copyright.h"
#include "system.h"
#include "network.h"
#include "post.h"
#include "interrupt.h"

// Test out message delivery, by doing the following:
//	1. send a message to the machine with ID "farAddr", at mail box #0
//	2. wait for the other machine's message to arrive (in our mailbox #0)
//	3. send an acknowledgment for the other machine's message
//	4. wait for an acknowledgement from the other machine to our
//	   original message

void
MailTest(int farAddr)
{
    PacketHeader outPktHdr, inPktHdr;
    MailHeader outMailHdr, inMailHdr;
    const char *data = "Hello there!";
    const char *ack = "Got it!";
    char buffer[MaxMailSize];

    // construct packet, mail header for original message
    // To: destination machine, mailbox 0
    // From: our machine, reply to: mailbox 1
    outPktHdr.to = farAddr;
    outMailHdr.to = 0;
    outMailHdr.from = 1;
    outMailHdr.length = strlen(data) + 1;

    // Send the first message
    postOffice->Send(outPktHdr, outMailHdr, data);

    // Wait for the first message from the other machine
    postOffice->Receive(0, &inPktHdr, &inMailHdr, buffer);
    printf("Got \"%s\" from %d, box %d\n", buffer, inPktHdr.from, inMailHdr.from);
    fflush(stdout);

    // Send acknowledgement to the other machine (using "reply to" mailbox
    // in the message that just arrived)
    outPktHdr.to = inPktHdr.from;
    outMailHdr.to = inMailHdr.from;
    outMailHdr.length = strlen(ack) + 1;
    postOffice->Send(outPktHdr, outMailHdr, ack);

    // Wait for the ack from the other machine to the first message we sent.
    postOffice->Receive(1, &inPktHdr, &inMailHdr, buffer);
    printf("Got \"%s\" from %d, box %d\n", buffer, inPktHdr.from, inMailHdr.from);
    fflush(stdout);

    // Then we're done!
    interrupt->Halt();
}
The evaluation of clitoral blood flow and sexual function in elite female athletes. INTRODUCTION Clitoral blood flow measurements using clitoral color Doppler ultrasound have been performed with increasing frequency in order to assess female sexual function and dysfunction. Trials evaluating sexual function in healthy subjects, especially in the subgroup of elite female athletes, are limited. AIM The aim of the study was to evaluate whether elite female athletes and sedentary healthy females differ in their clitoral blood flow and sexual function as an expression of their physical fitness status. METHODS Twenty-five elite female athletes (Group I) and healthy female subjects (Group II) were enrolled as volunteers in the study. All women were instructed to complete the Female Sexual Function Index (FSFI) questionnaire. Each subject underwent high-definition color Doppler ultrasonography to measure clitoral blood flow parameters. MAIN OUTCOME MEASURES The effect of physical activity on clitoral blood flow and sexual life in women. RESULTS Mean age, mean age at menarche, mean age at marriage, and body mass index were similar for both groups. In Group I, the mean peak systolic velocity and end-diastolic velocity were higher than those of Group II, whereas the mean resistive index (RI) was similar for both groups. There were statistically significant differences in the total FSFI score and in all domain scores, except the desire domain, between the groups. CONCLUSION Elite female athletes demonstrated better clitoral blood flow and better sexual function than sedentary healthy females. Superior physical fitness therefore seems to correlate with better sexual function.
Many Swedish players will represent their country at WCS Jönköping, but only Namshar has the honor of qualifying through the brutal European Challenger tournament. In this surprisingly Blizzard approved™ interview, I talked to Namshar about World of WarCraft, finding inspiration, and his journey to becoming a top player in one of StarCraft's traditional powerhouse nations.

*This interview has been edited and condensed.

Photo: R1CH

Wax: To begin, could you tell us a little bit about yourself? Where you're from, how you got into StarCraft, stuff like that.

Namshar: I grew up in a city called Halmstad on the south-west coast of Sweden. I've always loved playing games of all sorts since I was little. I actually played World of Warcraft for many years in my teen years. At some point one of my arena partners who I played with a lot convinced me over some time to go and pick up Wings of Liberty and play with him on there. I gave it a go and at first I played it very casually on the side, and it just kept growing on me since I've always been very competitive. I started enjoying it more and more as I advanced in the 1v1 leagues!

How'd you end up choosing your current ID? Does it have anything to do with WarCraft?

It does... My ID used to be Namash, and the creative 12-year-old me was sitting down with my friends, creating my first WoW character. We started reading words backwards to try to discover something new and amazing. The class I created was a Shaman, so Namash is basically Shaman backwards if you switch places with the S and H. At some point I was transferring to a different realm in WoW to gather up with more friends, and Namash to my disbelief was already taken. So then I tried to brainstorm some similar-ish names to Namash, and eventually came up with Namshar. I thought it sounded fittingly fierce and cool for a big and dangerous Orc Shaman.

Were you one of those guys that went enhance and prayed for windfury RNG, or were you a skillful and helpful restoration player?

For the longest time I was actually a dedicated and faithful Elemental Shaman, standing in the back and throwing Lightning Bolts. I used to always have the biggest prejudice about Enhancement players being nubs since they were always so annoying to play against when I wanted to freely cast my spells. In the last expansion I played I didn't like some of the changes they did to Elemental, so I finally betrayed my conviction and played Resto in that one ^^

How do you feel about the watchability of high level Arena WoW? Every BlizzCon I end up catching some of it, but I can never make any sense of it. I know it must take a ton of skill, but it's so hard to understand.

Yeah... back when I was playing a lot, I was of course trying to catch most of it, trying to pick things up from the players. But even being a somewhat decent player myself I always thought it was a pretty big mess to follow. It's been a while since I've watched it last, but thinking back it kind of reminds me of Overwatch, which for me personally is pretty tough and stressful to spectate as well.

You think any of the specific skills you picked up in WoW translated to you being good at StarCraft?

To some degree yeah maybe... hotkey efficiency and mindset about hotkeys is something I was always very thorough about in WoW, anything to help me improve my game in the slightest. Maybe mindset about details would be a better way to put it.
To not have anything hold back your game even if it's something small. Maybe the biggest trait that I feel WoW helped me develop was to actively criticize myself and my play. To recognize things I'm doing wrong or need to do better. Another thing that is also very important in Starcraft is to take inspiration from others, which is something I was always very diligent about in WoW. Really analyzing other players in detail will help you find flaws in your play that you didn't even know were there.

Well, I was wondering how that might tie in to how you currently play SC2. I feel like one of the reasons your series against ShoWTimE at WCS Austin got such a good response wasn't just because it was close, but because you played an exciting style. Is that something you picked up from observation/study, or something else?

Hmm, yeah. I actually got a lot of comments afterwards about my tendency towards Roaches. And although I did go for a big commitment with Roaches and Ravagers on Newkirk vs. ShoWTimE, after the game I instantly asked myself "what the hell am I doing?" It wasn't something that I was planning to do in my series at all, so I was pretty disappointed with myself about that. I think my lack of stage appearances made me doubt myself enough for me to change up my gameplan like that. Thankfully I was able to put my mind in the right place after that game. Roaches is more my way of trying to be extra safe vs all the different kinds of adept attacks or all-ins Protoss can hit you with. If I think I'm getting all-in'd I'll make more, if I scout otherwise I'll make a lot less. Statistically I get a lot of wins by managing to defend big attacks well early on.

My play vs Protoss is basically about my love for Hydras. I feel like the potential they have with army movement and strategy in the mid-game really suits me. After their health got buffed it encouraged me to practice a lot more hydra-based play again. I'm sure many have seen a lot of similarities in the way I use them and lurkers vs Protoss with what Nerchio used to do for a long time. With drops and so on. And there's definitely a lot to it. I implemented many aspects of his mid-game play in the hydra-lurker "era" to my own. As well as small things from other players that I have spotted and liked over time, combined with some preferences of my own. So a lot of my success with that playstyle definitely comes from inspiration from others!

Talking about WCS Austin again, were you surprised with your final placement? You've been around the scene for a while, but the results only really started picking up in LotV, so I don't know what you expect from yourself.

Well I have high goals for myself overall as a player, but I tried to go into the tournament with the goal to just play to the best of my ability. Because I knew that I'm able to beat anyone at that tournament if I play my A-game. So I didn't really put any expectations on myself to get a particular placing in the tournament. I just try to focus on always moving forward and always improving myself. If you just keep improving then the results will eventually follow. I had a pretty good vibe about Austin since I got to the Ro8 there last year, and even though my run ended in the Ro16 this time around I felt all right since I felt like I was able to show a lot of people what I can do.

Is there an interesting story behind becoming a better player in LotV, like a particular moment that got you over the hump? Or is it just slow improvement over the years, and we just haven't noticed until now?
It has been a slow and steady improvement over the years, but I think Legacy might have flipped a switch in me, simply because I really enjoy playing the game. More so than any previous version of Starcraft 2. It's more fast-paced and action-packed, which I really like. And I think passion and enjoyment for a game is really key if you want to reach the top level. You need to put in a lot of hours and I think enjoyment really improves your practice and rate of improvement.

Do you feel like speed and good mechanics are your biggest strengths? Or are the fast-paced games simply what you enjoy the most at a personal level? I'm reminded of former Brood War pro Hiya who was terrible at bio TvZ, but said he would always use it simply because it was the most fun way to play.

I'd like to think that my speed and mechanics are at a good level, hopefully a bit better than Hiya then xD, but I'm not sure that it would be something that sets me apart from some other players. I just like when there's stuff happening and not too much downtime. If I think about strengths of mine then maybe it would be more about army movements and decision-making in the mid-game. I also always really enjoyed micro, but I'm not sure where I would rate myself there, seems like dangerous territory.

So looking ahead to Jönköping, what do you have to work on? You said the lack of experience in that kind of live setting was a problem vs ShoWTimE. Will mentality be the key for you there?

Naturally there's always things in-game that I want to address, but yeah I think mentality and confidence in myself has definitely held me back a bit overall and in certain matches. It's something that I've always strived to work on in different ways. For sure more experience in big tournaments is helping me in fixing that. Believing in yourself at all times is a very underrated skill I think.

What do you think of the Namshar face from Austin? On one hand it's funny, but on the other hand I like to think it really reflects how tense that series was for you. Unless you're always like that.

Hahaha, yeah I truly couldn't believe I would look the way I did so that really caught me off guard xD. Was a big "oh no..." kind of moment. But yea I got insanely amped up when I was finally able to show what I'd practiced and started to bring it back. Was an important moment for me in the tournament. And then I was like f***, I need to get my s*** together and focus on the next map. It can always be dangerous if you get too excited and shaky. Definitely was tense for me so I tried to lower my heart rate a bit.

Bit of a funny side-note that not everyone might know, is that there is also an emote on Basetrade TV's channel of me doing the same face. This was added back when I wasn't at the level I am now and I was also streaming a lot. I knew that I loved talking about StarCraft so I wanted to try out some casting and approached BTTV and casted with them for a while! Was fun every time and it was great how open and friendly Rifkin was to it, but eventually I felt like it was taking too much energy away from my real dreams and goals as a player. I think casting for several hours is actually a lot more exhausting than people might think it is.

So at Jönköping, you might end up being the Swedish hope. Do you think that's kinda crazy, given the kind of players who were in that position in the past, like NaNiwa and ThorZain?
It does actually feel pretty strange sometimes to see how far I've come, considering for how long I was watching and looking up to those players in the past. Of course Sweden has many good Zergs with Zanster and SortOf both being really strong. I hope we can all perform for the Swedish fans in Jönköping. But certainly being the top Swede has always been a big goal of mine, so I hope I can live up to that in Jönköping!

Alright, you've given so many serious answers, so I'll just throw in the pirate question to end. If your loved ones were kidnapped by pirates, and you could recruit three progamers to take with you on a rescue mission, who would you take and why?

Hmm. Definitely TLO to devise a (creative?) masterplan... Big biceps ShoWTimE as well to deal with the buff pirates. My third recruit would probably be Harstem for high morale and top banter.

Alright, any last comments, shoutouts, things you wanna say?

Shoutout to you, these interviews are great even if my responses were dull and heavy. Also to my family and my girlfriend who are always cheering me on. And to Dead Pixels, they're lovely. Thanks!

Make sure to check out Namshar's match vs ShoWTimE from WCS Austin to see what kind of play he's capable of showing in high-stakes matches. You can follow Namshar on Twitter at @dPixNamshar, and you can find his competitive statistics on Aligulac.

You can read more interviews with the WCS Challengers who will compete at WCS Jönköping during June 17-19: Seither, iaguz, Namshar, JonSnow, Winter, Semper, Serral. More interviews are coming soon™
#include <stdio.h>

long long int n;

int main() {
    scanf("%lli", &n);

    /* Binary search over min_k. Despite the names, max_illegal shrinks
       toward the smallest value for which morning >= evening holds,
       while min_legal tracks the largest value known to fail. */
    long long int min_k;
    long long int max_illegal = n, min_legal = 0;
    long long int morning, evening;

    while (max_illegal - min_legal > 1) {
        min_k = (max_illegal + min_legal) >> 1;
        // printf("%lli %lli, %lli", max_illegal, min_k, min_legal);

        /* Simulate: each round, up to min_k units go to "morning",
           then a tenth of the remainder goes to "evening". */
        long long int temp = n;
        morning = 0;
        evening = 0;
        while (temp > 0) {
            if (temp > min_k) {
                morning += min_k;
                temp -= min_k;
                evening += temp / 10;
                temp -= temp / 10;
            } else {
                morning += temp;
                temp = 0;
            }
            // printf("%lli %lli\n", morning, evening);
        }

        if (morning >= evening) {
            max_illegal = min_k;
        } else {
            min_legal = min_k;
        }
    }

    printf("%lli\n", max_illegal);
    return 0;
}
# Reconstruct the diagonal of a 3x3 grid from its six off-diagonal
# entries, assuming every row, column, and the main diagonal share
# the same sum s (as in a magic square).
x, a, b = list(map(int, input().split()))
c, y, d = list(map(int, input().split()))
e, f, z = list(map(int, input().split()))

# Summing the three rows gives (x+y+z) + (off-diagonal sum) = 3s,
# and the diagonal itself sums to s, so s is half the off-diagonal sum.
s = int((a + b + c + d + e + f) / 2)

print(s - (a + b), a, b)
print(c, s - (c + d), d)
print(e, f, s - (e + f))
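A quick sanity check of that derivation on a known example (the grid here is the classic 3x3 magic square, used purely for illustration):

# All rows, columns, and the main diagonal of a magic square share one sum,
# so half the off-diagonal total must recover that sum.
grid = [[2, 7, 6], [9, 5, 1], [4, 3, 8]]  # classic 3x3 magic square
off = grid[0][1] + grid[0][2] + grid[1][0] + grid[1][2] + grid[2][0] + grid[2][1]
s = off // 2  # 30 // 2 = 15, the magic constant
assert all(sum(row) == s for row in grid)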
Effect of insulin-like growth factor 1 receptor inhibitor on sensitization of head and neck cancer cells to cetuximab and methotrexate. 6079 Background: Insulin-like growth factor 1 receptor (IGF1R) is highly expressed in head and neck squamous cell carcinoma (HNSCC), and IGF1R inhibitors have been shown to modulate sensitivity to selected chemotherapeutic agents and radiation. The combined effects of an IGF1R inhibitor, MK-0646, with cetuximab or with cytotoxic agents commonly used in the treatment of recurrent and/or metastatic HNSCC were examined in cetuximab-resistant and cetuximab-sensitive HNSCC cell lines. METHODS The cell lines SCC1 and its cetuximab-resistant clone 1Cc8 were treated with MK-0646, cetuximab, or methotrexate, and with a combination of MK-0646 and each anti-cancer drug (MK-0646 was supplied by Merck & Co., Inc.). The effect of the treatments on cell proliferation and anti-tumor activity was determined using the MTS assay in vitro and, in vivo, using mouse xenografts generated from the cell lines. Overall changes in gene and protein expression with the treatments were determined by DNA microarrays and western blots. RESULTS As monotherapy, MK-0646 showed high sensitivity in the SCC1 xenograft model, and in combination it increased sensitivity to cetuximab in SCC1 and to methotrexate in 1Cc8. However, MK-0646 did not inhibit cell proliferation of 1Cc8 in vitro or in vivo. The gene expression array and western blot analyses showed that MK-0646 decreased expression of AKT and dihydrofolate reductase (DHFR), a target of methotrexate. Increased expression of AKT and DHFR has been shown to be associated with cetuximab and methotrexate resistance as well as radiation resistance. CONCLUSIONS The development of tolerance in response to the IGF1R inhibitor and cetuximab is common. Whereas IGF1R inhibitors may have little therapeutic impact in cetuximab-resistant disease on their own, they may modulate the response to selected chemotherapeutic agents and to radiation. The IGF1R inhibitor appears to enhance the cetuximab and methotrexate response and modulates genes associated with radiation resistance, thereby providing alternative regimens for patients with recurrent and refractory HNSCC who have developed resistance to initial therapies. No significant financial relationships to disclose.
Ft. Collins, Colo. – As the days grow colder and the nights get long, New Belgium Brewing has unleashed one of the burliest beers of their Hop Kitchen series with 100 IBUs of beautiful black bitterness. Hop Stout entices with a sweet, malty depth before a wicked bite of hops kicks in with the force of an imperial stout grappling with an IPA. Hop Stout pours a deep, delicious chocolate brown with a creamy head atop. Dark chocolate, roasted coffee and toasted bread notes emerge from the blending of eight different grains and six hop varieties, which accent the richness with a citrusy, woodsy pop. Equinox, Chinook and Willamette are just a few of the hops that arm this unique stout. “We wanted a wickedly hoppy and deeply malty stout to celebrate the fall season,” said Grady Hull, Assistant Brewmaster of New Belgium Brewing. “Hop Stout draws you into the malty woodwork of a traditional stout and then the hops storm in. It’s a really incredible, complex beer for those seeking a little more adventure with the change of seasons.” At 8 percent ABV and 100 IBUs, Hop Stout isn’t for the faint of heart, but it’s for malt and hop lovers alike. Hop Stout is now available on draft. To find Hop Stout near you, use the New Belgium Libation Location tool: NewBelgium.com/Beer/Finder. For more information about New Belgium Brewing, visit NewBelgium.com. You can also follow New Belgium on Facebook at Facebook.com/NewBelgium and Twitter @NewBelgium. About New Belgium Brewing Company New Belgium Brewing, makers of Fat Tire Amber Ale and a host of Belgian-inspired beers, is recognized as one of Outside Magazine’s Best Places to Work and one of the Wall Street Journal’s Best Small Businesses. The 100% employee-owned brewery is a Platinum-level Bicycle Friendly Business as designated by the League of American Bicyclists, one of World Blu’s Most Freedom-Centered Workplaces, and a Certified B Corp. In addition to Fat Tire, New Belgium brews ten year-round beers; Ranger IPA, Rampant Imperial IPA, Shift Pale Lager, Slow Ride Session IPA, Snapshot Wheat, Sunshine Wheat, 1554 Black Lager, Blue Paddle Pilsner, Abbey Belgian Ale and Trippel. Learn more at NewBelgium.com.
// Setup API DSL roots.
func init() {
	design.Design = design.NewAPIDefinition()
	design.GeneratedMediaTypes = make(design.MediaTypeRoot)
	design.ProjectedMediaTypes = make(design.MediaTypeRoot)
	dslengine.Register(design.Design)
	dslengine.Register(design.GeneratedMediaTypes)
}
// secretsUpdater is an internal wrapper over kube secret operations.
func (k *Config) secretsUpdater(secretList map[string]data.SecretAttribute) error {
	if len(k.secretObject.Data) == 0 {
		k.secretObject.Data = make(map[string][]byte)
	}

	for secretKey, secretAttributes := range secretList {
		k.secretObject.Data[secretKey] = []byte(secretAttributes.Value)
		if secretAttributes.Value == "" || secretAttributes.MarkedForDeletion {
			delete(k.secretObject.Data, secretKey)
		}
	}

	annotations := k.secretObject.GetAnnotations()
	if len(annotations) == 0 {
		annotations = make(map[string]string)
	}
	annotations["dateUpdated"] = time.Now().Format(time.RFC3339)
	k.secretObject.SetAnnotations(annotations)

	if !k.KubeSecretExists {
		return k.secretCreator()
	}
	return k.secretUpdater()
}
package main

import (
	"encoding/json"
	"io/ioutil"
)

type Config struct {
	ClientID      string `json:"client_id"`
	ClientSecret  string `json:"client_secret"`
	SessionSecret string `json:"session_secret"`
}

func passConfig(location string) (Config, error) {
	var c Config
	bytes, err := ioutil.ReadFile(location)
	if err != nil {
		return c, err
	}
	if err = json.Unmarshal(bytes, &c); err != nil {
		return c, err
	}
	return c, nil
}
Vehicles are left stranded on Texas State Highway 288 in Houston, Texas on May 26, 2015. Heavy rains throughout Texas put the city of Houston under massive amounts of water, closing roadways and trapping residents in their cars and buildings, according to local reports. Rainfall reached up to 11 inches (27.9 cm) in some parts of the state, according to national forecasters, and the heavy rains quickly pooled over the state's already saturated soil. AFP PHOTO/AARON M. SPRECHER (Photo credit should read Aaron M. Sprecher/AFP/Getty Images)

As floodwaters ravage Texas and Oklahoma, a new analysis finds that heavy downpours have increased dramatically since 1950. And scientists project that precipitation patterns will become increasingly erratic as the climate changes.

The Northeast had a 31 percent increase in heavy downpours between the 1950 to 1959 period and the 2005 to 2014 period. The Midwest had a 16 percent increase between those periods, according to a report from climate news and data website Climate Central. Individual states have seen even more dramatic changes in rainfall. Rhode Island had a 104 percent increase, while Maine had a 61 percent increase. The report also notes that rainfall is highly localized; levels can vary widely by metropolitan area, even within a state. McAllen, Texas, for example, saw a 700 percent increase, while the increase for the state as a whole was much more modest.

The analysis considers heavy downpours to be events that fall within the top 1 percent of daily precipitation totals for the 65 years of data they collected. Climate Central also created an interactive map showing the changing precipitation patterns over time.

Extreme rainfall events are projected to get even worse as the climate warms, the report says. "Climate scientists predict that the recent trends toward more heavy downpours will continue throughout this century," it concludes. "Climate models predict that if carbon emissions continue to increase as they have in recent decades, the types of downpours that used to happen once every 20 years could occur every 4 to 15 years by 2100."
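The report's threshold definition is straightforward to reproduce: a day counts as a heavy downpour if its precipitation total falls in the top 1 percent of all daily totals in the 65-year record. A minimal sketch of that bookkeeping, using synthetic data since the station records themselves are not included here (the array names and the gamma-distributed rainfall are illustrative assumptions, not the report's actual inputs):

import numpy as np

# Hypothetical inputs: one precipitation total (mm) per day, 1950-2014,
# with a matching array of years. A real analysis would read station data.
rng = np.random.default_rng(0)
years = np.repeat(np.arange(1950, 2015), 365)
precip = rng.gamma(shape=0.3, scale=8.0, size=years.size)  # synthetic rain

# "Heavy downpour" = a day in the top 1% of all daily totals on record.
threshold = np.percentile(precip, 99)

def heavy_days_per_period(start, end):
    """Count days at/above the heavy-downpour threshold in [start, end]."""
    mask = (years >= start) & (years <= end)
    return int(np.sum(precip[mask] >= threshold))

early = heavy_days_per_period(1950, 1959)
late = heavy_days_per_period(2005, 2014)
print(f"change between periods: {100 * (late - early) / early:+.0f}%")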
import React, { forwardRef, useEffect, useState } from 'react'
import { GeneratorMode } from '../../../store/app/types'
import { openModal } from '../../../store/modals/events'
import { $modals } from '../../../store/modals/store'
import { onOpenGenerator, onSaveGenerated } from '../../../store/app/events'

import UIInput, { InputOptions } from '../input'
import IconKey from '../../icons/key'
import IconEye from '../../icons/eye'
import IconEyeCrossed from '../../icons/eye-crossed'

import './_index.scss'

interface PasswordFieldProps extends InputOptions {
  onGenerate?: Function,
  generator?: boolean
}

const PasswordField = forwardRef((props: PasswordFieldProps, ref: any) => {
  const [fieldType, setFieldType] = useState<'password' | 'text'>('password')
  const [localError, setLocalError] = useState(props.error)
  const innerRef = React.createRef()

  const fieldProps = () => {
    const _props = { ...props }
    delete _props.onGenerate
    delete _props.generator
    return _props
  }

  const toggleFieldType = () => {
    setFieldType(fieldType === 'password' ? 'text' : 'password')
  }

  const openGenerator = () => {
    onOpenGenerator(GeneratorMode.field)
    openModal('password_generator')
  }

  const onInput = (e: React.ChangeEvent) => {
    props.onInput && props.onInput(e)
    setLocalError('')
  }

  const onBlur = (e: any) => {
    props.onBlur && props.onBlur(e)
    if (!props.value) {
      setLocalError('')
    }
  }

  useEffect(() => {
    try {
      ref.current = innerRef.current
    } catch (e) {}
  }, [ref])

  useEffect(() => {
    setLocalError(props.error)
  }, [props.error])

  useEffect(() => {
    $modals.on(onSaveGenerated, (state, value) => {
      props.onGenerate && props.onGenerate(value)
      return state
    })
    return () => {
      $modals.off(onSaveGenerated)
    }
  }, [])

  return (
    <div className="component -password-field">
      <div className="icons flex a-center">
        {props.generator && <div className="icon-container -icon -key" onClick={openGenerator}>
          <IconKey/>
        </div>}
        {!localError && <div className="icon-container -eye" onClick={toggleFieldType}>
          {fieldType === 'password' ? <IconEye/> : <IconEyeCrossed/>}
        </div>}
      </div>
      <UIInput
        {...fieldProps()}
        ref={innerRef}
        type={fieldType}
        error={localError}
        name="password"
        onInput={onInput}
        onBlur={onBlur}>
        {props.children}
      </UIInput>
    </div>
  )
})

PasswordField.displayName = 'PasswordField'

export default PasswordField
PORTLAND, Ore. — Portland Thorns FC defender Sarah Huffman today announced her retirement from professional soccer. Huffman retires after playing six professional seasons, including the 2014 season with Thorns FC in the National Women’s Soccer League (NWSL).

“Soccer has been one of the biggest loves of my life. While I'm sad to be stepping away from it at the professional level, I will always stay involved in some capacity,” said Huffman. “I have learned the most valuable life lessons through the sport and will forever cherish the journeys it took me on. I am beyond excited to find what I hope will be my next love at Nike. I am excited about the opportunities ahead and will forever have soccer to thank for molding me into the person I am.

“A special thank you to the entire Portland Thorns organization. There was no better way to end a career than in your own backyard and in front of the best fans in the world. It was an honor to play for the club. You can guarantee our family will be season-ticket holders for life.”

Huffman, 30, was acquired in a trade from the Western New York Flash on April 7 and appeared in 22 regular-season matches (nine starts) for Portland, recording one goal and one assist during the 2014 season. She tallied her first career NWSL goal in a 2-2 draw against the Chicago Red Stars on July 4.

“I want to thank Sarah for all her contributions to the club,” said Thorns FC general manager Gavin Wilkinson. “A seasoned professional, she brought leadership and experience to our team during her time here in Portland. We understand her decision and wish her all the best in her future endeavors.”

During the inaugural NWSL season in 2013, Huffman appeared in 14 matches (11 starts) for the Flash and started each of Western New York’s two playoff games, playing 72 minutes in the championship match. The top pick in the 2008 Women’s Professional Soccer (WPS) draft by the Washington Freedom, Huffman played six professional seasons. From 2009-11, Huffman played with the Freedom and magicJack of WPS and also played professionally in Norway in 2008 with Røa IL, helping lead the team to a Norwegian Women’s Cup title.

“Sarah will be sorely missed. She had a terrific season and was a top-class professional, both on and off the field,” said Thorns FC head coach Paul Riley. “Her soccer career was memorable and successful. Her relentless pursuit of her dream is totally admirable. I'm thankful to have coached her and I'm sure she was ecstatic to finish her career in such a fantastic soccer environment. She will be a huge success in the next stage of her life and we wish her all the best.”

At the international level, Huffman was a member of U.S. youth national teams from the U-16 through the U-23 level and made her debut with the senior national team in July 2010 against Sweden. In 2002, Huffman helped the United States win the FIFA U-19 Women’s World Championship. A two-time All-American, Huffman recorded 28 assists and 12 goals during her career at the University of Virginia and is among the career leaders in assists for the Cavaliers.
// deleteAuthorizedApp is a helper for ensuring an authorized app is deleted.
func deleteAuthorizedApp(db *database.Database, key string) error {
	app, err := db.FindAuthorizedAppByAPIKey(key)
	if err != nil {
		if database.IsNotFound(err) {
			return nil
		}
		return fmt.Errorf("failed to lookup api key: %w", err)
	}

	return db.RawDB().
		Unscoped().
		Where("id = ?", app.ID).
		Delete(&database.AuthorizedApp{}).
		Error
}
// BOJ 14939: turn off all the lights on a 10x10 board, where pressing a cell
// toggles it and its four neighbours. Brute-force the 2^10 choices for the
// first row, then greedily press row i to clear whatever is still lit in row i-1.
#include <bits/stdc++.h>
using namespace std;

int dx[] = {0, 0, 1, -1};
int dy[] = {1, -1, 0, 0};
char arr[12][12], cpy[12][12];

// Press the switch at (i, j): toggle the cell and its four neighbours.
void turn(int i, int j) {
    cpy[i][j] = (cpy[i][j] == 'O' ? '#' : 'O');
    for (int k = 4; k--;) {
        int nx = i + dx[k], ny = j + dy[k];
        cpy[nx][ny] = (cpy[nx][ny] == 'O' ? '#' : 'O');
    }
}

int solve() {
    int ans = 1e9;
    for (int c = 0; c < 1024; c++) {  // every subset of first-row presses
        int cnt = 0;
        memcpy(cpy, arr, sizeof(cpy));
        for (int i = 0; i < 10; i++) {
            if ((c & (1 << i)) != 0) {
                cnt++;
                turn(1, i + 1);
            }
        }
        // Each later row is forced: press (i, j) iff the cell above is on.
        for (int i = 2; i <= 10; i++) {
            for (int j = 1; j <= 10; j++) {
                if (cpy[i - 1][j] == 'O') {
                    cnt++;
                    turn(i, j);
                }
            }
        }
        // The choice is valid only if the last row ended up fully off.
        for (int i = 1; i <= 10; i++) {
            if (cpy[10][i] == 'O') break;
            if (i == 10) ans = min(ans, cnt);
        }
    }
    return ans == 1e9 ? -1 : ans;
}

int main() {
    for (int i = 1; i <= 10; i++) {
        cin >> (arr[i] + 1);  // read each row into columns 1..10
    }
    cout << solve();
}
import React from 'react';
import PropTypes from 'prop-types';
import {Renderer, RendererProps} from '../../factory';
import {observer} from 'mobx-react';
import {FormStore, IFormStore} from '../../store/form';
import {Api, SchemaNode, Schema, Action, ApiObject, Payload} from '../../types';
import {filter, evalExpression} from '../../utils/tpl';
import cx from 'classnames';
import getExprProperties from '../../utils/filter-schema';
import {
  promisify,
  difference,
  until,
  noop,
  isObject,
  isVisible,
  createObject,
  extendObject
} from '../../utils/helper';
import debounce = require('lodash/debounce');
import flatten = require('lodash/flatten');
import find = require('lodash/find');
import Scoped, {
  ScopedContext,
  IScopedContext,
  ScopedComponentType
} from '../../Scoped';
import {IComboStore} from '../../store/combo';
import qs = require('qs');
import {dataMapping} from '../../utils/tpl-builtin';
import {isApiOutdated, isEffectiveApi} from '../../utils/api';

export type FormGroup = FormSchema & {
  title?: string;
  className?: string;
};
export type FormGroupNode = FormGroup | FormGroupArray;
export interface FormGroupArray extends Array<FormGroupNode> {}

export interface FormSchema {
  fieldSetClassName?: string;
  tabsClassName?: string;
  controls?: SchemaNode;
  tabs?: FormGroupNode;
  fieldSet?: FormGroupNode;
}

export interface FormHorizontal {
  leftFixed?: boolean | string;
  left: string | number;
  right: string | number;
  offset: string | number;
}

export interface FormProps extends RendererProps, FormSchema {
  store: IFormStore;
  wrapperComponent: React.ReactType;
  title?: string; // Title
  submitText?: string;
  submitOnChange?: boolean; // Whether to submit as soon as any value changes.
  submitOnInit?: boolean;
  resetAfterSubmit?: boolean;
  initApi?: Api; // Can be used to set the initial data.
  initAsyncApi?: Api; // If the api takes too long, enable initAsyncApi to poll until it has really finished.
  initCheckInterval?: number;
  initFinishedField?: string;
  interval?: number;
  silentPolling?: boolean;
  stopAutoRefreshWhen?: string;
  api?: Api; // The api used for saving.
  asyncApi?: Api; // If the api takes too long, enable asyncApi to poll until it has really finished.
  checkInterval?: number;
  finishedField?: string;
  initFetch?: boolean; // Whether to fetch initially?
  initFetchOn?: string;
  className?: string;
  body?: SchemaNode;
  wrapWithPanel?: boolean;
  panelClassName?: string;
  mode?: 'normal' | 'inline' | 'horizontal' | 'row';
  affixFooter?: boolean;
  collapsable?: boolean;
  debug?: boolean;
  autoFocus?: boolean;
  horizontal: FormHorizontal;
  canAccessSuperData: boolean;
  persistData: boolean; // Enable local caching
  clearPersistDataAfterSubmit: boolean; // Clear the local cache after a successful submit
  trimValues?: boolean;
  onInit?: (values: object) => any;
  onReset?: (values: object) => void;
  onSubmit?: (values: object, action: any) => any;
  onChange?: (values: object, diff: object) => any;
  onFailed?: (reason: string, errors: any) => any;
  onFinished: (values: object, action: any) => any;
  onValidate: (values: object, form: any) => any;
  messages: {
    fetchSuccess?: string;
    fetchFailed?: string;
    saveSuccess?: string;
    saveFailed?: string;
    validateFailed?: string;
  };
}

export default class Form extends React.Component<FormProps, object> {
  static defaultProps = {
    title: '表单',
    submitText: '提交',
    initFetch: true,
    wrapWithPanel: true,
    mode: 'normal',
    collapsable: false,
    controlWidth: 'full',
    horizontal: {
      left: 2,
      right: 10,
      offset: 2
    },
    panelClassName: 'Panel--default',
    messages: {
      fetchFailed: '初始化失败',
      saveSuccess: '保存成功',
      saveFailed: '保存失败'
    },
    wrapperComponent: '',
    finishedField: 'finished',
    initFinishedField: 'finished'
  };
  static propsList: Array<string> = [
    'title',
    'controls',
    'tabs',
    'fieldSet',
    'submitText',
    'initFetch',
    'wrapWithPanel',
    'mode',
    'collapsable',
    'horizontal',
    'panelClassName',
    'messages',
    'wrapperComponent',
    'resetAfterSubmit',
    'submitOnInit',
    'submitOnChange',
    'onInit',
    'onReset',
    'onSubmit',
    'onChange',
    'onFailed',
    'onFinished',
    'canAccessSuperData'
  ];

  hooks: {
    [propName: string]: Array<() => Promise<any>>;
  } = {};
  asyncCancel: () => void;
  disposeOnValidate: () => void;
  shouldLoadInitApi: boolean = false;
  timer: NodeJS.Timeout;
  mounted: boolean;

  constructor(props: FormProps) {
    super(props);

    this.onInit = this.onInit.bind(this);
    this.handleAction = this.handleAction.bind(this);
    this.handleDialogConfirm = this.handleDialogConfirm.bind(this);
    this.handleDialogClose = this.handleDialogClose.bind(this);
    this.handleDrawerConfirm = this.handleDrawerConfirm.bind(this);
    this.handleDrawerClose = this.handleDrawerClose.bind(this);
    this.handleFormSubmit = this.handleFormSubmit.bind(this);
    this.validate = this.validate.bind(this);
    this.submit = this.submit.bind(this);
    this.addHook = this.addHook.bind(this);
    this.removeHook = this.removeHook.bind(this);
    this.handleChange = debounce(this.handleChange.bind(this), 250, {
      trailing: true,
      leading: false
    });
    this.renderFormItems = this.renderFormItems.bind(this);
    this.reload = this.reload.bind(this);
    this.silentReload = this.silentReload.bind(this);
    this.initInterval = this.initInterval.bind(this);
  }

  componentWillMount() {
    const {store, canAccessSuperData, persistData} = this.props;
    store.setCanAccessSuperData(canAccessSuperData !== false);
    persistData && store.getPersistData();

    if (
      store &&
      store.parentStore &&
      store.parentStore.storeType === 'ComboStore'
    ) {
      const combo = store.parentStore as IComboStore;
      combo.addForm(store);
      combo.forms.forEach(item =>
        item.items.forEach(item => item.unique && item.syncOptions())
      );
    }
  }

  componentDidMount() {
    const {
      initApi,
      initFetch,
      initFetchOn,
      initAsyncApi,
      initFinishedField,
      initCheckInterval,
      store,
      messages: {fetchSuccess, fetchFailed},
      onValidate
    } = this.props;

    this.mounted = true;

    if (onValidate) {
      const finalValidate = promisify(onValidate);
      this.disposeOnValidate = this.addHook(async () => {
        const result = await finalValidate(store.data, store);

        if (result && isObject(result)) {
          Object.keys(result).forEach(key => {
            let msg = result[key];
            const item = store.getItemByName(key);

            // There is no such formItem
            if (!item) {
              return;
            }

            if (msg) {
              msg = Array.isArray(msg) ? msg : [msg];
              item.addError(msg);
            } else {
              item.clearError();
            }
          });
        }
      });
    }

    if (isEffectiveApi(initApi, store.data, initFetch, initFetchOn)) {
      store
        .fetchInitData(initApi as any, store.data, {
          successMessage: fetchSuccess,
          errorMessage: fetchFailed,
          onSuccess: () => {
            if (
              !isEffectiveApi(initAsyncApi, store.data) ||
              store.data[initFinishedField || 'finished']
            ) {
              return;
            }

            return until(
              () => store.checkRemote(initAsyncApi, store.data),
              (ret: any) => ret && ret[initFinishedField || 'finished'],
              cancel => (this.asyncCancel = cancel),
              initCheckInterval
            );
          }
        })
        .then(this.initInterval)
        .then(this.onInit);
    } else {
      this.onInit();
    }
  }

  componentDidUpdate(prevProps: FormProps) {
    const props = this.props;
    const store = props.store;

    if (
      isApiOutdated(
        prevProps.initApi,
        props.initApi,
        prevProps.data,
        props.data
      )
    ) {
      const {fetchSuccess, fetchFailed} = props;

      store
        .fetchData(props.initApi as Api, store.data, {
          successMessage: fetchSuccess,
          errorMessage: fetchFailed
        })
        .then(this.initInterval);
    }
  }

  componentWillUnmount() {
    this.mounted = false;
    clearTimeout(this.timer);
    (this.handleChange as any).cancel();
    this.asyncCancel && this.asyncCancel();
    this.disposeOnValidate && this.disposeOnValidate();

    const store = this.props.store;
    if (
      store &&
      store.parentStore &&
      store.parentStore.storeType === 'ComboStore'
    ) {
      const combo = store.parentStore as IComboStore;
      combo.removeForm(store);
    }
  }

  async onInit() {
    const {onInit, store, submitOnInit} = this.props;

    // Grab the data first, mainly out of concern that the form gets tampered
    // with by something and the change then gets applied anyway. We ran into
    // this problem before, hence the copy. But then the options loadOptions
    // default values stopped working, so for now both need to be set, inside
    // the init hook.
    const data = {...store.data};
    store.setInited(true);
    const hooks: Array<(data: any) => Promise<any>> = this.hooks['init'] || [];
    await Promise.all(hooks.map(hook => hook(data)));

    onInit && onInit(extendObject(store.data, data));

    submitOnInit &&
      this.handleAction(
        undefined,
        {
          type: 'submit'
        },
        store.data
      );
  }

  reload(query?: any, silent?: boolean) {
    if (query) {
      return this.receive(query);
    }

    const {
      store,
      initApi,
      initAsyncApi,
      initFinishedField,
      messages: {fetchSuccess, fetchFailed}
    } = this.props;

    isEffectiveApi(initAsyncApi, store.data) &&
      store.updateData({
        [initFinishedField || 'finished']: false
      });

    isEffectiveApi(initApi, store.data)
      ? store
          .fetchInitData(initApi, store.data, {
            successMessage: fetchSuccess,
            errorMessage: fetchFailed,
            silent,
            onSuccess: () => {
              if (
                !isEffectiveApi(initAsyncApi, store.data) ||
                store.data[initFinishedField || 'finished']
              ) {
                return;
              }

              return until(
                () => store.checkRemote(initAsyncApi, store.data),
                (ret: any) => ret && ret[initFinishedField || 'finished'],
                cancel => (this.asyncCancel = cancel)
              );
            }
          })
          .then(this.initInterval)
          .then(() => store.reset(undefined, false))
      : store.reset(undefined, false);
  }

  receive(values: object) {
    const {store} = this.props;

    store.updateData(values);
    this.reload();
  }

  silentReload(target?: string, query?: any) {
    this.reload(query, true);
  }

  initInterval(value: any) {
    const {interval, silentPolling, stopAutoRefreshWhen, data} = this.props;

    clearTimeout(this.timer);
    interval &&
      this.mounted &&
      (!stopAutoRefreshWhen || !evalExpression(stopAutoRefreshWhen, data)) &&
      (this.timer = setTimeout(
        silentPolling ? this.silentReload : this.reload,
        Math.max(interval, 3000)
      ));
    return value;
  }

  isValidated() {
    return this.props.store.validated;
  }

  validate(forceValidate?: boolean): Promise<boolean> {
    const {store} = this.props;

    return store.validate(this.hooks['validate'] || [], forceValidate);
  }

  clearErrors() {
    const {store} = this.props;

    return store.clearErrors();
  }

  submit(fn?: (values: object) => Promise<any>): Promise<any> {
    const {store, messages} = this.props;

    return store.submit(
      fn,
      this.hooks['validate'] || [],
      messages && messages.validateFailed
    );
  }

  reset() {
    const {store, onReset} = this.props;
    store.reset(onReset);
  }

  addHook(fn: () => any, type: string = 'validate') {
    this.hooks[type] = this.hooks[type] || [];
    this.hooks[type].push(promisify(fn));
    return () => {
      this.removeHook(fn, type);
      fn = noop;
    };
  }

  removeHook(fn: () => any, type: string = 'validate') {
    const hooks = this.hooks[type];

    if (!hooks) {
      return;
    }

    for (let i = 0, len = hooks.length; i < len; i++) {
      let hook = hooks[i];

      if ((hook as any).raw === fn) {
        hooks.splice(i, 1);
        len--;
        i--;
      }
    }
  }

  handleChange(value: any, name: string, submit: boolean) {
    const {onChange, store, submitOnChange} = this.props;

    onChange && onChange(store.data, difference(store.data, store.pristine));

    (submit || submitOnChange) &&
      this.handleAction(
        undefined,
        {
          type: 'submit'
        },
        store.data
      );
  }

  handleFormSubmit(e: React.UIEvent<any>) {
    e.preventDefault();
    return this.handleAction(
      e,
      {
        type: 'submit'
      },
      this.props.store.data
    );
  }

  handleAction(
    e: React.UIEvent<any> | void,
    action: Action,
    data: object,
    throwErrors: boolean = false,
    delegate?: boolean
  ): any {
    const {
      store,
      onSubmit,
      api,
      asyncApi,
      finishedField,
      checkInterval,
      messages: {saveSuccess, saveFailed},
      resetAfterSubmit,
      onAction,
      onSaved,
      onReset,
      onFinished,
      onFailed,
      redirect,
      reload,
      target,
      env,
      onChange,
      clearPersistDataAfterSubmit,
      trimValues
    } = this.props;

    if (trimValues) {
      store.trimValues();
    }

    if (Array.isArray(action.required) && action.required.length) {
      return store.validateFields(action.required).then(result => {
        if (!result) {
          env.notify('error', '依赖的部分字段没有通过验证,请注意填写!');
        } else {
          this.handleAction(
            e,
            {...action, required: undefined},
            data,
            throwErrors,
            delegate
          );
        }
      });
    }

    delegate || store.setCurrentAction(action);

    if (
      action.type === 'submit' ||
      action.actionType === 'submit' ||
      action.actionType === 'confirm'
    ) {
      return this.submit((values): any => {
        if (onSubmit && onSubmit(values, action) === false) {
          return Promise.resolve(values);
        }

        if (target) {
          this.submitToTarget(target, values);
        } else if (action.actionType === 'reload') {
          action.target && this.reloadTarget(action.target, values);
        } else if (action.actionType === 'dialog') {
          store.openDialog(data);
        } else if (action.actionType === 'drawer') {
          store.openDrawer(data);
        } else if (isEffectiveApi(action.api || api, values)) {
          let finalAsyncApi = action.asyncApi || asyncApi;

          isEffectiveApi(finalAsyncApi, store.data) &&
            store.updateData({
              [finishedField || 'finished']: false
            });

          return store
            .saveRemote(action.api || (api as Api), values, {
              successMessage: saveSuccess,
              errorMessage: saveFailed,
              onSuccess: () => {
                if (
                  !isEffectiveApi(finalAsyncApi, store.data) ||
                  store.data[finishedField || 'finished']
                ) {
                  return;
                }

                return until(
                  () => store.checkRemote(finalAsyncApi as Api, store.data),
                  (ret: any) => ret && ret[finishedField || 'finished'],
                  cancel => (this.asyncCancel = cancel),
                  checkInterval
                );
              }
            })
            .then(async response => {
              onSaved && onSaved(values, response);

              // submit also supports feedback
              if (action.feedback && isVisible(action.feedback, store.data)) {
                await this.openFeedback(action.feedback, store.data);
              }

              return values;
            });
        }

        return Promise.resolve(values);
      })
        .then(values => {
          if (onFinished && onFinished(values, action) === false) {
            return values;
          }

          resetAfterSubmit && store.reset(onReset);
          clearPersistDataAfterSubmit && store.clearPersistData();

          if (action.redirect || redirect) {
            env.updateLocation(filter(action.redirect || redirect, store.data));
          } else if (action.reload || reload) {
            this.reloadTarget(action.reload || reload, store.data);
          }

          return values;
        })
        .catch(reason => {
          onFailed && onFailed(reason, store.errors);

          if (throwErrors) {
            throw reason;
          }
        });
    } else if (action.type === 'reset') {
      store.reset(onReset);
    } else if (action.actionType === 'dialog') {
      store.openDialog(data);
    } else if (action.actionType === 'drawer') {
      store.openDrawer(data);
    } else if (action.actionType === 'ajax') {
      if (!isEffectiveApi(action.api)) {
        return env.alert(`当 actionType 为 ajax 时,请设置 api 属性`);
      }

      return store
        .saveRemote(action.api as Api, data, {
          successMessage:
            (action.messages && action.messages.success) || saveSuccess,
          errorMessage:
            (action.messages && action.messages.failed) || saveFailed
        })
        .then(async response => {
          response &&
            onChange &&
            onChange(store.data, difference(store.data, store.pristine));
          if (store.validated) {
            await this.validate(true);
          }

          if (action.feedback && isVisible(action.feedback, store.data)) {
            await this.openFeedback(action.feedback, store.data);
          }

          action.redirect &&
            env.updateLocation(filter(action.redirect, store.data));
          action.reload && this.reloadTarget(action.reload, store.data);
        })
        .catch(() => {});
    } else if (action.actionType === 'reload') {
      action.target && this.reloadTarget(action.target, data);
    } else if (onAction) {
      // Hand anything unrecognized to the upper layer.
      return onAction(e, action, data, throwErrors);
    }
  }

  handleDialogConfirm(
    values: object[],
    action: Action,
    ctx: any,
    targets: Array<any>
  ) {
    const {store, onChange} = this.props;

    if (
      (action.mergeData || store.action.mergeData) &&
      values.length === 1 &&
      values[0] &&
      targets[0].props.type === 'form'
    ) {
      store.updateData(values[0]);
      onChange && onChange(store.data, difference(store.data, store.pristine));
    }

    store.closeDialog(true);
  }

  handleDialogClose() {
    const {store} = this.props;
    store.closeDialog(false);
  }

  handleDrawerConfirm(
    values: object[],
    action: Action,
    ctx: any,
    targets: Array<any>
  ) {
    const {store, onChange} = this.props;

    if (
      (action.mergeData || store.action.mergeData) &&
      values.length === 1 &&
      values[0] &&
      targets[0].props.type === 'form'
    ) {
      store.updateData(values[0]);
      onChange && onChange(store.data, difference(store.data, store.pristine));
    }

    store.closeDrawer(true);
  }

  handleDrawerClose() {
    const {store} = this.props;
    store.closeDrawer(false);
  }

  submitToTarget(target: string, values: object) {
    // Will be overridden
  }

  reloadTarget(target: string, data?: any) {
    // Will be overridden
  }

  openFeedback(dialog: any, ctx: any) {
    return new Promise(resolve => {
      const {store} = this.props;
      store.setCurrentAction({
        type: 'button',
        actionType: 'dialog',
        dialog: dialog
      });
      store.openDialog(ctx, undefined, confirmed => {
        resolve(confirmed);
      });
    });
  }

  buildActions() {
    const {actions, submitText, controls} = this.props;

    if (
      typeof actions !== 'undefined' ||
      !submitText ||
      (Array.isArray(controls) &&
        controls.some(
          item =>
            !!~['submit', 'button', 'reset', 'button-group'].indexOf(
              (item as Schema).type
            )
        ))
    ) {
      return actions;
    }

    return [
      {
        type: 'submit',
        label: submitText,
        primary: true
      }
    ];
  }

  renderFormItems(
    schema: FormSchema,
    region: string = '',
    otherProps: Partial<FormProps> = {}
  ): React.ReactNode {
    return this.renderControls(
      schema.controls as SchemaNode,
      region,
      otherProps
    );

    // return schema.tabs ? this.renderTabs(schema.tabs, schema, region)
    // : schema.fieldSet ? this.renderFiledSet(schema.fieldSet, schema, region) : this.renderControls(schema.controls as SchemaNode, schema, region);
  }

  renderControls(
    controls: SchemaNode,
    region: string,
    otherProps: Partial<FormProps> = {}
  ): React.ReactNode {
    controls = controls || [];

    if (!Array.isArray(controls)) {
      controls = [controls];
    }

    if (this.props.mode === 'row') {
      const ns = this.props.classPrefix;

      controls = flatten(controls).filter(item => {
        if ((item as Schema).hidden || (item as Schema).visible === false) {
          return false;
        }

        const exprProps = getExprProperties(
          item as Schema,
          this.props.store.data
        );
        if (exprProps.hidden || exprProps.visible === false) {
          return false;
        }

        return true;
      });

      if (!controls.length) {
        return null;
      }

      return (
        <div className={`${ns}Form-row`}>
          {controls.map((control, key) =>
            ~['hidden', 'formula'].indexOf((control as any).type) ||
            (control as any).mode === 'inline' ? (
              this.renderControl(control, key, otherProps)
            ) : (
              <div
                key={key}
                className={cx(
                  `${ns}Form-col`,
                  (control as Schema).columnClassName
                )}
              >
                {this.renderControl(control, '', {
                  ...otherProps,
                  mode: 'row'
                })}
              </div>
            )
          )}
        </div>
      );
    }

    return controls.map((control, key) =>
      this.renderControl(control, key, otherProps, region)
    );
  }

  renderControl(
    control: SchemaNode,
    key: any = '',
    otherProps: Partial<FormProps> = {},
    region: string = ''
  ): React.ReactNode {
    if (!control) {
      return null;
    } else if (typeof control === 'string') {
      control = {
        type: 'tpl',
        tpl: control
      };
    }

    const props = {
      ...this.props,
      ...otherProps
    };
    const form = this.props.store;
    const {
      render,
      mode,
      horizontal,
      store,
      disabled,
      controlWidth,
      resolveDefinitions
    } = props;

    const subProps = {
      formStore: form,
      data: store.data,
      key,
      formInited: form.inited,
      formMode: mode,
      formHorizontal: horizontal,
      controlWidth,
      disabled: disabled || (control as Schema).disabled || form.loading,
      btnDisabled: form.loading || form.validating,
      onAction: this.handleAction,
      onChange: this.handleChange,
      addHook: this.addHook,
      removeHook: this.removeHook,
      renderFormItems: this.renderFormItems,
      formPristine: form.pristine
    };

    const subSchema: any =
      control && (control as Schema).type === 'control'
        ? control
        : {
            type: 'control',
            control
          };

    if (subSchema.control) {
      let control = subSchema.control as Schema;
      if (control.$ref) {
        subSchema.control = control = {
          ...resolveDefinitions(control.$ref),
          ...control
        };
        delete control.$ref;
      }

      control.hiddenOn && (subSchema.hiddenOn = control.hiddenOn);
      control.visibleOn && (subSchema.visibleOn = control.visibleOn);
    }

    return render(`${region ? `${region}/` : ''}${key}`, subSchema, subProps);
  }

  renderBody() {
    const {tabs, fieldSet, controls} = this.props;

    return this.renderFormItems({
      tabs,
      fieldSet,
      controls
    });
  }

  render() {
    const {
      className,
      wrapWithPanel,
      render,
      title,
      store,
      panelClassName,
      debug,
      headerClassName,
      footerClassName,
      actionsClassName,
      bodyClassName,
      classPrefix: ns,
      classnames: cx,
      $path,
      affixFooter,
      mode
    } = this.props;

    const WrapperComponent =
      this.props.wrapperComponent ||
      (/(?:\/|^)form\//.test($path as string) ? 'div' : 'form');

    let body = (
      <WrapperComponent
        onSubmit={this.handleFormSubmit}
        className={cx(`Form`, `Form--${mode || 'normal'}`, className)}
        noValidate
      >
        {debug ? (
          <pre>
            <code>{JSON.stringify(store.data, null, 2)}</code>
          </pre>
        ) : null}

        {this.renderBody()}

        {render(
          'modal',
          {
            ...((store.action as Action) &&
              ((store.action as Action).dialog as object)),
            type: 'dialog'
          },
          {
            key: 'dialog',
            data: store.dialogData,
            onConfirm: this.handleDialogConfirm,
            onClose: this.handleDialogClose,
            show: store.dialogOpen
          }
        )}

        {render(
          'modal',
          {
            ...((store.action as Action) &&
              ((store.action as Action).drawer as object)),
            type: 'drawer'
          },
          {
            key: 'drawer',
            data: store.drawerData,
            onConfirm: this.handleDrawerConfirm,
            onClose: this.handleDrawerClose,
            show: store.drawerOpen
          }
        )}
      </WrapperComponent>
    );

    if (wrapWithPanel) {
      body = render(
        'body',
        {
          type: 'panel',
          title: title
        },
        {
          className: cx(panelClassName, 'Panel--form'),
          children: body,
          actions: this.buildActions(),
          onAction: this.handleAction,
          disabled: store.loading,
          btnDisabled: store.loading || store.validating,
          headerClassName,
          footerClassName,
          actionsClassName,
          bodyClassName,
          affixFooter
        }
      ) as JSX.Element;
    }

    return body;
  }
}

@Renderer({
  test: (path: string) =>
    /(^|\/)form$/.test(path) &&
    !/(^|\/)form(?:\/.+)?\/control\/form$/.test(path),
  storeType: FormStore.name,
  name: 'form',
  isolateScope: true
})
export class FormRenderer extends Form {
  static contextType = ScopedContext;

  componentWillMount() {
    const scoped = this.context as IScopedContext;
    scoped.registerComponent(this);
    super.componentWillMount();
  }

  componentDidMount() {
    super.componentDidMount();

    if (this.props.autoFocus) {
      const scoped = this.context as IScopedContext;
      const inputs = scoped.getComponents();
      let focusableInput = find(
        inputs,
        input => input.focus
      ) as ScopedComponentType;
      focusableInput && setTimeout(() => focusableInput.focus!(), 200);
    }
  }

  componentWillUnmount() {
    const scoped = this.context as IScopedContext;
    scoped.unRegisterComponent(this);
  }

  doAction(action: Action, data: object, throwErrors: boolean = false) {
    return this.handleAction(undefined, action, data, throwErrors);
  }

  handleAction(
    e: React.UIEvent<any> | undefined,
    action: Action,
    ctx: object,
    throwErrors: boolean = false,
    delegate?: boolean
  ) {
    if (action.target && action.actionType !== 'reload') {
      const scoped = this.context as IScopedContext;

      return Promise.all(
        action.target.split(',').map(name => {
          let target = scoped.getComponentByName(name);

          return (
            target &&
            target.doAction &&
            target.doAction(
              {
                ...action,
                target: undefined
              },
              ctx,
              throwErrors
            )
          );
        })
      );
    } else {
      return super.handleAction(e, action, ctx, throwErrors, delegate);
    }
  }

  handleDialogConfirm(
    values: object[],
    action: Action,
    ctx: any,
    targets: Array<any>
  ) {
    super.handleDialogConfirm(values, action, ctx, targets);

    const store = this.props.store;
    const scoped = this.context as IScopedContext;
    if (action.reload) {
      scoped.reload(action.reload, ctx);
    } else if (store.action && store.action.reload) {
      scoped.reload(store.action.reload, ctx);
    }
  }

  submitToTarget(target: string, values: object) {
    const scoped = this.context as IScopedContext;
    scoped.send(target, values);
  }

  reloadTarget(target: string, data: any) {
    const scoped = this.context as IScopedContext;
    scoped.reload(target, data);
  }

  reload(target?: string, query?: any, ctx?: any) {
    if (query) {
      return this.receive(query);
    }

    const scoped = this.context as IScopedContext;
    let subPath: string = '';
    let idx: number;
    let subQuery: any = null;
    if (target && ~(idx = target.indexOf('.'))) {
      subPath = target.substring(idx + 1);
      target = target.substring(0, idx);
    }
    const idx2 = target ? target.indexOf('?') : -1;
    if (~idx2) {
      subQuery = dataMapping(
        qs.parse((target as string).substring(idx2 + 1)),
        ctx
      );
      target = (target as string).substring(0, idx2);
    }

    let component;
    if (
      target &&
      (component = scoped.getComponentByName(target)) &&
      component.reload
    ) {
      component.reload(subPath, subQuery, ctx);
    } else if (target === '*') {
      super.reload();
      const components = scoped.getComponents();
      components.forEach(
        (component: any) =>
          component.reload && component.reload('', subQuery, ctx)
      );
    } else {
      super.reload();
    }
  }

  receive(values: object, name?: string) {
    if (name) {
      const scoped = this.context as IScopedContext;
      const idx = name.indexOf('.');
      let subPath = '';

      if (~idx) {
        subPath = name.substring(1 + idx);
        name = name.substring(0, idx);
      }

      const component = scoped.getComponentByName(name);
      component && component.receive && component.receive(values, subPath);
      return;
    }

    return super.receive(values);
  }
}
package example.com.googleplay.utils;

/**
 * Created by root on 16-12-15.
 */
public class BitmapHelper {
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.web.util;

import com.navercorp.pinpoint.common.server.util.time.Range;

import java.util.concurrent.TimeUnit;

/**
 * @author emeroad
 */
public class TimeWindowDownSampler implements TimeWindowSampler {

    private static final long ONE_MINUTE = 6000 * 10;
    private static final long ONE_HOUR = TimeUnit.HOURS.toMillis(1);
    private static final long SIX_HOURS = TimeUnit.HOURS.toMillis(6);
    private static final long TWELVE_HOURS = TimeUnit.HOURS.toMillis(12);
    private static final long ONE_DAY = TimeUnit.DAYS.toMillis(1);
    private static final long TWO_DAY = TimeUnit.DAYS.toMillis(2);

    public static final TimeWindowSampler SAMPLER = new TimeWindowDownSampler();

    @Override
    public long getWindowSize(Range range) {
        final long diff = range.durationMillis();
        long size;
        if (diff <= ONE_HOUR) {
            size = ONE_MINUTE;
        } else if (diff <= SIX_HOURS) {
            size = ONE_MINUTE * 5;
        } else if (diff <= TWELVE_HOURS) {
            size = ONE_MINUTE * 10;
        } else if (diff <= ONE_DAY) {
            size = ONE_MINUTE * 20;
        } else if (diff <= TWO_DAY) {
            size = ONE_MINUTE * 30;
        } else {
            size = ONE_MINUTE * 60;
        }
        return size;
    }
}
// Assumed package: the test references package-level helpers such as
// buildStatsKeyExpectations(...), so it presumably lives alongside the
// classes under test.
package org.stajistics;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import org.jmock.Expectations;
import org.junit.Before;
import org.junit.Test;

/**
 * @author The Stajistics Project
 */
public class DefaultStatsKeyBuilderTest extends AbstractStajisticsTestCase {

    private StatsKey mockKey;
    private StatsKeyFactory mockKeyFactory;

    @Before
    public void setUp() {
        mockKey = mockery.mock(StatsKey.class);
        mockKeyFactory = new DefaultStatsKeyFactory(); // TODO: actually mock this
    }

    private void buildStatsKeyBuildCopyExpectations() {
        mockery.checking(new Expectations() {{
            ignoring(mockKey).buildCopy();
            will(returnValue(new DefaultStatsKeyBuilder(mockKey, mockKeyFactory)));
        }});
    }

    @Test
    public void testConstructWithNullNamespace() {
        StatsKeyBuilder builder = new DefaultStatsKeyBuilder(null, "test", mockKeyFactory);
        assertEquals(StatsConstants.DEFAULT_NAMESPACE, builder.newKey().getNamespace());
    }

    @Test
    public void testConstructWithNullName() {
        StatsKeyBuilder builder = new DefaultStatsKeyBuilder("namespace", (String) null, mockKeyFactory);
        assertEquals("<null>", builder.newKey().getName());
    }

    @Test
    public void testConstructWithNameAndNullKeyFactory() {
        StatsKeyBuilder builder = new DefaultStatsKeyBuilder("namespace", "test", null);
        StatsKey key = builder.newKey();
        // Assert no exceptions
        assertSame(NullStatsKeyBuilder.getInstance(), key.buildCopy());
    }

    @Test
    public void testConstructWithNullTemplate() {
        try {
            new DefaultStatsKeyBuilder((StatsKey) null, mockKeyFactory);
            fail("NullPointerException expected");
        } catch (NullPointerException npe) {
            assertEquals("template", npe.getMessage());
        }
    }

    @Test
    public void testConstructWithTemplateAndNullKeyFactory() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        StatsKeyBuilder builder = new DefaultStatsKeyBuilder(mockKey, null);
        StatsKey key = builder.newKey();
        assertSame(NullStatsKeyBuilder.getInstance(), key.buildCopy());
    }

    @Test
    public void testCopyKeyNotNull() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .newKey();
        assertTrue(key != null);
    }

    @Test
    public void testCopyKeyWithNullNameSuffix() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy().withNameSuffix(null).newKey();
        assertEquals("test.<null>", key.getName());
    }

    @Test
    public void testCopyKeyWithEmptyNameSuffix() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withNameSuffix("")
                              .newKey();
        assertEquals("test", key.getName());
    }

    @Test
    public void testCopyKeyWithNameSuffix1() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withNameSuffix("suffix")
                              .newKey();
        assertEquals("test.suffix", key.getName());
    }

    @Test
    public void testCopyKeyWithNameSuffix2() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withNameSuffix("suffix1")
                              .withNameSuffix("suffix2")
                              .newKey();
        assertEquals("test.suffix1.suffix2", key.getName());
    }

    @Test
    public void testCopyKeyWithAttribute1() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test", "value")
                              .newKey();
        assertEquals(1, key.getAttributeCount());
        assertEquals(1, key.getAttributes().size());
        assertEquals("value", key.getAttribute("test"));
        assertEquals("value", key.getAttributes().get("test"));
    }

    @Test
    public void testCopyKeyWithAttribute2() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test", "value")
                              .newKey();
        assertEquals(2, key.getAttributeCount());
        assertEquals(2, key.getAttributes().size());
        assertEquals("attribute", key.getAttribute("existing"));
        assertEquals("value", key.getAttribute("test"));
        assertEquals("attribute", key.getAttributes().get("existing"));
        assertEquals("value", key.getAttributes().get("test"));
    }

    @Test
    public void testCopyKeyWithTwoAttributes1() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test1", "value1")
                              .withAttribute("test2", "value2")
                              .newKey();
        assertEquals(2, key.getAttributeCount());
        assertEquals(2, key.getAttributes().size());
        assertEquals("value1", key.getAttribute("test1"));
        assertEquals("value2", key.getAttribute("test2"));
        assertEquals("value1", key.getAttributes().get("test1"));
        assertEquals("value2", key.getAttributes().get("test2"));
    }

    @Test
    public void testCopyKeyWithTwoAttributes2() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test1", "value1")
                              .withAttribute("test2", "value2")
                              .newKey();
        assertEquals(3, key.getAttributeCount());
        assertEquals(3, key.getAttributes().size());
        assertEquals("attribute", key.getAttribute("existing"));
        assertEquals("value1", key.getAttribute("test1"));
        assertEquals("value2", key.getAttribute("test2"));
        assertEquals("attribute", key.getAttributes().get("existing"));
        assertEquals("value1", key.getAttributes().get("test1"));
        assertEquals("value2", key.getAttributes().get("test2"));
    }

    @Test
    public void testCopyKeyWithThreeAttributes1() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test1", "value1")
                              .withAttribute("test2", "value2")
                              .withAttribute("test3", "value3")
                              .newKey();
        assertEquals(3, key.getAttributeCount());
        assertEquals(3, key.getAttributes().size());
        assertEquals("value1", key.getAttribute("test1"));
        assertEquals("value2", key.getAttribute("test2"));
        assertEquals("value3", key.getAttribute("test3"));
        assertEquals("value1", key.getAttributes().get("test1"));
        assertEquals("value2", key.getAttributes().get("test2"));
        assertEquals("value3", key.getAttributes().get("test3"));
    }

    @Test
    public void testCopyKeyWithThreeAttributes2() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("test1", "value1")
                              .withAttribute("test2", "value2")
                              .withAttribute("test3", "value3")
                              .newKey();
        assertEquals(4, key.getAttributeCount());
        assertEquals(4, key.getAttributes().size());
        assertEquals("attribute", key.getAttribute("existing"));
        assertEquals("value1", key.getAttribute("test1"));
        assertEquals("value2", key.getAttribute("test2"));
        assertEquals("value3", key.getAttribute("test3"));
        assertEquals("attribute", key.getAttributes().get("existing"));
        assertEquals("value1", key.getAttributes().get("test1"));
        assertEquals("value2", key.getAttributes().get("test2"));
        assertEquals("value3", key.getAttributes().get("test3"));
    }

    @Test
    public void testWithoutAttribute1() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withoutAttribute("existing")
                              .newKey();
        assertEquals(0, key.getAttributeCount());
        assertTrue(key.getAttributes().isEmpty());
    }

    @Test
    public void testWithoutAttribute2() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withoutAttribute("existing")
                              .withAttribute("new", "thing")
                              .newKey();
        assertEquals(1, key.getAttributeCount());
        assertEquals(1, key.getAttributes().size());
        assertNull(key.getAttribute("existing"));
        assertEquals("thing", key.getAttribute("new"));
    }

    @Test
    public void testWithoutAttribute3() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("new", "thing")
                              .withoutAttribute("existing")
                              .newKey();
        assertEquals(1, key.getAttributeCount());
        assertEquals(1, key.getAttributes().size());
        assertNull(key.getAttribute("existing"));
        assertEquals("thing", key.getAttribute("new"));
    }

    @Test
    public void testWithoutAttributeWithNullName() {
        buildStatsKeyExpectations(mockery, mockKey, "test");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("new", "thing")
                              .withoutAttribute(null)
                              .newKey();
        assertEquals(1, key.getAttributeCount());
        assertEquals(1, key.getAttributes().size());
        assertEquals("thing", key.getAttribute("new"));
    }

    @Test
    public void testPutAttributeWithNullName() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute(null, "value")
                              .newKey();
        assertEquals("value", key.getAttribute("<null>"));
    }

    @Test
    public void testPutAttributeWithNullStringValue() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("name", (String) null)
                              .newKey();
        assertNull(key.getAttribute("name"));
    }

    @Test
    public void testPutAttributeWithNullBooleanValue() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("name", (Boolean) null)
                              .newKey();
        assertNull(key.getAttribute("name"));
    }

    @Test
    public void testPutAttributeWithNullLongValue() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("name", (Long) null)
                              .newKey();
        assertNull(key.getAttribute("name"));
    }

    @Test
    public void testPutAttributeWithNullIntegerValue() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKey key = mockKey.buildCopy()
                              .withAttribute("name", (Integer) null)
                              .newKey();
        assertNull(key.getAttribute("name"));
    }

    @Test
    public void testWithAttributeReturnsThis() {
        buildStatsKeyExpectations(mockery, mockKey, "test", "existing", "attribute");
        buildStatsKeyBuildCopyExpectations();

        StatsKeyBuilder kb = mockKey.buildCopy();
        assertSame(kb, kb.withAttribute("string", "string"));
        assertSame(kb, kb.withAttribute("int", 0));
        assertSame(kb, kb.withAttribute("boolean", false));
        assertSame(kb, kb.withAttribute("long", 0L));
    }
}
/**
 * Converts an integer index into a matrix with these dimensions to the
 * corresponding vector index. The effect of this method is the same as
 * calling this.convert(index, new int[this.numDimensions()]).
 *
 * @return an array of ints that represents a vector index corresponding to the
 *         specified integer index into a this.dimensions[0]x...xthis.dimensions[n-1] matrix
 * @throws IndexOutOfBoundsException !validate(index)
 */
public final int[] convert(int index) {
    final int[] vector = new int[numDimensions()];
    convert(index, vector);
    return vector;
}
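The conversion being wrapped here is a standard mixed-radix decomposition: divide the flat index by each dimension's size and keep the remainders as coordinates. A small sketch of that arithmetic (this assumes row-major ordering with the last dimension varying fastest, which the two-argument convert overload is not shown to confirm):

import math

def index_to_vector(index, dims):
    """Decompose a flat index into per-dimension coordinates (row-major).

    Example: dims = [3, 4, 5], index 37 -> [1, 3, 2],
    since 37 = 1*(4*5) + 3*5 + 2.
    """
    if not 0 <= index < math.prod(dims):
        raise IndexError(index)
    vector = [0] * len(dims)
    for i in range(len(dims) - 1, -1, -1):  # last dimension varies fastest
        vector[i] = index % dims[i]
        index //= dims[i]
    return vector

assert index_to_vector(37, [3, 4, 5]) == [1, 3, 2]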
Former Coalition adviser Tristan Weston charged with impersonating lawyer

Former Victorian Coalition adviser Tristan Weston has appeared in the Melbourne Magistrates Court on charges of impersonating a lawyer. Weston was a police officer and an adviser to former deputy premier Peter Ryan. He was also a key figure in the tumult surrounding a rift in Victoria Police command and the demise of chief commissioner Simon Overland in 2012.

Court documents showed Weston was charged in March by the Legal Services Board with four offences dating back to January 2014. The board's chief executive officer, Michael McGarvie, alleged Weston, 44, "engaged in legal practice whilst not being an Australian legal practitioner". It was alleged he attended the Melbourne East police station as a lawyer and sat in on a police interview for someone charged with handling stolen goods, after a man's iPad went missing from a city cafe. The board alleged the accused person gave a no-comment interview after Weston passed him a note.

The charge sheets also said Weston claimed to be working for law firm Maitland Lawyers and later refused police offers to withdraw or resolve the theft charge. He was also accused of appearing as an instructing lawyer at the bar table during the accused's court case. "He ... corresponded by email dated 16 May 2014 giving a legal opinion on the strength of the case and prospects of success and seeking settlement of the matter," the case summary said. "The email was sent from his Maitland Lawyers email address and signed in his name and written in the first person."

Weston was interviewed at the office of the Legal Services Board in January this year. At a mention in court on Wednesday, Weston's lawyer Michael McNamara said requests for information from police about the case had gone unanswered. He said Weston was attempting to become a lawyer and the case against him was preventing him from succeeding. "Don't underestimate how big this is, Your Honour," Mr McNamara said. "My client's livelihood as a lawyer is on the line. It's stopping him from being admitted as a lawyer. He's been stuck with these allegations for a very long time."

The parties will return to court next month.
import java.util.Map.Entry;

/**
 * Traffic rule configuration YAML swapper.
 */
public final class TrafficRuleConfigurationYamlSwapper implements YamlRuleConfigurationSwapper<YamlTrafficRuleConfiguration, TrafficRuleConfiguration> {

    private final TrafficStrategyConfigurationYamlSwapper strategySwapper = new TrafficStrategyConfigurationYamlSwapper();

    private final ShardingSphereAlgorithmConfigurationYamlSwapper algorithmSwapper = new ShardingSphereAlgorithmConfigurationYamlSwapper();

    @Override
    public YamlTrafficRuleConfiguration swapToYamlConfiguration(final TrafficRuleConfiguration data) {
        YamlTrafficRuleConfiguration result = new YamlTrafficRuleConfiguration();
        data.getTrafficStrategies().forEach(each -> result.getTrafficStrategies().put(each.getName(), strategySwapper.swapToYamlConfiguration(each)));
        setYamlAlgorithms(data, result);
        return result;
    }

    private void setYamlAlgorithms(final TrafficRuleConfiguration data, final YamlTrafficRuleConfiguration yamlConfig) {
        if (null != data.getTrafficAlgorithms()) {
            data.getTrafficAlgorithms().forEach((key, value) -> yamlConfig.getTrafficAlgorithms().put(key, algorithmSwapper.swapToYamlConfiguration(value)));
        }
        if (null != data.getLoadBalancers()) {
            data.getLoadBalancers().forEach((key, value) -> yamlConfig.getLoadBalancers().put(key, algorithmSwapper.swapToYamlConfiguration(value)));
        }
    }

    @Override
    public TrafficRuleConfiguration swapToObject(final YamlTrafficRuleConfiguration yamlConfig) {
        TrafficRuleConfiguration result = new TrafficRuleConfiguration();
        for (Entry<String, YamlTrafficStrategyConfiguration> entry : yamlConfig.getTrafficStrategies().entrySet()) {
            YamlTrafficStrategyConfiguration strategyConfig = entry.getValue();
            strategyConfig.setName(entry.getKey());
            result.getTrafficStrategies().add(strategySwapper.swapToObject(strategyConfig));
        }
        setAlgorithms(yamlConfig, result);
        return result;
    }

    private void setAlgorithms(final YamlTrafficRuleConfiguration yamlConfig, final TrafficRuleConfiguration ruleConfig) {
        if (null != yamlConfig.getTrafficAlgorithms()) {
            yamlConfig.getTrafficAlgorithms().forEach((key, value) -> ruleConfig.getTrafficAlgorithms().put(key, algorithmSwapper.swapToObject(value)));
        }
        if (null != yamlConfig.getLoadBalancers()) {
            yamlConfig.getLoadBalancers().forEach((key, value) -> ruleConfig.getLoadBalancers().put(key, algorithmSwapper.swapToObject(value)));
        }
    }

    @Override
    public Class<TrafficRuleConfiguration> getTypeClass() {
        return TrafficRuleConfiguration.class;
    }

    @Override
    public String getRuleTagName() {
        return "TRAFFIC";
    }

    @Override
    public int getOrder() {
        return TrafficOrder.ORDER;
    }
}
#include "ChannelAdapter.h" #include "Conversions.h" #include "SOEHandlerAdapter.h" #include "OutstationCommandHandlerAdapter.h" #include "OutstationTimeWriteAdapter.h" #include "MasterAdapter.h" #include "OutstationAdapter.h" #include "DeleteAnything.h" #include <asiopal/UTCTimeSource.h> using namespace System::Collections::Generic; namespace DNP3 { namespace Adapter { ChannelAdapter::ChannelAdapter() { pMultiplexer = new EventMultiplexer<opendnp3::ChannelState, DNP3::Interface::ChannelState>(std::bind(&Conversions::convertChannelState, std::placeholders::_1)); } ChannelAdapter::~ChannelAdapter() { delete pMultiplexer; } void ChannelAdapter::SetChannel(asiodnp3::IChannel* pChannel_) { pChannel = pChannel_; } LogFilter ChannelAdapter::GetLogFilters() { return LogFilter(pChannel->GetLogFilters().GetBitfield()); } void ChannelAdapter::SetLogFilters(LogFilter filters) { openpal::LogFilters flags(filters.Flags); pChannel->SetLogFilters(flags); } void ChannelAdapter::AddStateListener(System::Action<ChannelState>^ listener) { pMultiplexer->AddListener(listener); } void CallbackListener(gcroot < System::Action<ChannelState> ^ >* listener, opendnp3::ChannelState aState) { ChannelState state = Conversions::convertChannelState(aState); (*listener)->Invoke(state); } IMaster^ ChannelAdapter::AddMaster(System::String^ loggerId, ISOEHandler^ publisher, MasterStackConfig^ config) { std::string stdLoggerId = Conversions::convertString(loggerId); MasterMeasurementHandlerWrapper^ wrapper = gcnew MasterMeasurementHandlerWrapper(publisher); opendnp3::MasterStackConfig cfg = Conversions::convertConfig(config); auto pMaster = pChannel->AddMaster(stdLoggerId.c_str(), wrapper->Get(), asiopal::UTCTimeSource::Inst(), cfg); // TODO expose time source if (pMaster == nullptr) { return nullptr; } else { return gcnew MasterAdapter(pMaster); } } IOutstation^ ChannelAdapter::AddOutstation(System::String^ loggerId, ICommandHandler^ cmdHandler, ITimeWriteHandler^ timeHandler, OutstationStackConfig^ config) { std::string stdLoggerId = Conversions::convertString(loggerId); OutstationCommandHandlerWrapper^ cmdWrapper = gcnew OutstationCommandHandlerWrapper(cmdHandler); OutstationTimeWriteWrapper^ timeWrapper = gcnew OutstationTimeWriteWrapper(timeHandler); opendnp3::OutstationStackConfig cfg = Conversions::convertConfig(config); auto pOutstation = pChannel->AddOutstation(stdLoggerId.c_str(), cmdWrapper->Get(), timeWrapper->Get(), Conversions::convertConfig(config)); if (pOutstation == nullptr) { return nullptr; } else { return gcnew OutstationAdapter(pOutstation); } } void ChannelAdapter::Shutdown() { pChannel->BeginShutdown(); } openpal::IEventHandler<opendnp3::ChannelState>* ChannelAdapter::GetEventHandler() { return this->pMultiplexer; } } }
/**
 * Main loop of the timer.
 *
 * If the timer expires the method TriggerAction() is called.
 **/
void *NewSimulatorTimerThread::Run() {
   cTime now;
   int delta;

   m_start = cTime::Now();
   m_running = true;
   m_exit = false;

   stdlog << "DBG: Run Timerloop - with timeout " << m_timeout << "\n";

   while( !m_exit ) {
      now = cTime::Now();
      now -= m_start;
      delta = m_timeout - now.GetMsec();

      if ( delta <= 0 ) {
         m_exit = TriggerAction();
      } else if ( delta <= THREAD_SLEEPTIME / 1000 ) {
         usleep( delta*1000 );
      } else {
         usleep( THREAD_SLEEPTIME );
      }
   }

   m_running = false;
   stdlog << "DBG: Exit TimerLoop\n";

   return 0;
}
// Get calls the service of CPI.
func (h *HprePostBody) Get() (*RespHprePost, error) {
	url := config.GetURL(config.ConfiguratorURI)
	res, err := http.Get(url, h)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	if res.StatusCode != 200 {
		fmt.Println(res.StatusCode)
		respBody, err := ioutil.ReadAll(res.Body)
		fmt.Println(string(respBody), err)
		return nil, fmt.Errorf("get hpre support failed")
	}

	respBody, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}

	respPostIns := new(RespHprePost)
	err = json.Unmarshal(respBody, respPostIns)
	if err != nil {
		return nil, err
	}
	return respPostIns, nil
}
It Takes an Act of Congress, or Does It? Naming Practices of VA Facilities

The Veterans Health Administration, a division of the US Department of Veterans Affairs, oversees more than 1,700 healthcare facilities that provide support for more than eight million veterans annually. Of these facilities, grouped under geographically organized networks, 89 are named after individuals. This research examines why these individuals have been honored. It also examines the legislation involved in naming the facilities and the other means by which Veterans Affairs healthcare facilities have been named. Since veterans’ healthcare is under scrutiny, an investigation into the naming practices, the relationship between the Veterans Health Administration and Congress, and those who strongly advocate naming facilities may provide insight into the manner in which veterans are honored.
/*
 * Return true if there is at least one active Datanode statement, so acquired
 * Datanode connections should not be released
 */
bool
HaveActiveDatanodeStatements(void)
{
	HASH_SEQ_STATUS seq;
	DatanodeStatement *entry;

	/* Nothing to do if no statements have been prepared */
	if (!datanode_queries)
		return false;

	/* Walk the hash table and stop at the first active statement */
	hash_seq_init(&seq, datanode_queries);
	while ((entry = hash_seq_search(&seq)) != NULL)
	{
		if (entry->number_of_nodes > 0)
		{
			hash_seq_term(&seq);
			return true;
		}
	}

	return false;
}
Rabbi Dr. Natan Slifkin, noted author, popular speaker, and director of the Biblical Museum of Natural History in Bet Shemesh, has been a frequent guest in the Five Towns over the years. In this interview, he discusses the forthcoming “Feast of Exotic Curiosities” at the museum. 5TJT: What is the background to this “Feast of Exotic Curiosities”? RNS: In the nearly three years since we opened the Biblical Museum of Natural History, we’ve had over 30,000 visitors, and we are branching beyond the standard museum experience into special events. Last year, we decided to produce an unusual experience, one that would relate to our mission of teaching about Torah and nature: “A Feast of Biblical Flora and Fauna.” It was a banquet of foods that were eaten by our ancestors in Tanach; soft matzah, doves, geese, the quails that the Bnei Yisrael ate in the wilderness, goat, sheep–even a whole roasted deer, as was served at King Solomon’s table daily, which had to be carried in by four people! Each dish was introduced with a presentation explaining the Torah significance of that dish. The event was amazing, and we wanted to do it again, but in a different way so as to make each event unique. So this year, instead of discussing and serving exotic foods that are in Tanach, we will be discussing and serving exotic foods that are not in Tanach, and which are of great interest from a halachic and gastronomic perspective. 5TJT: Can you give some examples? RNS: We are trying to keep the menu somewhat of a secret, and we haven’t finalized all of the dishes yet; there are some complicated exotic items about which we are still working on clarifying the halachic status. But I can say that one of the species of birds that we will be serving is pheasant. This is considered to be one of the most gourmet species, an ingredient of haute cuisine. However, many people do not realize that it is kosher, and it is certainly not usually available for the kosher consumer. Amongst mammals, we will be serving Asian water buffalo–again, something that is 100% kosher, and yet most Jews have never eaten it. This is an animal which is mentioned on numerous occasions in the Gemara and is of great halachic interest, due to the difficulty of determining whether it is a domestic or wild animal. 5TJT: Clearly, there are many more kosher species than just cows and chickens. But surely, at the end of the day, there are many more that we are never going to be able to taste. RNS: There are many things that we can never eat, but that doesn’t mean that we can’t enjoy their taste. The Gemara says that for everything that Hashem has prohibited, there is a kosher equivalent, including pork and meat with milk. We will be serving the kosher equivalent of both of those at the dinner–the Gemara names a certain fish as tasting like pork, but we have come up with a new bacon equivalent. We will also be serving “kosher oysters,” which are made with something very surprising! 5TJT: And that would be . . . ? RNS: Sorry, I can’t reveal it at this point, because there could be agitators who will make a furor about it, and I try to stay away from controversy . . . But I will say that the ingredients for our “kosher oyster” dish have been certified as kosher by the halachic authorities for major kashrus agencies. 5TJT: Is it easy to prepare such dishes? 
RNS: Chefs and caterers don't have experience with such things, so when we planned this event, we knew that there was only one person to turn to: Chef Moshe Basson, internationally renowned as "Israel's Biblical Chef." He searches Tanach and the local countryside for clues to traditional dishes, which he recreates. Chef Basson was knighted in Italy for his resurrection of Biblical cuisine. He is thrilled by the challenge that we set for him with this event!

5TJT: Where do you get these animals and ingredients from?

RNS: We've had things brought in from all over the world for this event. We've been raising some of the exotic species at the museum. It's been quite a challenge; one of our liveliest creatures at the museum, Cutie the coati (a sort of large raccoon-like animal), managed to grab three of the most expensive birds and bite their heads off!

5TJT: Are there any special guests at these events?

RNS: Last year, we had Rabbi Dr. Tzvi Hersh Weinreb, who is on the museum board, as well as Rav Yosef Carmel, head of Eretz Chemdah, and the event was livestreamed by celebrity chef Jamie Geller. This year, we look forward to hosting Rav Weinreb again, as well as other rabbanim, scholars, and public figures.

5TJT: I'm sure everyone wants to know: will there be locusts?

RNS: Actually, locusts are the only item that we are repeating from last year's menu, because they are always such a hit! But we are preparing them differently. Last year, we served them chocolate-covered, which makes them much easier to eat, because you can't really see what you are eating. This year, we will be frying them, so that you can see that you are eating an insect!

5TJT: But how can Ashkenazim eat locusts?

RNS: There are a lot of misunderstandings about this. Briefly, there is no Ashkenaz tradition against eating locusts. Rather, there is simply a lack of Ashkenaz tradition for eating locusts, since there were no locusts in Ashkenaz. Many poskim accordingly state that there is no problem in adopting the tradition from the Jews of North Africa, just as we adopt traditions for eating birds from those who have them. Not everybody agrees with this, and even some of those who do agree find it personally difficult to eat locusts; many Westerners are locust-intolerant!

5TJT: When and where is the Feast of Exotic Curiosities taking place, and who can come?

RNS: In order to accommodate our American visitors, we have planned it for October 2, right before Sukkos. The feast takes place at the museum itself, which provides the most amazing setting for such an event. It is primarily for the patrons of the museum: those who support our work in teaching about the connection between Torah and the natural world. However, we also have a limited number of seats available for others. Details are at www.biblicalnaturalhistory.org/feast.

5TJT: Thank you, and bon appétit!
//***************************************************************************** // //! Sets the high resolution output on ePWMxB //! //! \param base is the base address of the EPWM module. //! \param outputOnB is the output signal on ePWMxB. //! //! This function sets the HRPWM output signal on ePWMxB. If outputOnB is //! HRPWM_OUTPUT_ON_B_INV_A, ePWMxB output is an inverted version of //! ePWMxA. If outputOnB is HRPWM_OUTPUT_ON_B_NORMAL, ePWMxB output is //! ePWMxB. //! //! \return None. // //***************************************************************************** static inline void HRPWM_setChannelBOutputPath(uint32_t base, HRPWM_ChannelBOutput outputOnB) { ASSERT(HRPWM_isBaseValid(base)); EALLOW; HWREGH(base + HRPWM_O_HRCNFG) = ((HWREGH(base + HRPWM_O_HRCNFG) & ~(HRPWM_HRCNFG_SELOUTB)) | ((uint16_t)outputOnB << 5U)); EDIS; }
def calibrate_button_handler(obj_response, camera, topic):
    """Start the homography calibrator node for the given topic and clear
    any stale status messages from the page."""
    calibrator_node = get_calibrator_from_topic(topic)
    homography_calibrator.start(calibrator_node)
    # Clear the message area and hide the (now empty) message table.
    obj_response.html('#message', '')
    obj_response.html('#message_table', '')
    obj_response.attr('#message_table', 'style', 'display:none')
#include <stdio.h>
#include <string.h>

int main()
{
    const char girl[] = "CHAT WITH HER!";
    const char boy[] = "IGNORE HIM!";
    char msg[100];
    int uniqueLetters = 0;
    int counts[26] = {0};

    scanf("%99s", msg);

    /* Count occurrences of each lowercase letter ('a' has ASCII code 97). */
    size_t len = strlen(msg);
    for (size_t i = 0; i < len; i++)
    {
        counts[msg[i] - 'a']++;
    }

    /* Count how many distinct letters appear in the message. */
    for (size_t i = 0; i < 26; i++)
    {
        if (counts[i] > 0)
        {
            uniqueLetters++;
        }
    }

    /* An even number of distinct letters means the sender is a girl. */
    if (uniqueLetters % 2 == 0)
        printf("%s", girl);
    else
        printf("%s", boy);

    return 0;
}
Retinal degeneration in celestial goldfish. Developmental study. Celestial goldfish were reared systematically from fertilization throughout life, revealing the developmental course of their retinal degeneration. The eyes began to protrude laterally at the age of 90 days and rotated antero-dorsally at 120 days, at which point the 'celestial eye' was complete. The retina developed to a normal mature structure by the age of 50 days. No degenerative findings were observed before the start of eye protrusion. The first morphological changes, irregular distribution of melanin granules in the pigment epithelial layer and disorientation of the outer segments of the photoreceptors, were detected at the age of 90 days. At 120 days, the pigment epithelial layer had lost melanin granules in places and was occupied by phagocytes. Photoreceptor cells were destroyed, and phagocytes containing melanin granules appeared in the inner retina and/or choroid. The retinal degeneration thus started simultaneously with eye protrusion. Regular telescopic-eye goldfish have large, protruding eyes like those of celestial goldfish, but they develop no retinal degeneration. The celestial goldfish therefore appears to represent an interesting new type of hereditary retinal degeneration in vertebrates.
The House late Wednesday rejected an effort to free up federal doctors to recommend medicinal use of marijuana to their patients, with opponents saying that as long as the drug remains illegal under federal law, employees paid with taxpayer money shouldn't be recommending it. The proposal would have let Veterans Administration doctors practicing in states that have approved marijuana for medical or recreational use talk to their patients about the drug.

"Let's get out of the way, make sure we empower the VA physicians to do their job," said Rep. Dana Rohrabacher, California Republican. "Our veterans deserve that from us."

Rep. John Culberson, Texas Republican, said the issue of marijuana should be left to the states, but said as long as federal law prohibits its use, government employees shouldn't be endorsing its use. And Rep. John Fleming, a Louisiana Republican who is a doctor and veteran, said it was a bad idea to introduce a potentially addictive drug to veteran patients.

"The last thing in the world we should be doing is giving medical marijuana to people with these disorders," he said.

The proposal was defeated 222-195, with 204 Republicans and 18 Democrats opposed, and 22 Republicans and 173 Democrats in favor, a relatively close vote that reflects changing attitudes about the drug in the country at large. Twenty-one states and the District of Columbia have approved marijuana for either recreational or medical use. Some veterans want to see the government approve studies to determine whether marijuana could help them cope with traumatic brain injuries or post-traumatic stress disorder, both ailments that have seen a sharp rise during the war on terror. Wednesday's vote came as part of the debate on the annual VA and military construction spending bill.

Copyright © 2019 The Washington Times, LLC.
def add_arguments(self, parser): group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-d', '--downgrade', action='store_true', help='downgrade the J-Link firmware') group.add_argument('-u', '--upgrade', action='store_true', help='upgrade the J-Link firmware') return self.add_common_arguments(parser, False)
Origin of an antifreeze protein gene in response to Cenozoic climate change

Antifreeze proteins (AFPs) inhibit ice growth within fish and protect them from freezing in icy seawater. Alanine-rich, alpha-helical AFPs (type I) have independently (convergently) evolved in four branches of fishes, one of which is a subsection of the righteye flounders. The origin of this gene family has been elucidated by sequencing two loci from a starry flounder, Platichthys stellatus, collected off Vancouver Island, British Columbia. The first locus had two alleles that demonstrated the plasticity of the AFP gene family, one encoding 33 AFPs and the other allele only four. In the closely related Pacific halibut, this locus encodes multiple Gig2 (antiviral) proteins, but in the starry flounder, the Gig2 genes were found at a second locus due to a lineage-specific duplication event. An ancestral Gig2 gave rise to a 3-kDa "skin" AFP isoform, encoding three Ala-rich 11-a.a. repeats, that is expressed in skin and other peripheral tissues. Subsequent gene duplications, followed by internal duplications of the 11 a.a. repeat and the gain of a signal sequence, gave rise to circulating AFP isoforms. One of these, the "hyperactive" 32-kDa Maxi, likely underwent a contraction to a shorter 3.3-kDa "liver" isoform. Present-day starry flounders found in Pacific Rim coastal waters from California to Alaska show a positive correlation between latitude and AFP gene dosage, with the shorter allele being more prevalent at lower latitudes. This study conclusively demonstrates that the flounder AFP arose from the Gig2 gene, so it is evolutionarily unrelated to the three other classes of type I AFPs from non-flounders. Additionally, this gene arose and underwent amplification coincident with the onset of ocean cooling during the Cenozoic ice ages.

Figure 1. Phylogenetic relationships amongst type I AFP-producing fishes and several other species within the clade, Percomorpha, that produce different AFPs 24,25. The common names of species that produce AFPs are coloured red (type I), blue (type II), purple (type III) or green (antifreeze glycoprotein). The 95% highest posterior credibility intervals within the Pleuronectiformes are indicated with grey bars 24. Pacific halibut and yellow perch (black) do not produce AFPs 26. The coloured bar spanning 120 Ma indicates relative ocean temperatures, with red corresponding to ice-free oceans and blue corresponding to glacial periods 27. Schematics of the AFP types were generated in PyMOL 28 and fish images/drawings for shorthorn sculpin, dusky snailfish, cunner, winter flounder and starry flounder are from Wikimedia Commons (see Supplementary Material and Methods). Binomial names for the species are as follows: Myoxocephalus scorpius (shorthorn sculpin), Hemitripterus americanus (sea raven), Liparis gibbus (dusky snailfish), Zoarces americanus (ocean pout), Dissostichus mawsoni (Antarctic toothfish), Perca flavescens (yellow perch), Tautogolabrus adspersus (cunner), Hippoglossus stenolepis (Pacific halibut), Limanda ferruginea (yellowtail flounder), Hippoglossoides platessoides (American plaice), Pseudopleuronectes americanus (winter flounder), Platichthys stellatus (starry flounder), Pleuronectes pinnifasciatus (barfin plaice). Some species were not analyzed in the studies cited above, so the position of a congeneric species (dusky snailfish, Antarctic toothfish, Pacific halibut, yellowtail flounder, American plaice, barfin plaice) or a confamilial species (sea raven) was used as a proxy.
Other fish that are outside Percomorpha, including Atlantic herring and rainbow smelt, also produce AFPs.

The starry flounder, Platichthys stellatus, is a flatfish that inhabits shallow waters of the Northern Pacific Ocean from South Korea, up through the Bering Sea and down to California, as well as portions of the Arctic Ocean 38,39. It is known to produce type I AFPs, but their sequences were previously unknown 40,41. Loci containing AFP-like sequences were cloned from BAC libraries and both AFPs and the progenitor gene, Gig2 (grass carp reovirus-induced gene 2) 42, were identified. Similarity between the loci is restricted to non-coding regions and Gig2 has a different function, related to viral resistance 43. This demonstrates that the AFPs of Pleuronectiformes arose recently and independently of the type I AFPs of other fishes. The two alleles at the AFP locus are very different, containing 4 and 33 AFPs, with Southern blotting demonstrating that gene copy number increases with latitude.

Results

Part 1: flounder loci.

Starry flounder AFP genes reside at a single locus. Two BAC libraries made from a single starry flounder caught off Vancouver Island, British Columbia, were screened using a probe to the well-conserved 3′ UTRs found in flounder AFPs. The tiling paths of 35 positive BACs were determined by PCR screening with a variety of primers (Fig. 2, Supplementary Table 1) and corresponded to two loci. The first locus was represented by 22 clones corresponding to two remarkably divergent alleles from a single multigene AFP locus (Fig. 2a,b). The two banks of AFPs are allelic as they share the same four flanking genes on each side, including those coding for collagen type 1, α1 (COL1A1) and histone deacetylase 5 (HDAC5) on the upstream side and xylosyltransferase 1 (XYLT1) downstream. The remaining 13 clones contained five closely spaced Gig2 genes (Fig. 2c) with partial sequence similarity to AFPs. Based on the starry flounder genome size obtained from the Animal Genome Size Database (6.5 × 10⁸ bp) (http://www.genomesize.com/index.php), this is consistent with a single gene locus. The greater number of clones for the AFP locus is consistent with the AFPs spanning a much larger DNA length (31 or 240 kb) than the Gig2 locus (17 kb) (Fig. 2).

The two AFP alleles contain a vastly different number of AFP genes. The number of genes within both copies of the locus from this single fish differs greatly, as one allele contains 33 AFPs, whereas the smaller contains only four (Fig. 3a). The difference between the two alleles is not a cloning artefact for two reasons. First, multiple BAC inserts were sequenced for each allele (Fig. 2a,b), and they were exact matches where they overlapped.

Figure 2. Schematic diagram of BAC clones which overlap (a) AFP allele 1, (b) AFP allele 2 and (c) the Gig2 locus. The 33 AFP genes in allele 1 and the four in allele 2 are indicated in blue (liver isoforms), green (skin isoforms) and pink (intermediate length "Midi" isoform and long "Maxi" isoforms). The deduced number of tandem repeats is indicated for allele 1. The sequenced BAC clones are indicated with cyan bars. The spans of other BAC clones (grey bars with dashed lines indicating uncertainty) were determined by PCR using location-specific primers (purple arrows) and primers that were location and allele specific (orange arrows) (Supplementary Table 1). All clones were PCR positive using primers specific to the 3′ UTR found in both the Gig2 and AFP genes.
Second, the flanking regions of the two alleles are not identical, with around 3% divergence in DNA sequence, primarily within low-complexity regions. However, the protein sequences of the two genes immediately flanking the AFPs, HDAC5 and XYLT1 (Fig. 3a), are 100% identical.

The structure of the larger allele (allele 1) is complex. Its 33 AFPs are flanked on both sides with partial gene sequences (pseudogenes), whereas the single pseudogene in allele 2 is downstream of the four AFPs (Fig. 3a). The downstream pseudogenes retain some of the coding sequence (Fig. 4a). Allele 1 contains twelve (Supplementary Fig. 1) nearly-identical 11.2 kb tandem repeats, each encoding both a skin and a liver AFP isoform, L1-L12 and S1-S12 (Fig. 3a; see "Nomenclature" in "Materials and Methods" for further details about gene/protein names). These are followed by nine additional AFPs; six skin isoforms (S13-S18), one longer liver isoform (Midi) and two long isoforms (Maxi-1, Maxi-2). Allele 2 lacks Maxi sequences and contains a single pair of genes encoding a skin and liver isoform (S1a, L1a), with high similarity to the pairs within the tandem repeats of allele 1 (Fig. 4b,c). This region of allele 2 is 94% identical, over 11.9 kb, to the repeat region of allele 1, and the two skin isoforms that follow, S2a and S3a, closely resemble S15 and S16, respectively (Supplementary Fig. 2). Allele 2 could have arisen from allele 1 via two large deletions, the first removing 11 of 12 repeats through to Maxi-2, and the second removing S17 through S18. Alignments between these two alleles can share up to 98% identity over several kb, but all of these contain a few base insertions or deletions in addition to mismatches (not shown). A comparison of the four coding sequences in allele 2 to their closest matches in allele 1 shows an average identity of 98.4%.

AFP gene structure. All the AFP genes, with the exceptions of the pseudogenes that flank the locus, possess two exons (Fig. 5, partial data shown), the first of which is non-coding in the case of the skin isoforms, but which encodes most of the signal peptide in all other isoforms. The basis for identifying the flanking sequences as pseudogenes is as follows. The 5′ pseudogene of allele 1 lacks a coding sequence but is identical over 80 bp to the 3′-end of the 3′ UTR of the liver, Maxi and some skin genes. The 3′ pseudogenes of both alleles contain partial coding sequences (16 a.a. or 33 a.a.) that are shorter than the shortest skin isoform (37 a.a.), and the Thr residues are not spaced at 11 a.a. intervals (Fig. 4a). Additionally, they lack the first exon due to the insertion of an ~ 2 kb LINE1 transposon (not shown), which would likely interfere with expression.

Figure 3. The AFP locus from the single fish used to generate the BAC library is shown with the AFP-containing segment that differs from Pacific halibut and between the two alleles shown as a pop out. The AFP genes are colored as in Fig. 2 and are numbered sequentially by type. The ZG57 gene that was partially deleted at this location is in dark yellow and the XYLT1 gene is in maroon. The first 24 AFP genes (12 liver and 12 skin) occur in pairs within twelve nearly identical tandem repeats that are each 11.2 kb in length (shown compressed to one repeat × 12). These are flanked by two short segments (Ψ) that are highly similar to portions of the AFP genes. The second locus contains four AFPs denoted with the suffix "a" and one pseudogene. The black arrows show the boundaries of the locus 2 assembly.
There are twelve 11.2 kb AFP-containing repeats in allele 1. The 11.2-kb repeats at the 5′ end of allele 1 were almost identical. By selecting and anchoring the longest reads to polymorphisms in the outer repeats, as described in Supplementary Materials and Methods, the first 2.4 repeats and the last 1.5 repeats were unambiguously assembled. The interior repeats appeared virtually identical, so they were counted using a different method (Supplementary Fig. 1). A subset of raw sequence reads, from two clones that overlapped the entire region (BAC45 and BAC182, Fig. 2), were analyzed. The number of reads corresponding to either the BAC vector or the repeat was compared. The larger BAC45 dataset indicated that there were likely 12 repeats (11.9 ± 0.6), overlapping the estimate of 11 repeats (11.2 ± 0.9) from the smaller BAC182 dataset. The lack of divergence of the internal repeats suggests that they may be undergoing rounds of expansion and contraction through unequal crossing over. The near identity of the twelve tandem 11.2 kb repeats is mirrored in the protein sequences of the repeats that were assembled. The four liver AFPs (L1, L2, L11, L12) are identical and the last of the three skin isoforms (S12) differs at just one a.a. residue from S1 and S2 (Fig. 4b,c).

The AFPs fall into three main groups. The shortest encoded isoforms are the skin isoforms that lack both a signal peptide and propeptide (Fig. 4b). Most are 37-39 a.a. long with an acidic residue (Asp) at position 2 and a C-terminal basic residue (Arg) to interact with the helix dipole, as well as three Thr residues at 11 a.a. intervals. The exceptions have a C-terminal extension lacking Arg (S17, S14), a two-residue internal insertion (S14) and both a C-terminal extension and an additional 11 a.a. repeat (S18, 54 a.a.). One winter flounder skin isoform is identical to S3a and a second differs at a single residue 45.

The second group are secreted isoforms that have both a signal peptide and a propeptide that are cleaved from the mature AFP (Fig. 4c). The starry flounder liver isoforms in the 11.2 kb repeats are 38 residues long after processing, similar in length to the skin isoforms. The liver isoform of the second allele (L1a) has a single Asn mutation at one of the periodic Thr residues. These isoforms have several substitutions relative to their winter flounder counterparts 46,47 and a longer propeptide region. The sequence designated Midi is like the liver isoforms, with a signal sequence and propeptide region that are thought to undergo the same N-terminal processing. However, instead of three 11-a.a. repeats, this isoform has six, and the mature protein is intermediate in length (76 a.a.) between the shortest (37 a.a.) and longest (195 a.a.) isoforms (Fig. 4).

The third group are the hyperactive Maxi isoforms (Fig. 4d), found only in allele 1, where they are adjacent to one another. These isoforms have a signal peptide, but they lack the propeptide domain found in the other liver isoforms. These 194-195 a.a. proteins are over five times longer than most of the skin and liver isoforms and align well with the two known hyperactive isoforms from winter flounder (Fig. 4d) 35,45. The identity between the two starry flounder sequences, Maxi-1 and Maxi-2, is 82%. When compared to the winter flounder sequences, Maxi-1 is more like 5a (82%) than WF-Maxi (79%), whereas the opposite is true for Maxi-2 (79% to 5a vs. 84% to WF-Maxi).
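The skin isoforms just described follow a simple, testable pattern: a high Ala fraction with Thr residues recurring at 11-a.a. intervals. As a minimal illustration only (not code from this study), a candidate sequence could be screened for that motif as follows; the threshold and the Ala-rich test sequence are hypothetical.

def looks_like_type1_afp(seq, min_ala_frac=0.5, period=11):
    """Crude screen for the type I AFP motif: high Ala content plus
    Thr residues recurring at 11-residue intervals (three in a row)."""
    ala_frac = seq.count('A') / len(seq)
    thr = {i for i, aa in enumerate(seq) if aa == 'T'}
    periodic = any(all(p + k * period in thr for k in range(3)) for p in thr)
    return ala_frac >= min_ala_frac and periodic

# Illustrative Ala-rich test sequence with Thr at positions 1, 12, 23 and 34.
print(looks_like_type1_afp("DTASDAAAAAALTAANAKAAAELTAANAAAAAAATAR"))  # True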
Maximum-likelihood phylogenetic analysis (Supplementary Fig. 3) groups Maxi-1 with WF-5a and Maxi-2 with WF-Maxi, indicating that these two isoforms may have arisen prior to the separation of the winter flounder and starry flounder lineages, over 13 Ma ago (Fig. 1). This is also consistent with the divergence (18%) between Maxi-1 and Maxi-2.

The second cloned locus contains five copies of Gig2. The two BACs that were sequenced (Fig. 2c) from the Gig2 locus (Fig. 3c) were identical, suggesting they originated from the same allele. The Gig2 genes lie between the metaxin-2 (MTX2) and cadherin-5 (CADH5) genes, so they reside at a different locus than the AFP genes. This locus was isolated because the Gig2 genes share up to 92% identity to a 252 bp segment of the 3′ UTR AFP probe used to screen the library. The five Gig2 genes in this locus were identified and annotated by comparison with well-characterized Gig2 genes from other fishes 42. Gig2 has been shown to protect fish kidney cells in culture from viral infection 43. One of the isoforms (Gig2-4) is 40 residues shorter than the others and may be a pseudogene. The four isoforms that are 147 a.a. long were aligned (Supplementary Fig. 4) and they share 73-86% sequence identity. Notably, the sequence of these proteins does not resemble that of the AFPs, as they contain little Ala. SMART analysis (http://smart.embl-heidelberg.de/) suggests that residues 20-115 of Gig2-3 are similar to the poly(ADP-ribose) polymerase catalytic domain (expect value of 1.6 × 10⁻⁶).

Part 2: similar loci in other fishes.

A syntenic Pacific halibut locus lacks AFPs but contains Gig2 and ZG57 genes. A high-quality genome sequence is available for the Pacific halibut (GenBank Assembly GCA_013339905.1) 48, a species in the same family (Pleuronectidae) as starry flounder. These species shared a common ancestor around 20 Ma ago (Fig. 1). The region of its genome corresponding to where the AFP locus is in the starry flounder shares the same flanking genes on either side, including COL1A1, HDAC5, XYLT1 and FUS, but it completely lacks AFP genes (Fig. 3b). Instead, it contains four Gig2 genes. These were annotated in the GenBank deposition (XM_035180664.1) as one combined Gig2 gene with adjustments for frameshifts. Conspecific transcriptomic sequences in the Sequence Reads Archive database at NCBI 49 were inconsistent with this combined gene model, so they were reannotated to show four copies of Gig2, each with a small non-coding exon followed by a coding exon, as in the starry flounder Gig2 genes. The first two genes encode proteins that are highly similar (71-80% identity) to the starry flounder Gig2 proteins (Supplementary Fig. 4). The next two contain frameshifts that disrupt the reading frames, so like Gig2-4 in starry flounder, these may be pseudogenes. There was one gene found downstream of HDAC5 in Pacific halibut, just upstream of the Gig2 genes, that was not found in starry flounder (Fig. 3b). This gene is well conserved, contains two exons, and encodes gastrula zinc finger protein XlCGF57.1 (ZG57), a 56.3-kDa protein that shares no similarity with AFPs.

The Pacific halibut locus that is syntenic to the Gig2 locus in starry flounder lacks Gig2 genes. The region of the genome in Pacific halibut that corresponds to the Gig2 locus of starry flounder was also characterized (Fig. 3d). Although the flanking genes, MTX2, CADH5 and BEAN1, were well conserved, there is a complete absence of Gig2-like sequences at this location.
The microsynteny of Gig2 genes varies among fishes but is unique in starry flounder. The Gig2 loci of species closely related to starry flounder, with genome assemblies sufficiently long to span Gig2 and neighbouring genes, were characterized (Table 1). Species within the same family (Pleuronectidae) as the starry flounder and Pacific halibut share microsynteny with the halibut, with HDAC5 and ZG57 upstream and XYLT1 downstream of the Gig2 locus (Table 1 and Fig. 3b). More variability is found in selected species outside the Pleuronectidae, with RAB40C in place of HDAC5 in several species and UNK93 in place of XYLT1 in one (Table 1). However, none of these Gig2 loci are flanked by either MTX2 or CADH5, as in starry flounder (Fig. 3c). These observations support the hypothesis, expanded on below, that the AFP arose from the original Gig2, following the latter's gene duplication and relocation in an ancestor of the starry flounder.

Starry flounder AFPs are homologous to AFPs from other Pleuronectiformes. The homology of the winter flounder and starry flounder AFPs is apparent from the similarity of their non-coding sequences. A 2.9 kb portion of a 7.8 kb tandemly-repeated gene from winter flounder encodes a liver isoform 50. Most (88%) of this sequence, which is primarily non-coding, has over 84% identity to the starry flounder 11.2 kb repeat (Supplementary Fig. 5). It was not determined if this winter flounder repeat DNA also contained a skin isoform. Additional winter flounder genomic sequences, initially identified as pseudogenes 45, are also highly similar to starry flounder sequences. Two skin genes are most like S14, with 90% and 85% identity, respectively. Additionally, the WF-5a gene (GenBank accession AH002489.2) is over 80% identical to both Maxi-1 and Maxi-2 over most of its length. The non-coding sequence of the mRNA encoding an AFP (GenBank accession X06356.1) from the more distantly-related yellowtail flounder (Fig. 1) 12 is also highly similar to that of the starry flounder liver isoform within the repeats. The 5′ UTR (30 bp) is 93% identical and the 3′ UTR (96 bp) is 96% identical to the liver isoforms in the 11.2 kb repeat. Similar comparisons to the non-coding regions of the type I AFPs of other orders (Fig. 1) failed to identify any similarity, as was found when comparisons were done using winter flounder sequences 15.

Part 3: the origin of the flounder AFP genes.

Remnants of three genes indicate that the AFP genes arose at their current location. The region containing the starry flounder AFPs was compared to the flanking sequences and to the Pacific halibut ZG57 locus (Fig. 3). A portion of the ZG57 gene containing the first exon and part of the intron is found just upstream of the first AFP pseudogene in allele 1 (Fig. 3a, yellow bar). This segment encodes 22 a.a. that closely resemble the N-terminal sequence of the halibut protein, but several frameshifts thereafter disrupt the reading frame, and the second exon is absent, so this gene is no longer functional (not shown). Sequences similar to various regions of ZG57 are found scattered throughout the AFP region and some of these are indicated in dark yellow in Fig. 5a. Similarly, segments corresponding to the 5′ region of the downstream XYLT1 gene are also found scattered about, and while only one small segment is found in the region shown in Fig. 5a in maroon, three segments totaling 2.2 kb are found within the 11.2 kb repeats (not shown). Some AFPs, such as Maxi-2 (Fig. 5a), are flanked by both ZG57 and XYLT1 segments.
ZG57 segments are always upstream and XYLT1 segments are always downstream of AFPs. This suggests that a single AFP gene arose between ZG57 and XYLT1 and that, when the AFP locus expanded, portions of these flanking genes were duplicated along with the AFP.

Gig2 was likely the AFP progenitor. A comparison of the Gig2 and AFP loci of starry flounder indicated that there were many stretches of similar sequence, some of which are shown in Fig. 5a. As these matches cover a significant portion of the AFP gene, except for the coding sequence, this suggests that the AFP gene arose from the Gig2 gene. Furthermore, the greater number of matches to S15 than to Maxi-2 suggests that the skin gene likely arose first and that subsequent alterations, in which regions similar to Gig2 were lost, gave rise to the Maxi genes. A more detailed comparison is shown between the skin and liver AFPs within the 11.2 kb repeat and the Gig2-2 locus (Fig. 5b). Here again, the skin AFP is more like Gig2, with regions of similarity beginning before and extending across the non-coding exon 1, continuing throughout much of the intron and into exon 2, up to and including the start codon. The coding sequences of S2 and Gig2-2 share no significant similarity, but similarity begins again downstream of the coding sequence. The matches between Gig2 and the liver AFP are more limited, including in the presumptive promoter/enhancer region upstream of the gene, and resemble those between Gig2 and Maxi-2.

A dot plot comparison of the predicted mRNA sequences of S1 and a second Gig2 gene, Gig2-3, showed four segments with similarity (Fig. 6a). Sequence alignments between the genes in these vicinities are shown in Fig. 6b-f. The similarity between the non-coding first exon of both genes is evident with a match of 39 out of 44 bp, with the similarity extending further, both 5′ of the gene and downstream into the intron (Fig. 6b). The match at the start of exon 2 also extends into the intron, but the sequences diverge downstream of the start codon (Fig. 6c). There is but one short segment showing 66% identity within the coding region (Fig. 6a,d).

Figure 6. Alignments between Gig2-3 and AFPs. (a) Dot plot comparison of the mRNA sequences of Gig2-3 to S1 generated using YASS 51. The two exons are indicated by rectangles and the coding sequence of Gig2 by the yellow/orange striped background and that of the AFP with a blue striped background. (b-f) Exon-spanning alignments of the gene sequences of Gig2-3 and S1, corresponding to the segments identified in (a). Exons are in uppercase font, highlighted grey if non-coding or as in (a) if coding. Percent identities and alignment length are at the end of each aligned segment. Genic matches not overlapping exons are not shown. Residues modelled as helical within Gig2-3 (Fig. 7) are shown in (d) in red, the stop codon for S1 is 31 bp upstream (not shown) of the Gig2-3 stop codon in (e), and the polyadenylation signal is underlined in (f). (g) Match between Gig2-3 and Maxi-2 spanning exon 1 only. The signal peptide sequence is shown along with a translation of the corresponding region of the non-coding Gig2 exon. The base numbers shown correspond to GenBank Accessions OK041465 (Gig2 locus) and OK041463 (AFP locus 1).

The last two matches are downstream of the coding sequence, the first of which starts right at the stop codon of Gig2-3 and 31 bp downstream of the stop codon of S1 (Fig. 6e).
The second extends into the 3′ region and overlaps a presumptive poly-adenylation signal (Fig. 6f). As mentioned previously, exon 1 of both Gig2 and skin AFPs is non-coding, but for the liver and Maxi AFPs, it encodes a signal peptide. Despite this, an alignment of the Maxi-2 and Gig2-3 regions spanning this exon shows that a limited number of mutations, such as AGG to ATG to introduce a start codon, along with a small insertion of 23 bases, were sufficient to convert the exon to a signal-peptide encoding sequence (Fig. 6g). This indicates that the signal peptide arose in situ, from the non-coding exon of Gig2.

Possible origins of the AFP coding sequence. Flounder AFP is Ala-rich and these straight α-helices provide a flat surface that interacts with ice 33,37. In contrast, Gig2 has a lower-than-average Ala content (~ 5%), with only one 5 a.a. segment, ACATA, found in two isoforms (Supplementary Fig. 4), that resembles the Ala-rich AFP sequence. This sequence is encoded by the region of similarity detected by dot matrix analysis (Fig. 6a,d). If this region gave rise to a type I AFP, it would be expected to reside within a surface-exposed α-helix. Fortunately, the structure of a homolog, the poly(ADP-ribose) polymerase catalytic domain, is known, and the Phyre2 52 homology model of Gig2 (Fig. 7) shows that this ACATA segment is likely surface exposed and is located on the longest helical segment predicted for this globular protein. The AlphaFold2 44 de novo model is very similar and predicts the same surface-exposed helix. Deletion of most of the coding sequence, followed by amplification of this short segment, could have given rise to a primordial AFP. Alternatively, a GC-rich sequence encoding numerous Ala residues, such as (GCC)n, could have replaced the Gig2 coding sequence.

Within the flounder lineage, a gene duplication event led to additional copies of the Gig2 gene at the second locus, between MTX2 and CADH5 (Fig. 3c). The original Gig2 genes were then redundant, and one underwent changes that generated a skin AFP. This could have come about if the short Ala-containing segment within the α-helix region expanded (Fig. 6d) or if a segment of repetitive, GC-rich DNA replaced the coding sequence. The gene was then duplicated an unknown number of times at this location, as shown by the many segments within the AFP locus that are similar to the ZG57 and XYLT1 genes (Fig. 5a). Eventually, the non-coding exon 1 of one duplicate evolved to encode a signal peptide (Fig. 6g). Further gene duplications and/or gene losses (as can be seen by comparing the two alleles) then shaped the present-day locus.

Figure 7. (b) Gig2-3, modelled using Phyre2 52, was aligned with 100% confidence over 89% of its length to the template PDB:3C4H. (c) Gig2-3 modelled without a template using simplified AlphaFold 2.0 44. The first eight residues (5%) were removed as they were modelled with low confidence. The images were generated using PyMOL 28 and are shown in cartoon mode with small spheres representing side chains for Ala residues (cyan) and Thr residues (blue). The other residues are coloured by secondary structure with α-helices in red, β-strands in yellow and coils in green.

Allele 2 is more prevalent in starry flounders from warmer waters. The fish that was used to construct the library, and which had the two differing AFP alleles, was caught in southerly Canadian waters of the North Pacific, off the western side of Vancouver Island (pink/green circle; all locations are shown in Fig. 8a).
In contrast, a genomic Southern blot of four fish collected from Haida Gwaii, approximately 300 km further north (location 2), showed that the larger AFP allele 1 was prevalent at this location (Fig. 8b-2). Two intense bands, corresponding to the skin and liver genes within the 11.2 kb repeat, confirm the repetitive nature of this repeat. Bands corresponding to the predicted sizes of all the other genes from allele 1 were also observed, further confirming the accuracy of our assembly. A more detailed analysis of the correspondence between these bands and the two AFP alleles is shown in Supplementary Figure 7. There is some evidence of limited polymorphism, as a few unexplained bands were present in one or two of the fish, but all these fish appear to be homozygous for alleles very similar to allele 1, as bands corresponding to the unique and well-separated fragment sizes expected for S2a, S3a and S4a were not observed.

In contrast to the large AFP copy number of the more northerly starry flounder, a fish caught in Monterey Bay, California (location 4), only has bands consistent with allele 2 (Fig. 8b-4). Although at a similar latitude as the sequenced flounder from the west coast of Vancouver Island, the fish caught in the warmer, slightly brackish waters of English Bay, off Vancouver (location 3), had bands consistent with allele 2, along with some moderately intense bands consistent with the skin and liver genes within the 11.2 kb repeats (Fig. 8b-3). We speculate that it contains an allele similar to allele 2 that still has a small number of 11.2 kb repeats remaining. A fish from Alaska (location 1), approximately 1500 km further north from Haida Gwaii, had many intense bands with sizes that were not consistent with either allele (Fig. 8b-1). Together, these results suggest that gene copy number is correlated with risk of ice exposure and that numerous alleles with differing numbers of AFP genes can be found within this species.

Discussion

Taxonomically restricted genes (TRGs) confer phenotypic novelty on their hosts and the selective pressures of new environments often provide the driving force for their development 53,54. For example, water striders have colonized the water surface due in part to TRGs that generate a "fan" on the middle leg that provides propulsion across the surface 55. Similarly, the climate cooling that intensified during the latter half of the Cenozoic Era generated an icy sea environment that had been absent for at least tens of Ma 27,31, and which would have excluded fish from shallow water niches where ice is found until the AFP genes arose in certain species, including the recent ancestors of the starry flounder. These and other TRGs arise in a variety of ways 53, including via duplication and divergence of existing genes, as for example with AFGP, type II and type III AFP 22,18,16, or de novo from non-coding DNA (AFGP 21,23). It can be difficult to determine the mechanism, as selection for a new function can lead to rapid divergence, erasing the similarity to the progenitor sequence 56. This erasure likely occurred with the coding sequence of the flounder AFP gene, as it bears little similarity to the Gig2 progenitor. Fortunately, the AFP arose recently, so extensive similarity between the flanking regions of the two genes was retained (Figs. 5 and 6).
Additionally, the lineage-specific duplication of the Gig2 genes at a second locus, as well as sequential duplications of segments of the flanking genes at the original locus (Figs. 3 and 5), shows that the AFP gene arose, in situ, at the original Gig2 locus via gene duplication and divergence. It is now clear that the AFPs of Pleuronectiformes, such as starry flounder, are not homologous to the type I AFPs found in the other three lineages (snailfish, cunner and sculpin) within Perciformes and Labriformes, as these other AFPs lack similarity to Gig2. It was proposed that the snailfish AFP could have arisen from a frameshifting of the Gly-rich region of either keratin or chorion cDNAs that were inadvertently cloned along with the AFP genes 57. However, the similarity did not extend into non-coding segments. As all these genes arose within the last ~ 20 Ma, they would be expected, like the flounder's, to retain some evidence of their origins in their non-coding regions, since diversifying selection would be lower here. Currently, the origin of the three other type I AFPs remains unknown.

The convergence of the AFPs from four lineages to Ala-rich helices, sometimes with Thr residues at 11 a.a. intervals 9,10,15,34, suggests that this motif is well-suited to interacting with ice. Similar convergence, albeit with a different structural framework, was seen with arthropod AFPs that adopt a β-helical conformation. A beetle (yellow mealworm) and a fly (midge) produce tight, disulfide-stabilized solenoids, with an ice-binding surface composed of a double row of Thr residues or a single row of Tyr residues, respectively 58,59. The looser solenoid of the moth (spruce budworm) is more triangular and lacks bisecting disulfide bonds, but like the beetle AFP, its ice-binding surface consists of a double row of Thr residues 60. This suggests that there are nascent structures with propensities to evolve into AFPs, but that different types are more likely to arise in marine versus terrestrial environments because of the vastly different requirements for freezing point depression.

When a novel gene arises from a pre-existing one, non-coding sequences are thought to be almost as important as coding sequences 61. It is likely that the promoter and enhancer sequences controlling expression of the Gig2 gene were co-opted, for two reasons. First, the skin genes and Gig2 share high identity upstream of the first exon. Second, the expression patterns of Gig2 in zebrafish 42 and the winter flounder skin AFPs 34 are similar, as they are expressed in a variety of tissues. The tissue- and season-specific enhancement of the liver AFPs 62 may have arisen later, given that its gene lacks similarity to the upstream regions of the Gig2 gene. However, all the genes retain the two exons and the polyadenylation signal.

The rapid divergence of the starry flounder AFP coding sequence from the Gig2 progenitor is reminiscent of that observed for the AFGP that was derived from the trypsinogen gene 22. For the AFP, a 35 bp segment, corresponding to 10 a.a. in a helical region of the protein, was likely retained and amplified (Figs. 6 and 7). For AFGP, the amplified segment was only 9 bp long and it overlapped the acceptor splice junction at the start of exon 2. Both gene types retained the first exon, which is non-coding in skin AFPs and Gig2, but which encodes a signal peptide in both AFGP and trypsinogen.
However, the first exon of the flounder liver, Midi and Maxi genes does encode a signal peptide, and similarity with the Gig2 non-coding exon shows that it arose in situ. This is reminiscent of the origin of the signal peptide of type III AFP 18, where an additional 54 bp in exon 1 gained coding potential, generating a signal peptide.

One explanation for rapid divergence of specific portions of DNA sequence, such as the signal peptides mentioned above, is positive Darwinian selection, where the rate of non-synonymous (missense) to synonymous (silent) mutations at certain positions is higher than expected under either a neutral or negative model of selection 63. Such selection has also been observed in numerous surface-exposed residues of the globular type III AFP sequences from fish and the solenoid AFP from beetles 64. Given that there are far fewer structural constraints on isolated α-helical peptides than on the two aforementioned AFPs, any mutations that increased helical content or the ability to bind to ice could be subject to strong positive selection in fishes exposed to ice in a cooling ocean. The result would be higher divergence of the coding sequences relative to non-coding sequences, as seen between the AFP and Gig2 sequences of the starry flounder.

The number of AFP genes was higher in starry flounders from the northern waters of Alaska and British Columbia than in flounders from more southerly waters (Fig. 8). Variation in gene copy number was also observed in winter flounder from different regions along the Atlantic coast, with animals from warmer waters having fewer genes 65. The same pattern has been observed for ocean pout, which can have up to ~ 150 genes that produce type III AFP 66. As many of the AFP genes are arranged in tandem arrays, they are likely prone to rapid expansion and contraction via unequal crossing over 67, providing variation that would be subject to environmental selection. Gene duplication also provides additional copies that can undergo neofunctionalization 67, which is how the three main classes of type I AFPs found in flounders (Maxi, liver and skin) arose. The properties of these isoforms differ dramatically, as Maxi is far more active than either the skin or liver isoforms 36, and expression of the liver isoform is extremely high in this tissue 68. Contraction via unequal crossing over likely eliminated the majority of the skin and liver genes in the shorter starry flounder AFP allele. A similar process may have occurred in the American plaice. Despite being closely related to the yellowtail flounder that possesses both liver and Maxi isoforms 12,14,24 (Fig. 1), American plaice serum only contains Maxi-like AFPs 14. This suggests that the common ancestor of both of these fish had the liver isoform and that the plaice locus may have undergone contraction, losing the small liver-specific AFP genes. Similar processes, working on a smaller scale, may also be responsible for the generation of isoform variation. For example, liver-like isoforms with extra copies of the 11-a.a. repeat are found in both starry flounder (Midi with three extra repeats) and yellowtail (one extra repeat 12). This plasticity may also explain why the banding pattern from the Alaskan starry flounder observed by Southern blotting is so different from that of fish from Haida Gwaii (Fig. 8), despite both having large numbers of AFP genes.
In summary, the origin of the flounder AFP from the gene encoding the globular, antiviral Gig2 protein, via gene duplication and divergence, has been determined. Detailed comparisons between the two loci elucidate the steps involved in the evolution of the AFP. Although the flounder AFP is superficially similar to the type I AFPs of other groups, all of which are extended alanine-rich alpha-helical proteins of varying length, it clearly arose by convergent evolution. The two extended loci that were characterized from starry flounder encode either the AFP genes or five of the Gig2 progenitor genes. The two AFP alleles sequenced contain either four or 33 AFP genes, indicating that gene copy number can vary dramatically. These genes encode skin, liver and Maxi AFPs, with the number of AFP genes being higher in fish that inhabit colder waters.

Materials and methods

BAC library construction, screening and sequencing. A BAC (bacterial artificial chromosome) library was constructed by Amplicon Express (Pullman, Washington, USA) from genomic DNA from an individual starry flounder captured off the west coast of British Columbia. Fish tissues were harvested from euthanized fish in accordance with the Canadian Council on Animal Care Guidelines and Policies, with approval from the Animal Care and Use Committee at Queen's University. A total of 12 clones that hybridized to the 3′ untranslated region (UTR) of an AFP transcript were sequenced at the Génome Québec Innovation Centre (Montreal, Quebec, Canada) using the PacBio RS II single molecule real-time (SMRT®) sequencing technology (Pacific Biosciences, Menlo Park, California, USA).

DNA assembly, gene annotation and Southern blotting. The initial assembly was done by the Génome Québec Innovation Centre using the Celera assembler 69. The overlapping regions of different clones were identical except at longer homopolymer or dinucleotide repeat regions. A region containing near-identical 11.2 kb repeats was assembled and evaluated separately, yielding 3.9 assembled repeats out of 12 total, as described in Supplementary Materials and Methods. Genes were annotated using homologs from other fish. DNA from starry flounders collected at various locations from California to Alaska was Southern blotted and the blots were evaluated using various ³²P-labelled probes to AFP genes. A more detailed description of all procedures can be found in Supplementary Materials and Methods.

Nomenclature. Genes are differentiated from proteins using italics. For simplicity, AFPs from starry flounder are named by class, with "liver" for small circulating isoforms, "skin" for small isoforms first isolated from skin, "Midi" for an isoform of intermediate size and "Maxi" for the large circulating isoforms. Numbering is used for classes with multiple isoforms, such as S1 and L1 for the first skin and liver genes in allele 1, respectively. Isoforms from allele 2 are differentiated by the letter a (e.g., S1a, L1a), whereas those from winter flounder are preceded by WF.

Data availability

The starry flounder sequences generated during the current study and the Pacific halibut sequences they were compared to are available from GenBank under accession numbers OK041463, OK041464 and OK041465, NC_048942 (845791 bp to 1041091 bp) and NC_048938 (22286642 bp to 22384527 bp). The structure of type I AFP was obtained from the Protein Data Bank, accession 1WFA.
Apply the Feature of Entropy Convergence of ACO to Shorten the Runtime of Gene Ordering. Alzheimer's disease (AD) is the most common form of dementia. To find a cure, genetic studies are necessary, and gene ordering is a relatively new concept in such studies: a gene order is a permutation of genes in which similar genes are placed next to one another, and finding the optimal gene order can be abstracted as finding the shortest TSP route. Currently, only two types of tools are reported for computing gene orders: Genetic Algorithms (GA) and Ant Colony Optimization (ACO). For both, a computational bottleneck is that runtime becomes excessive when the gene data set is large. To mitigate this bottleneck, this paper uses the entropy convergence of ACO as the termination criterion, speeding up the computation of AD gene orders. Experiments show that the proposed method has a clear advantage in both runtime and solution quality.
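To make the termination criterion concrete, here is a minimal sketch (not the paper's implementation) of an ACO loop that stops once the Shannon entropy of the pheromone matrix has stabilized; all parameter names and values are illustrative, and the tour-construction step is elided.

import numpy as np

def pheromone_entropy(tau):
    """Shannon entropy of the pheromone matrix treated as a probability
    distribution over edges."""
    p = tau / tau.sum()
    p = p[p > 0]
    return float(-(p * np.log(p)).sum())

def run_aco(distances, n_ants=20, rho=0.1, max_iter=500, tol=1e-4, patience=10):
    """ACO skeleton that terminates early once pheromone entropy converges."""
    n = len(distances)
    tau = np.ones((n, n))              # initial pheromone levels
    prev_h, stable = None, 0
    for it in range(max_iter):
        # ... construct n_ants tours here and deposit pheromone on good edges ...
        tau *= (1.0 - rho)             # evaporation
        h = pheromone_entropy(tau)
        if prev_h is not None and abs(h - prev_h) < tol:
            stable += 1
            if stable >= patience:     # entropy has converged: stop early
                return tau, it
        else:
            stable = 0
        prev_h = h
    return tau, max_iter

The design choice here is that entropy measures how concentrated the pheromone distribution has become; once it stops changing, further iterations rarely improve the best tour, so cutting off at that point trades a fixed iteration budget for a data-dependent, usually much shorter, one.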
/*
This function takes a function f as an argument and runs it on each
fraction stored in the database.

Parameters of the callback f are as follows:
 - The first parameter is the index of the fraction
 - The second parameter is the numerator of the fraction
 - The third parameter is the denominator of the fraction
*/
void DataBase_forEachValueInStoredFractions(void (*f)(int, int, int))
{
    for (int i = 0; i < NumStoredFractions; i++)
    {
        /* fraction[0] holds the numerator, fraction[1] the denominator */
        int *fraction = StoredFractions[i];
        f(i, fraction[0], fraction[1]);
    }
}
Optimal ordering policy in a two-echelon supply chain model with variable backorder and demand uncertainty. The paper investigates a two-echelon production-delivery supply chain model for products with stochastic demand and a backorder-lost-sales mixture under trade-credit financing. The manufacturer delivers the retailer's order quantity in a number of equal-sized shipments. The replenishment lead time can be crashed to a minimum duration at an additional cost that can be treated as an investment. Shortages in the retailer's inventory are allowed and are partially backlogged, with a backlogging rate dependent on the customer's waiting time. Moreover, the manufacturer offers the retailer a credit period that is shorter than the reorder interval. The model is formulated to find the optimal order quantity, safety factor, lead time, and number of shipments from the manufacturer to the retailer, under both distribution-free and known demand distribution assumptions. Two solution algorithms are provided to obtain the optimal decisions for the integrated system. The effects of controllable lead time, backorder rate and trade-credit financing on the optimal decisions are illustrated through numerical examples.
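For intuition, the known-distribution case reduces, in its simplest textbook form, to setting a reorder point from the lead-time demand distribution. The sketch below shows only that classical calculation; the paper's full model (backorder-lost-sales mixture, lead-time crashing, trade credit and the integrated manufacturer-retailer cost function) is not reproduced here, and all numbers are illustrative.

from math import sqrt
from statistics import NormalDist

def reorder_point(mu_d, sigma_d, lead_time, service_level):
    """Reorder point under normally distributed lead-time demand:
    r = mu_d * L + z * sigma_d * sqrt(L), where z is the safety factor."""
    z = NormalDist().inv_cdf(service_level)   # safety factor for the target service level
    return mu_d * lead_time + z * sigma_d * sqrt(lead_time)

# Example: mean demand 100 units/week, std 20, 2-week lead time, 95% service.
print(round(reorder_point(100, 20, 2, 0.95), 1))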
import sys

data = sys.stdin.read().split("\n")
n = int(data[0])
e = list(map(int, data[1].split()))  # e[i]: cost of reversing string i
strs = data[2:2 + n]

# For each string, keep both variants: (as-is, reversed).
S = [(x, x[::-1]) for x in strs]

# dp[i][j]: minimum total cost to make strings 0..i lexicographically
# non-decreasing, where j = 0 keeps string i as-is and j = 1 reverses it.
INF = float('inf')
dp = [[INF] * 2 for _ in range(n)]
dp[0][0] = 0
dp[0][1] = e[0]

for i in range(n - 1):
    for j in range(2):
        # Extend from string i kept as-is...
        if S[i][0] <= S[i + 1][j]:
            dp[i + 1][j] = min(dp[i + 1][j], dp[i][0] + j * e[i + 1])
        # ...or from string i reversed.
        if S[i][1] <= S[i + 1][j]:
            dp[i + 1][j] = min(dp[i + 1][j], dp[i][1] + j * e[i + 1])

ans = min(dp[n - 1])
print(-1 if ans == INF else ans)
Study of the histological profile of papillary thyroid carcinomas associated with Hashimoto's thyroiditis. OBJECTIVE To investigate the association between the histological parameters of papillary thyroid cancer (PTC) and the presence of Hashimoto's thyroiditis (HT). MATERIALS AND METHODS Histological samples from patients with PTC were reviewed by an endocrine pathologist. The following parameters were analyzed: presence of concomitant HT, multifocality, presence of nodal metastasis, tumor size, vascular invasion, perineural infiltration, histological variant, and pathological staging. Clinical data included gender and age at the time of diagnosis. RESULTS A total of 94 cases of PTC were reviewed. There was a predominance of women (85.1% vs. 14.9%) and the median age at presentation was 45.13 years. The presence of HT was significantly associated with a greater occurrence of multifocal tumors (p = 0.004), an earlier pathological stage (p = 0.02), and smaller tumor size (p = 0.025). CONCLUSIONS Patients with PTC associated with HT had significantly smaller tumors, which were more often multifocal and diagnosed at an earlier stage than those of their counterparts without HT. A better understanding of the immune response involved in these tumors may be useful for future prevention strategies and for the development of new therapeutic approaches for this group of neoplasms.
from flask import Flask, request, render_template, redirect
import db
import util
import json
from app.DGA import *

app = Flask(__name__)

@app.route('/')
def hello_world():
    ConctracList = db.getConctracList()
    return render_template('contractList.html', contractList=ConctracList), 200

@app.route('/createIndex')
def createIndex():
    return render_template('ContractIndex.html'), 200

@app.route('/getDGA', methods=['POST'])
def getDGA():
    contractId = request.form.get('contractId', default='id')
    print(contractId)
    fsm_struct = util.read_fsm(contractId)
    print(fsm_struct)
    res = {'fsm': fsm_struct}
    return json.dumps(res), 200

@app.route('/saveContract', methods=['POST'])
def saveContract():
    args = request.get_json()
    contract_id = util.get_id(args['contract_name'])
    jsondata = json.dumps(args['content'])
    GenerateDGA(jsondata, contract_id)
    res = db.save_contract(args['contract_name'], contract_id, args['Obligor'], args['creditor'], jsondata)
    ConctracList = db.getConctracList()
    return render_template('contractList.html', contractList=ConctracList), 200

@app.route('/content/<contractId>')
def getContent(contractId):
    contract = db.getContent(contractId)
    content = []
    content.append(contract[0])
    content.append(contract[1])
    content.append(contract[2])
    content.append(contract[3])
    content.append(json.loads(contract[4]))
    return render_template('ContractIndex.html', id=contractId, contract=content), 200

@app.route('/ContractList')
def showlist():
    ConctracList = db.getConctracList()
    return render_template('contractList.html', contractList=ConctracList), 200

@app.route('/DGA/<contractId>')
def showDGA(contractId):
    return render_template('DGA.html', contractId=contractId), 200

if __name__ == '__main__':
    config = util.get_config()
    host = config["host"]
    port = int(config["port"])
    debug = config["debug"]
    if debug == "True":
        debug = True
    else:
        debug = False
    app.run(host=host, port=port, threaded=True, debug=debug)
package frc.robot.Drivetrain; import edu.wpi.first.math.controller.PIDController; import edu.wpi.first.math.filter.SlewRateLimiter; import frc.lib.Signal.Annotations.Signal; import frc.lib.Util.MapLookup2D; public class AzimuthAngleController{ final double MAX_AZMTH_SPEED_DEG_PER_SEC = 720.0; // TODO, maybe go faster? PIDController azmthPIDCtrl = new PIDController(0,0,0); double desAng = 0; @Signal(units="deg") double actAng = 0; @Signal(units = "deg") double angSetpoint = 0; @Signal(units = "deg") double desAngleRateLimit = 0; double azmthMotorCmd = 0; double netSpeed = 0; @Signal boolean invertWheelDirection = false; MapLookup2D azmthCmdLimitTbl; double desAnglePrev = -123; public AzimuthAngleController(){ azmthPIDCtrl.enableContinuousInput(-180, 180); azmthCmdLimitTbl = new MapLookup2D(); azmthCmdLimitTbl.insertNewPoint(0.0, 1.0); azmthCmdLimitTbl.insertNewPoint(1.0, 1.0); azmthCmdLimitTbl.insertNewPoint(3.0, 0.5); azmthCmdLimitTbl.insertNewPoint(5.0, 0.1); azmthCmdLimitTbl.insertNewPoint(9.0, 0.1); } public void setInputs(double desiredAngle_in, double actualAngle_in, double curSpeed_fps_in){ desAnglePrev = desAng; desAng = desiredAngle_in; actAng = actualAngle_in; netSpeed = curSpeed_fps_in; } public void update(){ desAngleRateLimit = desAng; //todo - fancy rate limiting... needed? azmthMotorCmd = azmthPIDCtrl.calculate(actAng, desAngleRateLimit); azmthMotorCmd = limitMag(azmthMotorCmd, azmthCmdLimitTbl.lookupVal(netSpeed)); } public void setGains(double kP, double kI, double kD){ azmthPIDCtrl.setP(kP); azmthPIDCtrl.setI(kI); azmthPIDCtrl.setD(kD); } public double getMotorCmd(){ return azmthMotorCmd; } public double getErrMag_deg(){ return Math.abs(desAng - actAng); } public double getSetpoint_deg(){ return desAng; } private double limitMag(double in, double magMax){ if(Math.abs(in) > magMax){ return Math.signum(in) * magMax; } else { return in; } } }
/** * Encapsulates information about a single cluster of chunks, immutable type. */ public class Cluster { /** * Constructs a cluster. * * @param worldSeed world seed * @param chunkX chunk X position * @param chunkZ chunk Z position * @param size size of that cluster */ public Cluster(long worldSeed, int chunkX, int chunkZ, int size) { this.worldSeed = worldSeed; this.chunkX = chunkX; this.chunkZ = chunkZ; this.size = size; } @Override public String toString() { return String.format("Cluster of %s chunks at %s, %s (seed: %s).", size, chunkX * 16, chunkZ * 16, worldSeed); } /** * Returns the world seed. * * @return world seed */ public long getWorldSeed() { return worldSeed; } /** * Returns chunk position on the X axis. * * @return X chunk position */ public int getChunkX() { return chunkX; } /** * Returns chunk position on the Z axis. * * @return Z chunk position */ public int getChunkZ() { return chunkZ; } /** * Returns the amount of chunks in this cluster. * * @return size */ public int getSize() { return size; } private final long worldSeed; private final int chunkX, chunkZ, size; }
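A brief usage sketch of the class above, with made-up values; note that toString reports block coordinates, i.e. the chunk coordinates multiplied by 16:

// Chunk (2, -3) sits at block position (32, -48).
Cluster cluster = new Cluster(12345L, 2, -3, 40);
System.out.println(cluster);
// -> Cluster of 40 chunks at 32, -48 (seed: 12345).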
// https://leetcode-cn.com/problems/number-of-valid-words-for-each-puzzle/
// A word is valid for a puzzle when every letter of the word occurs in the
// puzzle and the word contains the puzzle's first letter. Encode each string
// as a 26-bit mask, count word masks, then enumerate the submasks of each
// puzzle mask (a puzzle has 7 letters, so at most 2^7 = 128 submasks).
pub fn find_num_of_valid_words(words: Vec<String>, puzzles: Vec<String>) -> Vec<i32> {
    use std::collections::HashMap;

    let mask_of = |s: &str| s.bytes().fold(0u32, |m, b| m | 1 << (b - b'a'));

    // Words with more than 7 distinct letters can never fit in a puzzle.
    let mut freq: HashMap<u32, i32> = HashMap::new();
    for word in &words {
        let mask = mask_of(word);
        if mask.count_ones() <= 7 {
            *freq.entry(mask).or_insert(0) += 1;
        }
    }

    puzzles
        .iter()
        .map(|puzzle| {
            let first = 1u32 << (puzzle.as_bytes()[0] - b'a');
            let mask = mask_of(puzzle);
            let mut count = 0;
            // Standard submask walk: sub = (sub - 1) & mask visits every
            // subset of `mask`, ending at 0.
            let mut sub = mask;
            loop {
                if sub & first != 0 {
                    count += freq.get(&sub).copied().unwrap_or(0);
                }
                if sub == 0 {
                    break;
                }
                sub = (sub - 1) & mask;
            }
            count
        })
        .collect()
}

// bit_manipulation hash_table
#[test]
fn test1_1178() {
    use leetcode_prelude::vec_string;
    assert_eq!(
        find_num_of_valid_words(
            vec_string!["aaaa", "asas", "able", "ability", "actt", "actor", "access"],
            vec_string!["aboveyz", "abrodyz", "abslute", "absoryz", "actresz", "gaswxyz"],
        ),
        vec![1, 1, 3, 2, 4, 0]
    );
}
// GetIncType returns IncType based on which segment of the Version was changed. func GetIncType(oldVersion, newVersion *semver.Version) IncType { if newVersion.Major() > oldVersion.Major() { return IncTypes.Major } if newVersion.Minor() > oldVersion.Minor() { return IncTypes.Minor } if newVersion.Patch() > oldVersion.Patch() { return IncTypes.Patch } return IncTypes.None }
package com.createchance.imageeditor.ops;

import com.createchance.imageeditor.drawers.BokehFilterDrawer;

/**
 * Bokeh filter operator.
 *
 * @author createchance
 * @date 2018/11/28
 */
public class BokehFilterOperator extends AbstractOperator {
    private static final String TAG = "BokehFilterOperator";

    private BokehFilterDrawer mDrawer;

    private float mRadius = 0f;

    public BokehFilterOperator() {
        super(BokehFilterOperator.class.getSimpleName(), OP_BOKEH_FILTER);
    }

    @Override
    public boolean checkRational() {
        return true;
    }

    @Override
    public void exec() {
        mContext.attachOffScreenTexture(mContext.getOutputTextureId());
        if (mDrawer == null) {
            mDrawer = new BokehFilterDrawer();
        }
        mDrawer.setResolution(mContext.getRenderWidth(), mContext.getRenderHeight());
        mDrawer.setRadius(mRadius);
        mDrawer.draw(mContext.getInputTextureId(),
                0,
                0,
                mContext.getSurfaceWidth(),
                mContext.getSurfaceHeight());
        mContext.swapTexture();
    }

    public float getRadius() {
        return mRadius;
    }

    public void setRadius(float radius) {
        this.mRadius = radius;
    }
}
import sys


class Reader:
    def __init__(self, file):
        self.tok = []
        self.lines = file.readlines()
        self.tok_position = 0
        self.tok_length = 0
        self.line_position = 0

    def next_token(self):
        if self.tok_position < self.tok_length:
            self.tok_position += 1
            return self.tok[self.tok_position - 1]
        self.tok = self.lines[self.line_position].split()
        self.tok_length = len(self.tok)
        self.tok_position = 0
        self.line_position += 1
        return self.next_token()

    def next_line(self):
        self.tok = []
        self.tok_length = 0
        self.tok_position = 0
        self.line_position += 1
        return self.lines[self.line_position - 1]

    def next_int(self):
        return int(self.next_token())


def main():
    reader = Reader(sys.stdin)
    n = int(reader.next_line().strip())

    def convert_col_from_text(col_text):
        result = 0
        for ch in col_text:
            result = result * 26 + (ord(ch) - ord('A') + 1)
        return result

    def convert_col_to_text(col):
        result = ""
        while True:
            result += chr(ord('A') + ((col - 1) % 26))
            col = (col - 1) // 26
            if col == 0:
                break
        return result[::-1]

    def is_letter(ch):
        return 'A' <= ch <= 'Z'

    for i in range(n):
        line = reader.next_line().strip()
        # Split the cell reference into alternating runs of letters and digits.
        elements = []
        element_is_letters = False
        for ch in line:
            if (len(elements) == 0) or (element_is_letters != is_letter(ch)):
                elements.append(ch)
                element_is_letters = is_letter(ch)
            else:
                elements[-1] += ch
        if len(elements) == 2:
            # "BC23" style: convert to "R23C55".
            col_text, row_text = elements
            row = int(row_text)
            col = convert_col_from_text(col_text)
            print("R%sC%s" % (row, col))
        elif len(elements) == 4:
            # "R23C55" style: convert to "BC23".
            row = int(elements[1])
            col = int(elements[3])
            col_text = convert_col_to_text(col)
            print("%s%s" % (col_text, row))
        else:
            assert False


if __name__ == "__main__":
    main()
/** * Recover the nearest matching code to a specified location. * Given a short Open Location Code of between four and seven characters, * this recovers the nearest matching full code to the specified location. * The number of characters that will be prepended to the short code, depends * on the length of the short code and whether it starts with the separator. * If it starts with the separator, four characters will be prepended. If it * does not, the characters that will be prepended to the short code, where S * is the supplied short code and R are the computed characters, are as * follows: * SSSS -> RRRR.RRSSSS * SSSSS -> RRRR.RRSSSSS * SSSSSS -> RRRR.SSSSSS * SSSSSSS -> RRRR.SSSSSSS * Note that short codes with an odd number of characters will have their * last character decoded using the grid refinement algorithm. * Args: * shortCode: A valid short OLC character sequence. * referenceLatitude: The latitude (in signed decimal degrees) to use to * find the nearest matching full code. * referenceLongitude: The longitude (in signed decimal degrees) to use * to find the nearest matching full code. * Returns: * The nearest full Open Location Code to the reference location that matches * the short code. Note that the returned code may not have the same * computed characters as the reference location. This is because it returns * the nearest match, not necessarily the match within the same cell. If the * passed code was not a valid short code, but was a valid full code, it is * returned unchanged. */ public String recoverNearest(String shortCode, double referenceLatitude, double referenceLongitude) { if (!isShort(shortCode)) { if (isFull(shortCode)) { return shortCode; } else { throw new IllegalArgumentException("ValueError: Passed short code is not valid: " + shortCode); } } referenceLatitude = clipLatitude(referenceLatitude); referenceLongitude = normalizeLongitude(referenceLongitude); shortCode = shortCode.toUpperCase(); int paddingLength = SEPARATOR_POSITION_ - shortCode.indexOf(SEPARATOR_); double resolution = Math.pow(20, 2 - (paddingLength / 2)); double areaToEdge = resolution / 2.0; double roundedLatitude = Math.floor(referenceLatitude / resolution) * resolution; double roundedLongitude = Math.floor(referenceLongitude / resolution) * resolution; CodeArea codeArea = decode(encode(roundedLatitude, roundedLongitude, 0).substring(0, paddingLength) + shortCode); double degreesDifference = codeArea.latitudeCenter - referenceLatitude; if (degreesDifference > areaToEdge) { codeArea.latitudeCenter -= resolution; } else if (degreesDifference < -areaToEdge) { codeArea.latitudeCenter += resolution; } degreesDifference = codeArea.longitudeCenter - referenceLongitude; if (degreesDifference > areaToEdge) { codeArea.longitudeCenter -= resolution; } else if (degreesDifference < -areaToEdge) { codeArea.longitudeCenter += resolution; } return encode( codeArea.latitudeCenter, codeArea.longitudeCenter, codeArea.codeLength); }
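A hedged usage sketch of the method above. The enclosing class name (OpenLocationCode) and its no-argument constructor are assumptions; the coordinates are an illustrative reference point near Zurich:

// Hypothetical usage; class name and constructor are assumptions.
OpenLocationCode olc = new OpenLocationCode();
// "9G8F+6X" starts four characters short of a full code, so four
// characters are recovered from the reference location.
String full = olc.recoverNearest("9G8F+6X", 47.365590, 8.524997);
System.out.println(full); // e.g. "8FVC9G8F+6X"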
/*
 * Copyright 2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.mantisrx.server.worker.jobmaster.control.utils;

import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;

public class IntegratorTest {

    private final Observable<Double> data = Observable.just(1.0, -1.0, 0.0, -10.0);

    @Test
    public void shouldIntegrateOverInput() {
        Observable<Double> result = data.lift(new Integrator(0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(1.0, 0.0, 0.0, -10.0);
    }

    @Test
    public void shouldRespectMinimumValue() {
        Observable<Double> result = data.lift(new Integrator(0, 0.0, 10.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(1.0, 0.0, 0.0, 0.0);
    }

    @Test
    public void shouldRespectMaximumValue() {
        Observable<Double> result = data.lift(new Integrator(0, -100.0, 0.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(0.0, -1.0, -1.0, -11.0);
    }

    @Test
    public void shouldBeginFromInitialSuppliedValue() {
        Observable<Double> result = data.lift(new Integrator(1.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(2.0, 1.0, 1.0, -9.0);
    }
}
/** * Center the dialog box on the owner frame. * * @since 1.2 */ private void centerOnOwner() { int frameX = frame.getX(); int frameY = frame.getY(); int frameWidth = frame.getWidth(); int frameHeight = frame.getHeight(); int dialogWidth = getWidth(); int dialogHeight = getHeight(); int dialogX = frameX + (frameWidth/2) - (dialogWidth/2); int dialogY = frameY + (frameHeight/2) - (dialogHeight/2); setLocation(dialogX, dialogY); }
I don’t typically watch the 24-hour Big Brother feeds, partially because I believe the editors who create the thrice-weekly Big Brother episodes are idiot-savant geniuses and I don’t want to spoil my enjoyment of their work, partially because if I wanted to watch a microscopic examination of the chaotically banal meaningfulness of human existence, I could just go see The Tree of Life again. But this news is too good to wait: Yesterday, returning fan favorite Jeff suddenly discovered, to his horror, that Professor Dumbledore from the Harry Potter franchise was gay, which led the lovable lunkhead into a delirious rant that served as a handy reminder that the problem with lovable lunkheads is that they are, ultimately, lunkheads. After noting that Dumbledore “doesn’t have any gay tendencies,” which I guess means Dumbledore didn’t have a wide stance, Jeff proclaims: “He’s in school with little kids! You don’t want to make that guy gay!” When asked to explain what he means by fellow housemate Kalia, Jeff persists: “I don’t think it’s the right thing to have a kids’ book, and to have the headmaster that’s locked away in this magical land, to be gay. That isn’t the right kind of writing to do.” Yes, truly shocking that the most popular writer in the modern world would deign to retroactively shove homosexuality into our faces by ex post facto declaring that one single character in her cast of thousands was a dude who liked other dudes. Kalia continues to bait Jeff by asking him to calmly explain himself, at which point he exclaims, “Don’t start with that f—ing s–t! Don’t tell me the right answer for f—ing TV!” Kalia explains that, well, she’s not being PC — her little sister is gay. “I don’t give a f— if your little sister is gay!” explodes Jeff. The silence that follows is painfully awkward, and is only broken when Brendon, seemingly unaware that the conversation is not a discussion about recent fantasy cinema, asks, “Did anyone watch the C.S. Lewis? The Lion, the Witch, and the Wardrobe?” Which, in fairness, does not feature any gay characters (although Aslan the Lion is a curiously confirmed bachelor, the kind of guy that you always find in Alfred Hitchcock movies sharing cocktail giggles with kindly old spinsters). (UPDATE: Alas, video of Jeff’s rant has been pulled from YouTube. CBS released a statement about the blow-up, explaining: “Any views or opinions expressed in personal commentary by a houseguest appearing on Big Brother, either on any live feed from the house or the broadcast, are those of the individual(s) speaking and do not represent the views or opinions of CBS or the producers of the program.” It’s unclear if CBS will air Jeff’s statements. Left unsaid: How this will affect CBS’ viewership in the all-important Fictional Gay British Magician demographic.) Big Brother fans, does this change how you feel about Jeff? Follow Darren on Twitter: @EWDarrenFranich Read more: ‘Evel Dick’ Donato exits ‘Big Brother’ house ‘Big Brother’ recap: The Milky Way
/**
 * Copyright (c) 2020 libnuls developers (see AUTHORS)
 *
 * This file is part of libnuls.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <sstream>
#include <string>
#include <boost/lexical_cast.hpp>
#include <boost/test/unit_test.hpp>
#include <nuls/system.hpp>

using namespace nuls::system::config;

BOOST_AUTO_TEST_SUITE(block_tests)

static const std::string encoded_genesis_block =
    "01000000"
    "0000000000000000000000000000000000000000000000000000000000000000"
    "3ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a"
    "29ab5f49"
    "ffff001d"
    "1dac2b7c"
    "01"
    "01000000"
    "01"
    "0000000000000000000000000000000000000000000000000000000000000000ffffffff"
    "4d"
    "04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73"
    "ffffffff"
    "01"
    "00f2052a01000000"
    "43"
    "4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac"
    "00000000";

static const auto genesis_block = block(encoded_genesis_block);

// ------------------------------------------------------------------------- //

BOOST_AUTO_TEST_SUITE(block__construct)

BOOST_AUTO_TEST_CASE(block__construct__default)
{
    const block block;
    BOOST_REQUIRE_EQUAL(block.to_string(), std::string(162, '0'));
}

BOOST_AUTO_TEST_CASE(block__construct__copy__expected)
{
    const block block(genesis_block);
    BOOST_REQUIRE_EQUAL(block, genesis_block);
}

BOOST_AUTO_TEST_CASE(block__copy_assign__always__expected)
{
    block block;
    block = genesis_block;
    BOOST_REQUIRE_EQUAL(block, genesis_block);
}

BOOST_AUTO_TEST_CASE(block__construct__string__expected)
{
    const block block(encoded_genesis_block);
    BOOST_REQUIRE_EQUAL(block, genesis_block);
}

BOOST_AUTO_TEST_SUITE_END()

// ------------------------------------------------------------------------- //

BOOST_AUTO_TEST_SUITE(block__istream)

BOOST_AUTO_TEST_CASE(block__istream__populated__expected)
{
    block deserialized;
    std::stringstream serialized(encoded_genesis_block);
    serialized >> deserialized;
    BOOST_REQUIRE_EQUAL(deserialized.to_string(), encoded_genesis_block);
}

BOOST_AUTO_TEST_SUITE_END()

// ------------------------------------------------------------------------- //

BOOST_AUTO_TEST_SUITE(block__ostream)

BOOST_AUTO_TEST_CASE(block__ostream__empty__expected)
{
    std::stringstream serialized;
    serialized << block();
    BOOST_REQUIRE_EQUAL(serialized.str(), std::string(162, '0'));
}

BOOST_AUTO_TEST_CASE(block__ostream__populated__expected)
{
    std::stringstream serialized;
    serialized << genesis_block;
    BOOST_REQUIRE_EQUAL(serialized.str(), encoded_genesis_block);
}

BOOST_AUTO_TEST_CASE(block__ostream__boost_lexical_cast__expected)
{
    const auto serialized = boost::lexical_cast<std::string>(genesis_block);
    BOOST_REQUIRE_EQUAL(serialized, encoded_genesis_block);
}

BOOST_AUTO_TEST_SUITE_END()

BOOST_AUTO_TEST_SUITE_END()
package nl.hsac.fitnesse.fixture.util;

/**
 * Helpers for BSNs.
 */
public class BsnUtil {
    private RandomUtil randomUtil = new RandomUtil();

    /**
     * Generates random number that could be a BSN.
     * Based on: http://www.testnummers.nl/bsn.js
     * @return random BSN.
     */
    public String generateBsn() {
        int nr9 = randomUtil.random(3);
        int nr8 = randomUtil.random(10);
        int nr7 = randomUtil.random(10);
        int nr6 = randomUtil.random(10);
        int nr5 = randomUtil.random(10);
        int nr4 = randomUtil.random(10);
        int nr3 = randomUtil.random(10);
        int nr2 = randomUtil.random(10);

        // a BSN may not start with three zeroes
        if ((nr9 == 0) && (nr8 == 0) && (nr7 == 0)) {
            nr8 = 1;
        }
        int weightedSum = 9 * nr9 + 8 * nr8 + 7 * nr7 + 6 * nr6 + 5 * nr5
                            + 4 * nr4 + 3 * nr3 + 2 * nr2;
        // the last digit must make the number pass the 11-test
        int nr1 = weightedSum % 11;
        if (nr1 > 9) {
            // remainder 10 is not a digit; adjust the second digit so the
            // remainder becomes one (see the original bsn.js)
            if (nr2 > 0) {
                nr2 -= 1;
                nr1 = 8;
            } else {
                nr2 += 1;
                nr1 = 1;
            }
        }
        return "" + nr9 + nr8 + nr7 + nr6 + nr5 + nr4 + nr3 + nr2 + nr1;
    }

    /**
     * Checks whether BSN is valid.
     * Based on: https://mxforum.mendix.com/questions/2162/
     * @param bsn BSN to check.
     * @return true if it is structurally sound.
     */
    public boolean testBsn(String bsn) {
        // must be exactly nine digits; the original parseDouble() check let
        // values such as "12345678d" through, which then threw an uncaught
        // NumberFormatException in the digit loop below
        if (bsn == null || !bsn.matches("\\d{9}")) {
            return false;
        }
        int checksum = 0;
        for (int i = 0; i < 8; i++) {
            checksum += Character.getNumericValue(bsn.charAt(i)) * (9 - i);
        }
        checksum -= Character.getNumericValue(bsn.charAt(8));
        return checksum % 11 == 0;
    }
}
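A worked example of the 11-test ("elfproef") that testBsn implements; the sample numbers are made up, not real BSNs:

// 123456782: 9*1 + 8*2 + 7*3 + 6*4 + 5*5 + 4*6 + 3*7 + 2*8 = 156,
// and 156 - 2 = 154 is divisible by 11, so the check passes.
BsnUtil util = new BsnUtil();
System.out.println(util.testBsn("123456782")); // true
System.out.println(util.testBsn("123456789")); // false: 156 - 9 = 147, not divisible by 11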
export default interface Subject {
    avgGrade: number;
    subjectName: string;
    totalStudents: number;
    failedStudents: number;
    subjectTeacher: string;
    isActive: boolean;
}
declare type v1Key = string | number | boolean | null | Record<string, unknown> | undefined; export declare function hashV1PartitionKey(partitionKey: v1Key): string; export {}; //# sourceMappingURL=v1.d.ts.map
/** * Implementation of EvolvingSchema which supports 'Rules Processor' component. */ public class RulesProcessorSchemaEvolver implements EvolvingSchema { private final ObjectMapper objectMapper = new ObjectMapper(); @Override public Set<Stream> apply(String config, Stream inputStream) throws BadComponentConfigException { try { Map<String, Object> componentConfig = objectMapper.readValue(config, Map.class); Map<String, Object> rulesConfig = (Map<String, Object>) componentConfig.get( TopologyLayoutConstants.JSON_KEY_CONFIG); Map<String, Object> rulesProcessorConfig = (Map<String, Object>) rulesConfig.get( TopologyLayoutConstants.JSON_KEY_RULES_PROCESSOR_CONFIG); RulesProcessor rulesProcessor = buildRulesProcessor(rulesProcessorConfig); Set<Stream> streams = Sets.newHashSet(); for (Rule rule : rulesProcessor.getRules()) { streams.addAll(extractStreamsFromRule(inputStream, rule)); } return streams; } catch (Exception e) { throw new BadComponentConfigException("Exception while simulating evolution of rules schema", e); } } private RulesProcessor buildRulesProcessor(Map<String, Object> rulesProcessorConfig) throws JsonProcessingException { String rulesProcessorConfigJson = objectMapper.writeValueAsString(rulesProcessorConfig); return Utils.createObjectFromJson(rulesProcessorConfigJson, RulesProcessor.class); } private Set<Stream> extractStreamsFromRule(Stream inputStream, Rule rule) throws ParserException { Set<Stream> streamSet = Sets.newHashSet(); // TODO: do we evaluate all rules per each input stream? if not, how we connect (input stream, rule) and how we know it? for (Action action : rule.getActions()) { streamSet.add(extractSchemaFromAction(inputStream, rule, action)); } return streamSet; } private Stream extractSchemaFromAction(Stream inputStream, Rule rule, Action action) throws ParserException { String streamId = rule.getOutputStreamNameForAction(action); Map<String, Object> outputFieldsAndDefaults = null ; if(action instanceof NotifierAction) { outputFieldsAndDefaults = ((NotifierAction) action).getOutputFieldsAndDefaults(); } if (outputFieldsAndDefaults != null && !outputFieldsAndDefaults.isEmpty()) { Schema schema = simulateFieldsProjection(inputStream.getSchema(), outputFieldsAndDefaults); return new Stream(streamId, schema); } else { // no projection return new Stream(streamId, inputStream.getSchema()); } } private Schema simulateFieldsProjection(Schema inputSchema, Map<String, Object> outputFieldsAndDefaults) throws ParserException { Schema.SchemaBuilder schemaBuilder = new Schema.SchemaBuilder(); // projection for (Map.Entry<String, Object> fieldAndDefault : outputFieldsAndDefaults.entrySet()) { String fieldName = fieldAndDefault.getKey(); Object defaultValue = fieldAndDefault.getValue(); Schema.Field fieldFromDefault = Schema.Field.of(fieldName, Schema.fromJavaType(defaultValue)); Schema.Field foundField = findFieldByName(inputSchema.getFields(), fieldFromDefault.getName()); if (null != foundField) { schemaBuilder.field(foundField); } else { schemaBuilder.field(fieldFromDefault); } } return schemaBuilder.build(); } private Schema.Field findFieldByName(List<Schema.Field> fields, String fieldName) { for (Schema.Field field : fields) { if (field.getName().equals(fieldName)) { return field; } } return null; } }
/// Builds a transaction from the configured spends and outputs. /// /// Upon success, returns a tuple containing the final transaction, and the /// [`SaplingMetadata`] generated during the build process. /// /// `consensus_branch_id` must be valid for the block height that this transaction is /// targeting. An invalid `consensus_branch_id` will *not* result in an error from /// this function, and instead will generate a transaction that will be rejected by /// the network. pub fn build( mut self, prover: &impl TxProver, ) -> Result<(Transaction, SaplingMetadata), Error> { let consensus_branch_id = BranchId::for_height(&self.params, self.target_height); // determine transaction version let version = TxVersion::suggested_for_branch(consensus_branch_id); // // Consistency checks // // Valid change let change = (self.value_balance()? - self.fee).ok_or(Error::InvalidAmount)?; if change.is_negative() { return Err(Error::ChangeIsNegative(change)); } // // Change output // if change.is_positive() { match self.change_address.take() { Some(ChangeAddress::SaplingChangeAddress(ovk, addr)) => { self.add_sapling_output(Some(ovk), addr, change, None)?; } None => { let (ovk, addr) = self .sapling_builder .get_candidate_change_address() .ok_or(Error::NoChangeAddress)?; self.add_sapling_output(Some(ovk), addr, change, None)?; } } } let transparent_bundle = self.transparent_builder.build(); let mut ctx = prover.new_sapling_proving_context(); let (sapling_bundle, tx_metadata) = self .sapling_builder .build( prover, &mut ctx, &mut self.rng, self.target_height, self.progress_notifier.as_ref(), ) .map_err(Error::SaplingBuild)?; #[cfg(feature = "zfuture")] let tze_bundle = self.tze_builder.build(); let unauthed_tx = TransactionData { version, lock_time: 0, expiry_height: self.expiry_height, transparent_bundle, sprout_bundle: None, sapling_bundle, orchard_bundle: None, #[cfg(feature = "zfuture")] tze_bundle, }; // // Signatures -- everything but the signatures must already have been added. // let mut sighash = [0u8; 32]; sighash.copy_from_slice(&signature_hash_data( &unauthed_tx, consensus_branch_id, SIGHASH_ALL, SignableInput::Shielded, )); #[cfg(feature = "transparent-inputs")] let transparent_sigs = self .transparent_builder .create_signatures(&unauthed_tx, consensus_branch_id); let sapling_sigs = self .sapling_builder .create_signatures(prover, &mut ctx, &mut self.rng, &sighash, &tx_metadata) .map_err(Error::SaplingBuild)?; #[cfg(feature = "zfuture")] let tze_witnesses = self .tze_builder .create_witnesses(&unauthed_tx) .map_err(Error::TzeBuild)?; Ok(( Self::apply_signatures( consensus_branch_id, unauthed_tx, #[cfg(feature = "transparent-inputs")] transparent_sigs, sapling_sigs, None, #[cfg(feature = "zfuture")] tze_witnesses, ) .expect("An IO error occurred applying signatures."), tx_metadata, )) }
/**
 * An export job result
 * @author rspace
 * @since 1.3
 *
 */
@Data
public class ExportJobResult implements Result {

    private String checksum;

    private String algorithm;

    private Long size;

    /**
     * An expiry time after which the result may no longer be accessible.
     */
    private Date expiryDate;
}
def check_food_bowl(self): files = [] has_food = False for (dirpath, dirnames, filenames) in os.walk(self.food_bowl): check_files = [os.path.join(dirpath, file) for file in filenames] files.append(check_files) if len(check_files) > 0: for file in check_files: self.add_food(file) if len(self.stomach) > 0: has_food = True return has_food
def from_rodrigues(cls, r_vec, ctype=ctypes.c_double): r_vec = EigenArray.from_iterable(r_vec, ctype, (3, 1)) s = cls._gen_spec(ctype) r_from_rod = cls._get_c_function(s, 'new_from_rodrigues') r_from_rod.argtypes = [EigenArray.c_ptr_type(3, 1, ctype), VitalErrorHandle.C_TYPE_PTR] r_from_rod.restype = cls.C_TYPE_PTR[s] with VitalErrorHandle() as eh: r_ptr = r_from_rod(r_vec, eh) return Rotation(ctype, r_ptr)
/// Continuously tracks CDS and EDS resources on an ADS server, /// sending summarized cluster updates on the provided channel. pub async fn run( self, node_id: String, management_servers: Vec<ManagementServer>, cluster_updates_tx: mpsc::Sender<ClusterUpdate>, listener_manager_args: ListenerManagerArgs, mut shutdown_rx: watch::Receiver<()>, ) -> Result<()> { let metrics = self.metrics; let mut server_iter = management_servers.iter().cycle(); let mut backoff = ExponentialBackoff::new(Duration::from_millis(BACKOFF_INITIAL_DELAY_MILLISECONDS)); let max_delay = Duration::from_secs(BACKOFF_MAX_DELAY_SECONDS); let retry_config = RetryFutureConfig::new(u32::MAX).custom_backoff(|attempt, error: &_| { // reset after success if attempt <= 1 { backoff = ExponentialBackoff::new(Duration::from_millis( BACKOFF_INITIAL_DELAY_MILLISECONDS, )); } // max delay + jitter of up to 2 seconds let mut delay = backoff.delay(attempt, &error); if delay > max_delay { delay = max_delay; } delay += Duration::from_millis( rand::thread_rng().gen_range(0..BACKOFF_MAX_JITTER_MILLISECONDS), ); match error { RpcSessionError::NonRecoverable(message, error) => { tracing::error!(%message, %error); RetryPolicy::Break } RpcSessionError::InitialConnect(ref error) => { tracing::error!(%error, "Unable to connect to the XDS server"); // Do not retry if this is an invalid URL error that we cannot recover from. // Need to use {:?} as the Display output only returns 'transport error' let err_description = format!("{:?}", error); if err_description.to_lowercase().contains("invalid url") { RetryPolicy::Break } else { RetryPolicy::Delay(delay) } } RpcSessionError::Receive(ref status) => { tracing::error!(status = ?status, "Failed to receive response from XDS server"); RetryPolicy::Delay(delay) } } }); let session_shutdown_rx = shutdown_rx.clone(); let handle = tryhard::retry_fn(|| { let (discovery_req_tx, discovery_req_rx) = mpsc::channel::<DiscoveryRequest>(UPDATES_CHANNEL_BUFFER_SIZE); let cluster_manager = ClusterManager::new(cluster_updates_tx.clone(), discovery_req_tx.clone()); let listener_manager = ListenerManager::new(listener_manager_args.clone(), discovery_req_tx); let resource_handlers = ResourceHandlers { cluster_manager, listener_manager, }; RpcSession { discovery_req_rx, metrics: metrics.clone(), node_id: node_id.clone(), // server_iter is guaranteed to always have at least one entry. addr: server_iter .next() .map(|server| server.address.to_string()) .unwrap(), resource_handlers, shutdown_rx: session_shutdown_rx.clone(), } .run() }) .with_config(retry_config); tokio::select! { result = handle => result.map(drop).map_err(|error| eyre::eyre!(error)), _ = shutdown_rx.changed() => { tracing::info!("Stopping client execution - received shutdown signal."); Ok(()) }, } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elasticbeanstalk.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * The record of a completed or failed managed action. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/ManagedActionHistoryItem" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ManagedActionHistoryItem implements Serializable, Cloneable { /** * <p> * A unique identifier for the managed action. * </p> */ private String actionId; /** * <p> * The type of the managed action. * </p> */ private String actionType; /** * <p> * A description of the managed action. * </p> */ private String actionDescription; /** * <p> * If the action failed, the type of failure. * </p> */ private String failureType; /** * <p> * The status of the action. * </p> */ private String status; /** * <p> * If the action failed, a description of the failure. * </p> */ private String failureDescription; /** * <p> * The date and time that the action started executing. * </p> */ private java.util.Date executedTime; /** * <p> * The date and time that the action finished executing. * </p> */ private java.util.Date finishedTime; /** * <p> * A unique identifier for the managed action. * </p> * * @param actionId * A unique identifier for the managed action. */ public void setActionId(String actionId) { this.actionId = actionId; } /** * <p> * A unique identifier for the managed action. * </p> * * @return A unique identifier for the managed action. */ public String getActionId() { return this.actionId; } /** * <p> * A unique identifier for the managed action. * </p> * * @param actionId * A unique identifier for the managed action. * @return Returns a reference to this object so that method calls can be chained together. */ public ManagedActionHistoryItem withActionId(String actionId) { setActionId(actionId); return this; } /** * <p> * The type of the managed action. * </p> * * @param actionType * The type of the managed action. * @see ActionType */ public void setActionType(String actionType) { this.actionType = actionType; } /** * <p> * The type of the managed action. * </p> * * @return The type of the managed action. * @see ActionType */ public String getActionType() { return this.actionType; } /** * <p> * The type of the managed action. * </p> * * @param actionType * The type of the managed action. * @return Returns a reference to this object so that method calls can be chained together. * @see ActionType */ public ManagedActionHistoryItem withActionType(String actionType) { setActionType(actionType); return this; } /** * <p> * The type of the managed action. * </p> * * @param actionType * The type of the managed action. * @see ActionType */ public void setActionType(ActionType actionType) { withActionType(actionType); } /** * <p> * The type of the managed action. * </p> * * @param actionType * The type of the managed action. 
* @return Returns a reference to this object so that method calls can be chained together. * @see ActionType */ public ManagedActionHistoryItem withActionType(ActionType actionType) { this.actionType = actionType.toString(); return this; } /** * <p> * A description of the managed action. * </p> * * @param actionDescription * A description of the managed action. */ public void setActionDescription(String actionDescription) { this.actionDescription = actionDescription; } /** * <p> * A description of the managed action. * </p> * * @return A description of the managed action. */ public String getActionDescription() { return this.actionDescription; } /** * <p> * A description of the managed action. * </p> * * @param actionDescription * A description of the managed action. * @return Returns a reference to this object so that method calls can be chained together. */ public ManagedActionHistoryItem withActionDescription(String actionDescription) { setActionDescription(actionDescription); return this; } /** * <p> * If the action failed, the type of failure. * </p> * * @param failureType * If the action failed, the type of failure. * @see FailureType */ public void setFailureType(String failureType) { this.failureType = failureType; } /** * <p> * If the action failed, the type of failure. * </p> * * @return If the action failed, the type of failure. * @see FailureType */ public String getFailureType() { return this.failureType; } /** * <p> * If the action failed, the type of failure. * </p> * * @param failureType * If the action failed, the type of failure. * @return Returns a reference to this object so that method calls can be chained together. * @see FailureType */ public ManagedActionHistoryItem withFailureType(String failureType) { setFailureType(failureType); return this; } /** * <p> * If the action failed, the type of failure. * </p> * * @param failureType * If the action failed, the type of failure. * @see FailureType */ public void setFailureType(FailureType failureType) { withFailureType(failureType); } /** * <p> * If the action failed, the type of failure. * </p> * * @param failureType * If the action failed, the type of failure. * @return Returns a reference to this object so that method calls can be chained together. * @see FailureType */ public ManagedActionHistoryItem withFailureType(FailureType failureType) { this.failureType = failureType.toString(); return this; } /** * <p> * The status of the action. * </p> * * @param status * The status of the action. * @see ActionHistoryStatus */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the action. * </p> * * @return The status of the action. * @see ActionHistoryStatus */ public String getStatus() { return this.status; } /** * <p> * The status of the action. * </p> * * @param status * The status of the action. * @return Returns a reference to this object so that method calls can be chained together. * @see ActionHistoryStatus */ public ManagedActionHistoryItem withStatus(String status) { setStatus(status); return this; } /** * <p> * The status of the action. * </p> * * @param status * The status of the action. * @see ActionHistoryStatus */ public void setStatus(ActionHistoryStatus status) { withStatus(status); } /** * <p> * The status of the action. * </p> * * @param status * The status of the action. * @return Returns a reference to this object so that method calls can be chained together. 
* @see ActionHistoryStatus */ public ManagedActionHistoryItem withStatus(ActionHistoryStatus status) { this.status = status.toString(); return this; } /** * <p> * If the action failed, a description of the failure. * </p> * * @param failureDescription * If the action failed, a description of the failure. */ public void setFailureDescription(String failureDescription) { this.failureDescription = failureDescription; } /** * <p> * If the action failed, a description of the failure. * </p> * * @return If the action failed, a description of the failure. */ public String getFailureDescription() { return this.failureDescription; } /** * <p> * If the action failed, a description of the failure. * </p> * * @param failureDescription * If the action failed, a description of the failure. * @return Returns a reference to this object so that method calls can be chained together. */ public ManagedActionHistoryItem withFailureDescription(String failureDescription) { setFailureDescription(failureDescription); return this; } /** * <p> * The date and time that the action started executing. * </p> * * @param executedTime * The date and time that the action started executing. */ public void setExecutedTime(java.util.Date executedTime) { this.executedTime = executedTime; } /** * <p> * The date and time that the action started executing. * </p> * * @return The date and time that the action started executing. */ public java.util.Date getExecutedTime() { return this.executedTime; } /** * <p> * The date and time that the action started executing. * </p> * * @param executedTime * The date and time that the action started executing. * @return Returns a reference to this object so that method calls can be chained together. */ public ManagedActionHistoryItem withExecutedTime(java.util.Date executedTime) { setExecutedTime(executedTime); return this; } /** * <p> * The date and time that the action finished executing. * </p> * * @param finishedTime * The date and time that the action finished executing. */ public void setFinishedTime(java.util.Date finishedTime) { this.finishedTime = finishedTime; } /** * <p> * The date and time that the action finished executing. * </p> * * @return The date and time that the action finished executing. */ public java.util.Date getFinishedTime() { return this.finishedTime; } /** * <p> * The date and time that the action finished executing. * </p> * * @param finishedTime * The date and time that the action finished executing. * @return Returns a reference to this object so that method calls can be chained together. */ public ManagedActionHistoryItem withFinishedTime(java.util.Date finishedTime) { setFinishedTime(finishedTime); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getActionId() != null) sb.append("ActionId: ").append(getActionId()).append(","); if (getActionType() != null) sb.append("ActionType: ").append(getActionType()).append(","); if (getActionDescription() != null) sb.append("ActionDescription: ").append(getActionDescription()).append(","); if (getFailureType() != null) sb.append("FailureType: ").append(getFailureType()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getFailureDescription() != null) sb.append("FailureDescription: ").append(getFailureDescription()).append(","); if (getExecutedTime() != null) sb.append("ExecutedTime: ").append(getExecutedTime()).append(","); if (getFinishedTime() != null) sb.append("FinishedTime: ").append(getFinishedTime()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ManagedActionHistoryItem == false) return false; ManagedActionHistoryItem other = (ManagedActionHistoryItem) obj; if (other.getActionId() == null ^ this.getActionId() == null) return false; if (other.getActionId() != null && other.getActionId().equals(this.getActionId()) == false) return false; if (other.getActionType() == null ^ this.getActionType() == null) return false; if (other.getActionType() != null && other.getActionType().equals(this.getActionType()) == false) return false; if (other.getActionDescription() == null ^ this.getActionDescription() == null) return false; if (other.getActionDescription() != null && other.getActionDescription().equals(this.getActionDescription()) == false) return false; if (other.getFailureType() == null ^ this.getFailureType() == null) return false; if (other.getFailureType() != null && other.getFailureType().equals(this.getFailureType()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getFailureDescription() == null ^ this.getFailureDescription() == null) return false; if (other.getFailureDescription() != null && other.getFailureDescription().equals(this.getFailureDescription()) == false) return false; if (other.getExecutedTime() == null ^ this.getExecutedTime() == null) return false; if (other.getExecutedTime() != null && other.getExecutedTime().equals(this.getExecutedTime()) == false) return false; if (other.getFinishedTime() == null ^ this.getFinishedTime() == null) return false; if (other.getFinishedTime() != null && other.getFinishedTime().equals(this.getFinishedTime()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getActionId() == null) ? 0 : getActionId().hashCode()); hashCode = prime * hashCode + ((getActionType() == null) ? 0 : getActionType().hashCode()); hashCode = prime * hashCode + ((getActionDescription() == null) ? 0 : getActionDescription().hashCode()); hashCode = prime * hashCode + ((getFailureType() == null) ? 0 : getFailureType().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getFailureDescription() == null) ? 0 : getFailureDescription().hashCode()); hashCode = prime * hashCode + ((getExecutedTime() == null) ? 
0 : getExecutedTime().hashCode()); hashCode = prime * hashCode + ((getFinishedTime() == null) ? 0 : getFinishedTime().hashCode()); return hashCode; } @Override public ManagedActionHistoryItem clone() { try { return (ManagedActionHistoryItem) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
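A brief sketch of how the fluent with* mutators above chain together. The id value is made up, and ActionType.PlatformUpdate and ActionHistoryStatus.Completed are assumed to exist in the accompanying enum types:

ManagedActionHistoryItem item = new ManagedActionHistoryItem()
        .withActionId("8b2e4160-9a28-430f-ba31-a85abf9b4fc4") // made-up id
        .withActionType(ActionType.PlatformUpdate)
        .withStatus(ActionHistoryStatus.Completed)
        .withExecutedTime(new java.util.Date());
System.out.println(item); // string form produced by toString() above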
The Dummett Freighter: A nineteenth-century log sailing canoe from northeastern Florida This article provides a comparative physical and cultural study of a cypress log sailing canoe and the plantation culture of nineteenth-century north-eastern Florida that created it. The author makes the argument that this and other vessels of similar construction represent a typology of log boat construction that was limited to Florida’s north-east during the mid- to late nineteenth century. These vessels evolved to provide a means of commerce between the settlements along the rivers and estuaries of north-eastern Florida. Those vessels that remain provide an important historic link with early American settlers.
// This method gets called by the TCP module when an error occurs. static void ftp_MsgConnError(void *arg, err_t err) { UARTprintf("ftp_MsgConnError Called!\r\n"); PrintErrorNum(err); }
import { PairingTypes } from "@walletconnect/types";
export declare const PAIRING_JSONRPC: PairingTypes.JsonRpc;
export declare const PAIRING_CONTEXT = "pairing";
export declare const PAIRING_DEFAULT_TTL = 2592000;
export declare const PAIRING_SIGNAL_METHOD_URI: "uri";
export declare const PAIRING_STATUS: {
    proposed: "proposed";
    responded: "responded";
    pending: string;
    settled: string;
};
export declare const PAIRING_EVENTS: PairingTypes.Events;
//# sourceMappingURL=pairing.d.ts.map
// NewMetaHandler creates a MetaHandle for a given extensions. func NewMetaHandler(in string) *MetaHandle { x := &MetaHandle{ext: in} x.Handler() return x }
/** Represents the arity of a connective.
 *
 * @author Adolfo Gustavo Serra Seca Neto
 *
 */
public class Arity {

    public static final Arity UNARY = new Arity();
    public static final Arity BINARY = new Arity();
    public static final Arity ZEROARY = new Arity();
    public static final Arity NARY = new Arity();

    private Arity() {
    }
}
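Because the constructor above is private, the four constants are the only instances that can ever exist, so arities can be compared by reference. A small illustrative sketch:

Arity arity = Arity.BINARY;
if (arity == Arity.BINARY) { // identity comparison is safe for typesafe enums
    System.out.println("binary connective");
}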
package grpc

import (
	"context"

	"google.golang.org/grpc/codes"
	grpc_health "google.golang.org/grpc/health/grpc_health_v1"
	"google.golang.org/grpc/status"

	"github.com/pomerium/pomerium/internal/log"
)

type healthCheckSrv struct {
}

// NewHealthCheckServer returns a basic health checker
func NewHealthCheckServer() grpc_health.HealthServer {
	return &healthCheckSrv{}
}

// Check confirms the service is reachable, and assumes any reachable service is operational;
// outlier detection should be used to detect runtime malfunction based on consecutive 5xx responses
func (h *healthCheckSrv) Check(ctx context.Context, req *grpc_health.HealthCheckRequest) (*grpc_health.HealthCheckResponse, error) {
	log.Debug(ctx).Str("service", req.Service).Msg("health check")
	return &grpc_health.HealthCheckResponse{
		Status: grpc_health.HealthCheckResponse_SERVING,
	}, nil
}

// Watch is not implemented as it is not used by Envoy
func (h *healthCheckSrv) Watch(req *grpc_health.HealthCheckRequest, _ grpc_health.Health_WatchServer) error {
	log.Error(context.Background()).Str("service", req.Service).Msg("health check watch")
	return status.Errorf(codes.Unimplemented, "method Watch not implemented")
}
{-# LANGUAGE PartialTypeSignatures, TypeFamilies, InstanceSigs #-} module WildcardInTypeFamilyInstanceRHS where class Foo k where type Dual k :: * instance Foo Int where type Dual Int = Maybe _
import Parser = require("@yang991178/rss-parser")
import intl = require("react-intl-universal")
import * as db from "../db"
import { fetchFavicon, ActionStatus, AppThunk, parseRSS } from "../utils"
import { RSSItem, insertItems, ItemActionTypes, FETCH_ITEMS, MARK_READ, MARK_UNREAD, MARK_ALL_READ } from "./item"
import { SourceGroup } from "./group"
import { saveSettings } from "./app"
import { remote } from "electron"

export enum SourceOpenTarget {
    Local, Webpage, External
}

export class RSSSource {
    sid: number
    url: string
    iconurl: string
    name: string
    openTarget: SourceOpenTarget
    unreadCount: number
    lastFetched: Date
    fetchFrequency?: number // in minutes

    constructor(url: string, name: string = null) {
        this.url = url
        this.name = name
        this.openTarget = SourceOpenTarget.Local
        this.lastFetched = new Date()
    }

    static async fetchMetaData(source: RSSSource) {
        let feed = await parseRSS(source.url)
        if (!source.name) {
            if (feed.title) source.name = feed.title.trim()
            source.name = source.name || intl.get("sources.untitled")
        }
        let domain = source.url.split("/").slice(0, 3).join("/")
        try {
            let f = await fetchFavicon(domain)
            if (f !== null) source.iconurl = f
        } finally {
            return feed
        }
    }

    private static checkItem(source: RSSSource, item: Parser.Item): Promise<RSSItem> {
        return new Promise<RSSItem>((resolve, reject) => {
            let i = new RSSItem(item, source)
            db.idb.findOne({
                source: i.source,
                title: i.title,
                date: i.date
            },
            (err, doc) => {
                if (err) {
                    reject(err)
                } else if (doc === null) {
                    resolve(i)
                } else {
                    resolve(null)
                }
            })
        })
    }

    static checkItems(source: RSSSource, items: Parser.Item[]): Promise<RSSItem[]> {
        return new Promise<RSSItem[]>((resolve, reject) => {
            let p = new Array<Promise<RSSItem>>()
            for (let item of items) {
                p.push(this.checkItem(source, item))
            }
            Promise.all(p).then(values => {
                resolve(values.filter(v => v != null))
            }).catch(e => {
                reject(e)
            })
        })
    }

    static async fetchItems(source: RSSSource) {
        let feed = await parseRSS(source.url)
        db.sdb.update({ sid: source.sid }, { $set: { lastFetched: new Date() } })
        return await this.checkItems(source, feed.items)
    }
}

export type SourceState = { [sid: number]: RSSSource }

export const INIT_SOURCES = "INIT_SOURCES"
export const ADD_SOURCE = "ADD_SOURCE"
export const UPDATE_SOURCE = "UPDATE_SOURCE"
export const DELETE_SOURCE = "DELETE_SOURCE"

interface InitSourcesAction {
    type: typeof INIT_SOURCES
    status: ActionStatus
    sources?: RSSSource[]
    err?
}

interface AddSourceAction {
    type: typeof ADD_SOURCE
    status: ActionStatus
    batch: boolean
    source?: RSSSource
    err?
} interface UpdateSourceAction { type: typeof UPDATE_SOURCE source: RSSSource } interface DeleteSourceAction { type: typeof DELETE_SOURCE, source: RSSSource } export type SourceActionTypes = InitSourcesAction | AddSourceAction | UpdateSourceAction | DeleteSourceAction export function initSourcesRequest(): SourceActionTypes { return { type: INIT_SOURCES, status: ActionStatus.Request } } export function initSourcesSuccess(sources: RSSSource[]): SourceActionTypes { return { type: INIT_SOURCES, status: ActionStatus.Success, sources: sources } } export function initSourcesFailure(err): SourceActionTypes { return { type: INIT_SOURCES, status: ActionStatus.Failure, err: err } } function unreadCount(source: RSSSource): Promise<RSSSource> { return new Promise<RSSSource>((resolve, reject) => { db.idb.count({ source: source.sid, hasRead: false }, (err, n) => { if (err) { reject(err) } else { source.unreadCount = n resolve(source) } }) }) } export function initSources(): AppThunk<Promise<void>> { return (dispatch) => { dispatch(initSourcesRequest()) return new Promise<void>((resolve, reject) => { db.sdb.find({}).sort({ sid: 1 }).exec((err, sources) => { if (err) { dispatch(initSourcesFailure(err)) reject(err) } else { let p = sources.map(s => unreadCount(s)) Promise.all(p) .then(values => { dispatch(initSourcesSuccess(values)) resolve() }) .catch(err => reject(err)) } }) }) } } export function addSourceRequest(batch: boolean): SourceActionTypes { return { type: ADD_SOURCE, batch: batch, status: ActionStatus.Request } } export function addSourceSuccess(source: RSSSource, batch: boolean): SourceActionTypes { return { type: ADD_SOURCE, batch: batch, status: ActionStatus.Success, source: source } } export function addSourceFailure(err, batch: boolean): SourceActionTypes { return { type: ADD_SOURCE, batch: batch, status: ActionStatus.Failure, err: err } } function insertSource(source: RSSSource, trials = 0): AppThunk<Promise<RSSSource>> { return (dispatch, getState) => { return new Promise((resolve, reject) => { if (trials >= 25) { reject("Failed to insert the source into NeDB.") return } let sids = Object.values(getState().sources).map(s => s.sid) source.sid = Math.max(...sids, -1) + 1 db.sdb.insert(source, (err, inserted) => { if (err) { if (/^Can't insert key [0-9]+,/.test(err.message)) { console.log("sid conflict") dispatch(insertSource(source, trials + 1)) .then(inserted => resolve(inserted)) .catch(err => reject(err)) } else { reject(err) } } else { resolve(inserted) } }) }) } } export function addSource(url: string, name: string = null, batch = false): AppThunk<Promise<number>> { return (dispatch, getState) => { let app = getState().app if (app.sourceInit) { dispatch(addSourceRequest(batch)) let source = new RSSSource(url, name) return RSSSource.fetchMetaData(source) .then(feed => { return dispatch(insertSource(source)) .then(inserted => { inserted.unreadCount = feed.items.length dispatch(addSourceSuccess(inserted, batch)) return RSSSource.checkItems(inserted, feed.items) .then(items => insertItems(items)) .then(() => { SourceGroup.save(getState().groups) return inserted.sid }) }) }) .catch(e => { dispatch(addSourceFailure(e, batch)) if (!batch) { remote.dialog.showErrorBox(intl.get("sources.errorAdd"), String(e)) } return Promise.reject(e) }) } return new Promise((_, reject) => { reject("Sources not initialized.") }) } } export function updateSourceDone(source: RSSSource): SourceActionTypes { return { type: UPDATE_SOURCE, source: source } } export function updateSource(source: RSSSource): AppThunk 
{ return (dispatch) => { let sourceCopy = { ...source } delete sourceCopy.sid delete sourceCopy.unreadCount db.sdb.update({ sid: source.sid }, { $set: { ...sourceCopy }}, {}, err => { if (!err) { dispatch(updateSourceDone(source)) } }) } } export function deleteSourceDone(source: RSSSource): SourceActionTypes { return { type: DELETE_SOURCE, source: source } } export function deleteSource(source: RSSSource): AppThunk { return (dispatch, getState) => { dispatch(saveSettings()) db.idb.remove({ source: source.sid }, { multi: true }, (err) => { if (err) { console.log(err) dispatch(saveSettings()) } else { db.sdb.remove({ sid: source.sid }, {}, (err) => { if (err) { console.log(err) dispatch(saveSettings()) } else { dispatch(deleteSourceDone(source)) SourceGroup.save(getState().groups) dispatch(saveSettings()) } }) } }) } } export function sourceReducer( state: SourceState = {}, action: SourceActionTypes | ItemActionTypes ): SourceState { switch (action.type) { case INIT_SOURCES: switch (action.status) { case ActionStatus.Success: { let newState: SourceState = {} for (let source of action.sources) { newState[source.sid] = source } return newState } default: return state } case ADD_SOURCE: switch (action.status) { case ActionStatus.Success: return { ...state, [action.source.sid]: action.source } default: return state } case UPDATE_SOURCE: return { ...state, [action.source.sid]: action.source } case DELETE_SOURCE: { delete state[action.source.sid] return { ...state } } case FETCH_ITEMS: { switch (action.status) { case ActionStatus.Success: { let updateMap = new Map<number, number>() for (let item of action.items) { updateMap.set( item.source, updateMap.has(item.source) ? (updateMap.get(item.source) + 1) : 1) } let nextState = {} as SourceState for (let [s, source] of Object.entries(state)) { let sid = parseInt(s) if (updateMap.has(sid)) { nextState[sid] = { ...source, unreadCount: source.unreadCount + updateMap.get(sid) } as RSSSource } else { nextState[sid] = source } } return nextState } default: return state } } case MARK_UNREAD: case MARK_READ: return { ...state, [action.item.source]: { ...state[action.item.source], unreadCount: state[action.item.source].unreadCount + (action.type === MARK_UNREAD ? 1 : -1) } as RSSSource } case MARK_ALL_READ: { let nextState = {} as SourceState let sids = new Set(action.sids) for (let [s, source] of Object.entries(state)) { let sid = parseInt(s) if (sids.has(sid) && source.unreadCount > 0) { nextState[sid] = { ...source, unreadCount: 0 } as RSSSource } else { nextState[sid] = source } } return nextState } default: return state } }
import gpuadder
import numpy as np
import numpy.testing as npt

def test():
    arr = np.array(np.linspace(1, 128, 128), dtype=np.int32)
    adder = gpuadder.GPUAdder(arr)
    adder.increment()
    adder.retreive_inplace()
    results2 = adder.retreive()
    npt.assert_array_equal(arr, np.linspace(1, 128, 128) + 1)
    npt.assert_array_equal(results2, np.linspace(1, 128, 128) + 1)
    print(arr)
    print(results2)

if __name__ == '__main__':
    test()
#pragma GCC optimize("Ofast") #pragma GCC optimization ("O3") #pragma GCC optimize ("unroll-loops") #pragma GCC target("avx,avx2,fma") #include<bits/stdc++.h> #include<ext/pb_ds/assoc_container.hpp> #include<ext/pb_ds/tree_policy.hpp> using namespace __gnu_pbds; #define p 1000000007 #define fi first #define se second #define pii pair<int,int> #define pll pair<long long,long long> #define LONGLONG_MAX 10000000000000000 using namespace std; template <typename T> using ordered_set = tree<T, null_type, less<T>, rb_tree_tag, tree_order_statistics_node_update>; int main(){ ios_base::sync_with_stdio(false); cin.tie(NULL); cout.tie(NULL); int n,m; cin>>n>>m; multiset<int>s; for(int i=0;i<n;i++){ int x; cin>>x; s.insert(x); } vector<pii>v(m); for(int i=0;i<m;i++){ cin>>v[i].se>>v[i].fi; } sort(v.begin(),v.end()); bool b=true; for(int i=m-1;i>=0&&b;i--){ while(v[i].se--){ int x=*s.begin(); s.erase(s.begin()); if(x<v[i].fi){ s.insert(v[i].fi); } else{ s.insert(x); b=false; break; } } } long long x=0; for(auto i:s){ x=x+i; } cout<<x; return 0; }
/** * an object encapsulating the essential information on changes to a MutableCharSequence * * @author sautter */ public static class CharSequenceEvent { /** the MutableCharSequence that was changed */ public final MutableCharSequence charSequence; /** the start offset of the change */ public final int offset; /** the sequence of chars that was inserted (will be empty on pure removals, never null) */ public final java.lang.CharSequence inserted; /** the sequence of chars that was removed (will be empty on pure insertions, never null) */ public final java.lang.CharSequence removed; /** Constructor * @param charSequence the MutableCharSequence that was changed * @param offset the start offset of the change * @param inserted the sequence of chars that was inserted (will be empty on pure removals, never null) * @param removed the sequence of chars that was removed (will be empty on pure insertions, never null) */ public CharSequenceEvent(MutableCharSequence charSequence, int offset, java.lang.CharSequence inserted, java.lang.CharSequence removed) { this.charSequence = charSequence; this.offset = offset; this.inserted = ((inserted == null) ? "" : inserted); this.removed = ((removed == null) ? "" : removed); } }
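For concreteness, an event describing a replacement edit might be built like this (the sequence variable is assumed to be some MutableCharSequence already in scope):

// Records that "cat" was replaced by "dog" starting at offset 10.
CharSequenceEvent event = new CharSequenceEvent(sequence, 10, "dog", "cat");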
// TestContainerWait tests the error cases for the container check wait loop.
func TestContainerWait(t *testing.T) {
	assert := assert.New(t)

	labels := map[string]string{
		"com.ddev.site-name":         "foo",
		"com.docker.compose.service": "web",
	}

	err := ContainerWait(0, labels)
	assert.Error(err)
	assert.Equal("health check timed out", err.Error())

	err = ContainerWait(5, labels)
	assert.Error(err)
	assert.Equal("failed to query container", err.Error())
}
/*
 * Distributed under the Boost Software License, Version 1.0. (See accompanying
 * file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
 *
 * File:      waterfront/refinement.hpp
 * Date:      Sun Feb 03 14:10:28 MST 2008
 * Copyright: 2008 CodeRage, LLC
 * Author:    <NAME>
 * Contact:   turkanis at coderage dot com
 *
 * Defines the class template refinement taking a model of Map and
 * generating a second model of Map by subdividing each cell of the
 * original map.
 */

#ifndef WATERFRONT_REFINEMENT_HPP_INCLUDED
#define WATERFRONT_REFINEMENT_HPP_INCLUDED

#include <cmath>
#include "map_base.hpp"

namespace waterfront {

template<typename Map>
class refinement : public map_base< refinement<Map> > {
public:
    friend class map_base< refinement<Map> >;
    typedef map_base< refinement<Map> > base_type;

    refinement(Map& map, int resolution)
        : base_type( map.columns() * resolution,
                     map.rows() * resolution,
                     map.scale() / resolution ),
          map_(map), resolution_(resolution)
        { }
private:
    bool at(int col, int row) const
    {
        int c = floor(col / resolution_);
        int r = floor(row / resolution_);
        return map_[c][r];
    }
    static int floor(double d) { return static_cast<int>(std::floor(d)); }
    Map&  map_;
    int   resolution_;
};

} // End namespace waterfront.

#endif // #ifndef WATERFRONT_REFINEMENT_HPP_INCLUDED
""" sentry.rules.base ~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. Rules apply either before an event gets stored, or immediately after. Basic actions: - I want to get notified when [X] - I want to group events when [X] - I want to scrub data when [X] Expanded: - I want to get notified when an event is first seen - I want to get notified when an event is marked as a regression - I want to get notified when the rate of an event increases by [100%] - I want to get notified when an event has been seen more than [100] times - I want to get notified when an event matches [conditions] - I want to group events when an event matches [conditions] Rules get broken down into two phases: - An action - A rule condition A condition itself may actually be any number of things, but that is determined by the rule's logic. Each rule condition may be associated with a form. - [ACTION:I want to get notified when] [RULE:an event is first seen] - [ACTION:I want to group events when] [RULE:an event matches [FORM]] """ from __future__ import absolute_import import logging import re from django.utils.html import escape from django.utils.safestring import mark_safe class RuleDescriptor(type): def __new__(cls, *args, **kwargs): new_cls = super(RuleDescriptor, cls).__new__(cls, *args, **kwargs) new_cls.id = '%s.%s' % (new_cls.__module__, new_cls.__name__) return new_cls class RuleBase(object): label = None form_cls = None logger = logging.getLogger('sentry.rules') __metaclass__ = RuleDescriptor def __init__(self, project, data=None): self.project = project self.data = data or {} def get_option(self, key): return self.data.get(key) def get_form_instance(self): return self.form_cls( self.data, ) def render_label(self): return self.label.format(**self.data) def render_form(self): if not self.form_cls: return self.label form = self.get_form_instance() def replace_field(match): field = match.group(1) return unicode(form[field]) return mark_safe(re.sub(r'{([^}]+)}', replace_field, escape(self.label))) def validate_form(self): if not self.form_cls: return True form = self.get_form_instance() return form.is_valid() class EventState(object): def __init__(self, is_new, is_regression, is_sample, rule_is_active): self.is_new = is_new self.is_regression = is_regression self.is_sample = is_sample, self.rule_is_active = rule_is_active
Transfusion medicine specialists, hospital administrators, government health departments and the Health Insurance Commission must respond in an appropriate way to address the economic and logistic barriers to the effective implementation of autologous transfusion. At present, homologous blood is seen as free to the consumer, but autologous transfusion can be a direct cost to the patient, the clinician or the local hospital, especially if haemodilution or salvage techniques are used.
<reponame>hiraginoyuki/deno-15puzzle
import { FifteenPuzzle } from './src/15-puzzle.ts'

console.log(FifteenPuzzle.generateRandom('kazukazu123123'))
<filename>pepper-core/src/main/java/com/pepper/core/base/BaseService.java<gh_stars>1-10
package com.pepper.core.base;

import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

import com.pepper.core.Pager;

/**
 *
 * @author mrliu
 *
 * @param <T>
 */
public interface BaseService<T> {

	public List<T> findAll();

	public List<T> findAll(Sort sort);

	public List<T> findAllById(Iterable<Serializable> ids);

	public <S extends T> List<S> saveAll(Iterable<S> entities);

	public <S extends T> S saveAndFlush(S entity);

	public void update(T entity);

	public void deleteInBatch(Iterable<T> entities);

	public void deleteAllInBatch();

	public T getOne(Serializable id);

	public <S extends T> List<S> findAll(Example<S> example);

	public <S extends T> List<S> findAll(Example<S> example, Sort sort);

	public Page<T> findAll(Pageable pageable);

	public <S extends T> S save(S entity);

	public T findById(Serializable id);

	public boolean existsById(Serializable id);

	public long count();

	public void deleteById(Serializable id);

	public void delete(T entity);

	public void deleteAll(Iterable<? extends T> entities);

	public void deleteAll();

	public <S extends T> Optional<S> findOne(Example<S> example);

	public <S extends T> Page<S> findAll(Example<S> example, Pageable pageable);

	List<T> findAll(Map<String, Object> searchParameter);

	List<T> findAll(Map<String, Object> searchParameter, Map<String, Object> sortParameter);

	public <S extends T> long count(Example<S> example);

	public <S extends T> boolean exists(Example<S> example);

	/**
	 * Paged query.
	 * @param pager the pager carrying page parameters and search criteria
	 * @return the same pager, filled with the matching page of results
	 */
	Pager<T> findNavigator(Pager<T> pager);
}
Madden NFL 15 New England Patriots Team Breakdown

The New England Patriots in Madden 15 are the best they have been since the Randy Moss rocket-catching days of Madden 08. Even though they don’t have too many big-time wide receiver options this year, Rob Gronkowski, Stevan Ridley, and Tom Brady, along with their stable of productive receivers, will be hard to stop. And we haven’t even begun talking about the Patriots defense this year. Let’s dive right in.

Team: New England Patriots

Offensive Strategy: Let Tom Brady pick apart the defense. With the increased emphasis on quarterback accuracy in Madden NFL 15, quarterbacks like Tom Brady will finally be as effective in Madden as they are in the NFL on Sundays. Make sure to get the ball to Rob Gronkowski as much as you can.

Defensive Strategy: Big hits and interceptions. The Patriots linebackers are excellent at forcing fumbles, while the secondary should be able to force at least a few interceptions each and every game.

Strengths: Secondary. The New England Patriots might have the best secondary of any team in Madden 15. It will be nearly impossible to move the ball downfield against them.

Weaknesses: Team Speed. The Patriots have really good players at most positions. Unfortunately, they don’t have much speed at some of the more important Madden positions, like quarterback or running back. This shouldn’t be a big problem, however.

Impact Players: QB Tom Brady, HB Stevan Ridley, WR Danny Amendola, TE Rob Gronkowski, DT Vince Wilfork, ROLB Dont’a Hightower, MLB Jerod Mayo, CB Darrelle Revis, CB Brandon Browner, CB Alfonzo Dennard, FS Devin McCourty

Key Newcomers: DT Dominique Easley, QB Jimmy Garoppolo, RB James White, CB Darrelle Revis, CB Brandon Browner, S Patrick Chung

Additional Remarks: The Patriots are a very good team in Madden 15. The defense is better than ever, and the offense has always been a nightmare to try to stop in Madden games.
During October, Antiwar.com tallied 3,071 killed and 1,160 wounded. These figures are lower than those counted last month. At least 3,974 were killed in September, and another 1,613 were wounded.

Scanning the media reports, Antiwar.com found that 528 civilians were reported killed, and another 602 were wounded. Security forces lost 168 members, while another 229 were wounded. The greatest number of reported casualties was among the militant groups. At least 2,375 were reported killed. Another 329 were wounded.

These casualty numbers should be considered estimates. The true figures may never be known. There is evidence that the Iraqi government is undercounting deaths among its security forces and possibly exaggerating militant deaths.

At least 171 were killed in the latest violence. Only one person was reported as wounded:

In Mosul, militants executed a dozen teenagers who were attempting to flee their control.
Ten security members were killed in Hawesh.
In Bab, gunmen killed one person and wounded another.
Gunmen killed a militiaman in Muqdadiya.
The body of a policeman was found in Jbela.
Security forces reported killing 146 militants during operations in Samarra.

Read more by Margaret Griffis
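A quick arithmetic check on the tallies above (a reader's verification, not part of the original report): the killed categories sum to 528 + 168 + 2,375 = 3,071 and the wounded categories to 602 + 229 + 329 = 1,160, matching the monthly totals; the latest incidents listed likewise sum to 12 + 10 + 1 + 1 + 1 + 146 = 171 killed.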
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;

/**
 * Note: Localizable and StringLocalizable are project-local types assumed
 * to be available on the classpath.
 *
 * @author Wolfgang Reder
 */
@XmlRootElement(name = "localizable")
public final class XmlStringLocalizable {

  public static final class Entry {

    @XmlAttribute(name = "lang")
    private String lang;
    @XmlValue
    private String value;

    public Entry()
    {
    }

    public Entry(Map.Entry<String, ? extends String> e)
    {
      lang = e.getKey();
      if (lang != null && lang.isBlank()) {
        lang = null;
      }
      value = e.getValue();
    }

  }

  private List<Entry> entries;

  public XmlStringLocalizable()
  {
    entries = new ArrayList<>();
  }

  public XmlStringLocalizable(Localizable<? extends String> init)
  {
    entries = init.getValues().entrySet().stream().map(Entry::new).collect(Collectors.toList());
  }

  public Localizable<String> toLocalizable(boolean mutable)
  {
    Localizable<String> result = new StringLocalizable(true);
    for (Entry e : entries) {
      result.addValue(e.lang, e.value);
    }
    if (!mutable) {
      return result.toImutable();
    }
    return result;
  }

  @XmlElement(name = "entry")
  public List<Entry> getEntry()
  {
    return entries;
  }

  public void setEntry(List<Entry> e)
  {
    this.entries = e;
  }

}
George Stephanopoulos and ABC News are still in negotiations, but there’s hope within the network that a deal can be wrapped up sometime next week. Will Sundays sans George hurt ABC?

No one doubts that George Stephanopoulos can rise before dawn, read all the major papers, and grill Robert Gibbs at the top of the 7 a.m. hour. But what if the morning’s guest is the latest cast-off from “Dancing with the Stars”?

Such questions may soon be answered if, as expected, Stephanopoulos fills the co-anchor spot on “Good Morning America” being vacated by Diane Sawyer. Stephanopoulos and ABC News are still in negotiations, but there’s hope within the network that a deal can be wrapped up sometime next week, according to an ABC source.

Some of Stephanopoulos’ colleagues have questioned whether “GMA” is the right fit for such a well-known political animal, and have even recommended he resist pressure from network higher-ups in New York and instead continue to build upon his considerable political franchise in Washington.

But Stephanopoulos is now in a position that other television journalism stars have faced over the years, when the network brass shepherd them into roles that at first blush don’t seem the best use of their talents and expertise.

Indeed, Sawyer was handed the “GMA” job on a temporary basis in 1999, but ended up spending 10 years on the morning show couch before getting the role many thought was a much better fit: “World News” anchor.

Similarly, some critics have questioned other high-profile network moves in recent years, such as CBS's decision to hire Katie Couric, who shone as an interviewer on “Today,” as anchor of its evening news show. While Couric won awards for her 2008 election coverage, the show remains a distant third in the ratings.

"These network teams have only got a certain number of positions to play and a finite number of people to move around in them, and sometimes they put a really good blocker in the quarterback position," said Syracuse University's Robert J. Thompson. "And blockers don't make good quarterbacks."

However, Thompson, who directs the Bleier Center for Television and Popular Culture, said there's "no definition of what makes a good morning show person," pointing out that Tom Brokaw hosted "Today" before moving to evening anchor.

So even though it remains uncertain whether Stephanopoulos will prove to be a successful "GMA" host, one thing is clear: he'd likely be trading in one of the most prestigious jobs in the industry to take that gamble.

"For a political analyst," Thompson said, "hosting a Sunday show is like making it to the Super Bowl."

While Sunday mornings may be the gold standard for political junkies within the Beltway, it's actually mornings from Monday to Friday that have traditionally been more highly prized by network executives.

That underscores a distinct cultural difference between New York and Washington, where the Sunday talk shows are a ritual and their hosts, from David Brinkley to Tim Russert to Stephanopoulos, are rock stars. Network executives might not understand that, but they’re ready to play along. One reason NBC reportedly named David Gregory as Russert’s successor after the death of the popular “Meet the Press” host was its desire to keep him in the NBC family as a likely successor to Matt Lauer on “Today.”
It took a while, but with the campaign "Ich bin kein Kostüm" ("I am not a costume"), the dispute over what is called cultural appropriation is now known to a broader public in Germany as well. What is it about? Wikipedia explains it briefly: "Cultural appropriation is a term from the US-American Critical Whiteness movement, which aims to encourage reflection on relations of power and discrimination based on skin color. In capitalist appropriation, traditional objects of the material culture of various ethnic groups are declared commodities and robbed of their context."

So actually a wonderful thing: people adopt traditional garments, dishes, or ways of behaving from others, mix them with their own, discover new things in the process, develop them further, and have their fun: Chinese eat Weisswurst in Beijing, but of course with chopsticks. In Italy there are spaghetti würstel, our modern pizza comes from the USA, jeans are popular in Africa and Asia, ponchos were fashionable in Europe in the 70s, and at the moment Arab restaurants founded by refugees are springing up like mushrooms.

One can take offense at this if one clings to the mistaken belief that there is such a thing as a stable, ethnic culture that has endured for centuries. People have always been in exchange with one another and have constantly influenced each other. And sure: much has been lost along the way. Is that bad? No. Ideas, fashion, and dishes also stand in competition. Germany today has little to do with what existed here a few hundred years ago: the Celtic religion? A few Nazis play at being druids, but nobody knows exactly what the Celts believed. They had no writing; we simply do not know. Germanic law is, thankfully, gone as well, and porridge, the quintessential traditional German dish, has almost completely disappeared. Hardly anyone mourns it. In return, many people around the world like German bread. Culture is a constantly changing mix; it is not static. Appropriation is the normal case, practiced over millennia of human history.

One can problematize that and stigmatize it as racism. And that is what the campaign "Ich bin kein Kostüm" tries to do. With several years' delay, it copies a campaign from the USA that did not reduce racism even there; it only ensured that there were quarrels at the universities when students who were not Mexicans celebrated with a sombrero on their head. Because of these campaigns, not one fewer Black person or Latino died in gang conflicts or through police violence. Not one more Native American in the USA found an economic perspective. Tuvia Tenenbom described the misery of these people impressively in his book "Allein unter Amerikanern" ("Alone among Americans").

"Ich bin kein Kostüm" is nothing more than a state-subsidized outrage industry. A playground for self-important busybodies who dodge the solving of hard problems, try to prescribe their warped postmodern worldview to others, and, incidentally, enjoy spoiling people's fun.

Do we now have to worry that we will be bothered more often by such absurd campaigns in the future? I don't think so. We are currently witnessing the last twitches of the outrage industry. And why I believe that, I explained a few months ago in this article: It´s the end of the left as we know it
Low serum concentrations of 25-hydroxyvitamin D in children and adolescents with systemic lupus erythematosus

We evaluated the concentrations of 25-hydroxyvitamin D in children and adolescents with juvenile systemic lupus erythematosus (JSLE) and associated them with disease duration and activity, use of medication (chloroquine and glucocorticoids), vitamin D intake, calcium and alkaline phosphatase levels, and bone mineral density. Thirty patients with JSLE were evaluated and compared to 30 healthy individuals, who were age and gender matched. Assessment was performed of clinical status, disease activity, anthropometry, laboratory markers, and bone mineral density. The 30 patients included 25 (83.3%) females and 16 (53.3%) Caucasians, with a mean age of 13.7 years. The mean age at diagnosis was 10.5 years and mean disease duration was 3.4 years. Mean levels of calcium, albumin, and alkaline phosphatase were significantly lower in patients with JSLE compared with controls (P<0.001, P=0.006, and P<0.001, respectively). Twenty-nine patients (97%) and 23 controls (77%) had 25(OH)D concentrations lower than 32 ng/mL, with significant differences between them (P<0.001). Fifteen patients (50%) had vitamin D levels <20 ng/mL and 14 had vitamin D levels between 20 and 32 ng/mL. However, these values were not associated with greater disease activity, higher levels of parathormone, medication intake, or bone mineral density. Vitamin D concentrations were similar with regard to ethnic group, body mass index, height for age, and pubertal stage. Significantly more frequently than in controls, we observed insufficient serum concentrations of 25(OH)D in patients with JSLE; however, we did not observe any association with disease activity, higher levels of parathormone, lower levels of alkaline phosphatase, use of medications, or bone mineral density alterations.

Introduction

Vitamin D is the common denominator of a group of steroids involved in the metabolism of a variety of tissues and body systems. Its classical role is the regulation of calcium homeostasis and bone formation and reabsorption through interaction with the parathyroid glands, kidneys, and intestine (1). Over the past few years, identification of 1α-hydroxylase and 24α-hydroxylase, responsible for the production of 1,25-dihydroxycholecalciferol, and of the vitamin D receptors in the cells for innate immunity highlighted the importance of this micronutrient in immune regulation (2). Vitamin D immunomodulatory effects are suppression of adaptive immunity through a reduction in the production of interleukin (IL)-2, gamma interferon, and tumor necrosis factor, inhibition of expression of IL-6, secretion and production of autoantibodies by B lymphocytes, and activation of innate immunity (3-6). Epidemiological findings indicate that low 25-hydroxyvitamin D serum concentrations are associated with the development of various diseases, especially autoimmune diseases, including insulin-dependent diabetes mellitus, multiple sclerosis, rheumatoid arthritis, systemic lupus erythematosus (SLE), and inflammatory bowel disease (5,6). Recent studies have confirmed that most adult patients (7-11) and children and adolescents with SLE have insufficient or deficient vitamin D concentrations associated with disease activity (12). The aim of this study was to evaluate the concentrations of 25(OH)D in children and adolescents with juvenile SLE (JSLE) and to associate them with disease duration
and activity, use of medication (hydroxychloroquine and glucocorticoids), vitamin D intake, calcium and alkaline phosphatase levels, and bone mineral density.

Correspondence: M.T.R.A. Terreri, Rua Ipê, 112/111, 04022-005 São Paulo, SP, Brasil. Fax: +55-11-5579-1590. E-mail: [email protected]

Material and Methods

In a cross-sectional study, 30 patients with JSLE according to Hochberg criteria (13) who were monitored in the Pediatric Rheumatology outpatient clinic were evaluated during the spring of 2008. The inclusion criteria were a minimum disease duration of 6 months. Patients were excluded if they could not perform bone density measurements due to incompatible stature (1), or if they had other chronic diseases interfering with calcium, phosphorus, and vitamin D metabolism, or if they did not consent to participate in the study.

The control group consisted of 30 healthy individuals, followed in a primary medical care facility, who were gender and age (maximum difference of 6 months) matched with the patients, who were not using supplements containing vitamin D or calcium, and who had no impairment in calcium, phosphorus, or vitamin D metabolism.

Demographic, clinical, and treatment data were collected from the patient charts from the visit closest to the laboratory exams. The data for disease activity and medications were also recorded.

Anthropometric assessment involved weight and height measurements, using the reference proposed by the World Health Organization (WHO) (14,15). Pubertal development was determined as recommended by Marshall and Tanner (16).

Blood samples from the patients and controls were collected in the morning after a 12-h fast for the following tests: 25(OH)D, parathormone (PTH), serum calcium, serum phosphorus, alkaline phosphatase, serum albumin, serum urea, and serum creatinine. The patients were also tested for ionized calcium, creatinine clearance, and calciuria and creatinuria in an isolated sample. The calciuria and creatinuria ratio was considered abnormal if >0.2.

The concentration of 25(OH)D was measured by electrochemiluminescence immunoassay using the commercial kit Elecsys 25(OH)D3 assay (Modular Roche, Brazil), and intact PTH was measured by immunofluorimetric assay in 2009. The reference values for 25(OH)D were as follows: <20 ng/mL as deficiency, between 20 and 32 ng/mL as insufficiency, and >32 ng/mL as normal (18). For PTH the normal range was from 15 to 65 pg/mL. Serum phosphorus, serum calcium, serum urea, serum albumin, isolated calciuria, creatinuria, and creatinine clearance were measured by colorimetric analysis (Modular Roche, Brazil), serum alkaline phosphatase was measured by an enzyme activity method, and ionized calcium was measured using a potentiometer.

Bone mineral density of the spine (L1-L4) was measured by dual energy X-ray absorptiometry (DXA) using a LUNAR™ DPX-MD plus machine (GE-Lunar Radiation Corporation, USA) equipped with pediatric software (version 8.5) and adjusted to stature (19). The variation coefficient for the spine, which was checked daily according to the manufacturer's specifications, was 2%. The interval between the collection of medical records and of blood exams and the measuring of bone mineral density ranged from 0 to 90 days.

Statistical analyses were carried out with the statistical software Minitab 15 (Minitab Inc., USA). To evaluate the association between dichotomous variables, the chi-square test was applied, and, when indicated, the Fisher test was applied.
The normality of continuous variables was established by means of the Kolmogorov-Smirnov test; these variables are reported as means±SD and were compared with the Student t-test. When both variables were symmetric, Pearson's correlation coefficient was used. A value of α <5% was adopted (P<0.05).

Informed consent forms were signed by all patients and controls and/or their legal guardians. The review board of Universidade Federal de São Paulo approved the study protocol.

Results

Of the 30 children and adolescents with JSLE, 25 (83.3%) were female, 16 were Caucasian (53.3%), and the current mean age was 13.7 years (7-18 years). The mean age at diagnosis was 10.5 years (3-15 years) and the mean disease duration was 3.4 years (0.5-12.2 years). All patients used sunblock and avoided exposure to sunlight. The control group consisted of 30 healthy volunteers, 25 female (83.3%) and 16 Caucasian (53.3%), with a mean age of 13.6 years (6-19 years). Table 1 displays the characteristics of the groups studied.

Nineteen patients (63%) had renal involvement at the time of diagnosis and 10 of 30 (33%) had renal disease activity at the time of the evaluation. No patient had renal function impairment. The calciuria/creatinuria ratio was normal in all patients.

Twelve (40%) patients had low bone mineral density for their age. The abnormal values of DXA were not associated with lower 25(OH)D concentrations (>20 vs <20, or >32 vs <32 ng/mL, with P=0.710 and P=0.400, respectively). However, there was a significant association between cumulative glucocorticoid dose and low bone mineral density (P<0.001).

We did not find an association between vitamin D concentrations and ethnicity (Caucasian and non-Caucasian; P=0.083), body mass index (BMI; P=0.955), height for age ratio (P=0.646), or pubertal stage (P=0.524).

We observed a positive correlation between 25(OH)D concentrations and serum calcium levels (P=0.017 and r=0.306) and serum albumin levels (P=0.01 and r=0.316). For PTH, we found a trend toward an inverse correlation with vitamin D concentrations (P=0.057 and r=-0.247).

Discussion

Vitamin D deficiency is often seen in patients with SLE (7-9). Wright et al. (12), when evaluating 38 individuals with JSLE, observed a high frequency of severe vitamin D deficiency (<10 ng/mL), with a significant difference compared to controls. In our study, the frequency of 25(OH)D deficiency was 50%, with a significant difference compared to controls. This percentage was higher than that found by Wright et al. (12) and that described in a study on adults with SLE (20). A possible explanation for the elevated frequency of deficiency/insufficiency that we observed may be the season of sample collection, that is, spring. Although Brazil is a tropical country, sunny all year, we cannot exclude that, in the southern regions, winter and spring are colder seasons, and this population is more protected by winter clothing. Thudi et al. (21) reported 25(OH)D concentrations lower than 32 ng/mL (80 nmol/L) in 65% of the SLE patients, of Hispanic-American and African-American origin, and the values found in winter and spring were significantly lower than those in summer and fall (21). Photosensitivity and the consequent use of sunblock, renal function impairment, presence of auto-antibodies against vitamin D, and use of glucocorticoids are risk factors for vitamin D deficiency in SLE patients (6,22).
For chloroquine, there is no consensus yet: initial reports point out that it may interfere with vitamin D metabolism, resulting in a lowering of its concentration (23); however, a more recent study of patients with SLE found a protective effect against this vitamin deficiency (20). In our study, chloroquine was not associated with vitamin D concentration, nor was the use of glucocorticoids. Despite vitamin D supplementation (doses of about 400 IU), we did not observe any benefit on the prevention of this vitamin deficiency.

There is no consensus in the literature on the ideal vitamin D concentration. Recently, Abrams (24) suggested that 25(OH)D concentrations greater than 20 ng/mL would be sufficient to meet the needs of most children and adults. Gordon et al. (25) in 2008 evaluated 365 healthy children and found 25(OH)D concentrations of <20 ng/mL in 12.1% and <30 ng/mL in 40%. In a study evaluating healthy North American children between 2001 and 2004, 9% deficiency and 61% insufficiency were observed, i.e., 70% of the individuals had serum concentrations lower than 32 ng/mL (26). According to Greer (27), we should define the nutritional status related to vitamin D based not only on 25(OH)D concentration, but also on the genetic, environmental, and dietetic factors involved with vitamin D and calcium and bone mineralization.

Despite the high frequency of 25(OH)D deficiency that we observed in JSLE patients, we did not verify a significant alteration in levels of alkaline phosphatase, PTH, or bone mineral density. The use of glucocorticoids may have lowered the alkaline phosphatase levels. Serum calcium was statistically lower in the patients compared to the controls; however, the hyperphosphatemia expected with low calcium levels did not occur, nor did abnormal levels of ionized calcium. In our study, we observed a direct and significant correlation between vitamin D concentrations and serum calcium levels. Although PTH levels did not show an inverse correlation with vitamin D concentration, a trend was observed. This is consistent with the action of PTH being stimulated when serum calcium levels are low due to a decrease in vitamin D (24,25,27).

Approximately one third of the patients had low bone mineral density, however, without any association with vitamin D concentration. Although this is an unexpected finding, we know that low bone mineral density can be influenced by other factors such as the presence of inflammation and the use of glucocorticoids, and those factors may explain this finding. Compeyrot-Lacassagne et al. (28) evaluated a group of 64 patients with JSLE and found that 38% had low bone mass density for their age, a result close to our finding of 40%. These authors also found a significant association between cumulative glucocorticoid dose and low bone mineral density (P<0.001), the same as observed in our study.

Regarding disease activity and vitamin D deficiency, the current data are controversial (11,20,29-31). Our sample was characterized by JSLE patients with low disease activity (mean SLEDAI-2K score of 4.4, with 63.4% of the patients having SLEDAI-2K >1 and 33.4% >4). Wright et al. (12) found an association between vitamin D deficiency and greater disease activity; however, they could not establish a causal relationship between them. Casella et al.
(32) also found an association between vitamin D deficiency and negative effects on bone health and disease activity in a cohort of pediatric lupus patients in spite of conventional vitamin D supplementation. This finding was independent of therapy and fat mass, and they suggested that those patients might need higher doses of vitamin D supplementation. Kamen et al. (22) suggested that vitamin D supplementation in SLE patients may have a beneficial effect on the immune system in addition to the impact on bone metabolism. There is still no consensus in the literature on the vitamin D dosage to be supplemented in patients with JSLE. Kamen (33) stated in a recent review paper that a clinical trial comparing the use of 800 to 2000 or 4000 IU of vitamin D is in progress. In the present study, BMI and height to age ratio did not influence vitamin D concentration, and, when we assessed only patients and controls with adequate weight and stature, we found a significant difference regarding their vitamin D concentrations. It is well known that renal failure leads to limitations in the conversion of inactive to active forms of vitamin D; however, we did not observe renal impairment in any patient, and this has not been an aggravating factor in vitamin D concentrations (1,34). Regarding ethnicity, it is known that non-Caucasians usually have lower vitamin D concentrations due to limitations in vitamin D production from solar exposure in those individuals; however, we did not find a statistical difference between the two races (1). This finding is controversial, since our study population had a high mixture of ethnicities, which hampered a precise classification. There are some limitations in the present study, such as the small sample size, the cross-sectional model used which does not allow causal evaluations, and the lack of DXA in the control group. Also, the method applied in the determination of 25(OH)D concentrations may be questioned, as well as the values considered normal. Finally, according to our findings, JSLE patients had serum vitamin D concentrations significantly lower than controls, without being associated with greater disease activity, higher levels of PTH, or lower bone mineral density.
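For readers who want to see the two computations at the core of this paper in concrete form, the sketch below is a minimal illustration, not the authors' analysis code: it classifies 25(OH)D concentrations with the cutoffs defined in the Methods (<20 ng/mL deficiency, 20-32 ng/mL insufficiency, >32 ng/mL normal, ref. 18) and compares patients and controls with a Student t-test. It assumes numpy and scipy are available, and the sample concentrations are invented purely for illustration.

import numpy as np
from scipy import stats
from collections import Counter

def vitamin_d_status(conc_ng_ml):
    """Classify a 25(OH)D concentration using the paper's cutoffs (ref. 18)."""
    if conc_ng_ml < 20:
        return "deficiency"
    elif conc_ng_ml <= 32:
        return "insufficiency"
    return "normal"

# Hypothetical concentrations for 30 JSLE patients and 30 matched controls
rng = np.random.default_rng(0)
patients = rng.normal(20, 6, 30)   # invented values, lower on average
controls = rng.normal(28, 6, 30)   # invented values

# Frequency of each status category in each group
print(Counter(vitamin_d_status(c) for c in patients))
print(Counter(vitamin_d_status(c) for c in controls))

# Student t-test comparing group means, as described in the Methods
t, p = stats.ttest_ind(patients, controls)
print("t = %.2f, P = %.4f" % (t, p))

In the paper's actual analysis, categorical frequencies such as these would be compared with the chi-square or Fisher test, and Minitab rather than Python was used; the sketch only mirrors the logic of the reported workflow.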
// RUN: %bmc -bound 1 "%s" | FileCheck "%s"

// CHECK: Verification FAILED

#include <assert.h>

float __VERIFIER_nondet_float(void);

int main(void)
{
    double x = __VERIFIER_nondet_float();
    assert(x != 150.0);

    return 0;
}
<filename>plugins/xevents/Xlib/xobject/colormap.py<gh_stars>0
# $Id: colormap.py,v 1.6 2007/06/10 14:11:59 mggrant Exp $
#
# Xlib.xobject.colormap -- colormap object
#
# Copyright (C) 2000 <NAME> <<EMAIL>>
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

from Xlib import error
from Xlib.protocol import request

import resource

import re
import string

rgb_res = [
    re.compile(r'\Argb:([0-9a-fA-F]{1,4})/([0-9a-fA-F]{1,4})/([0-9a-fA-F]{1,4})\Z'),
    re.compile(r'\A#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])\Z'),
    re.compile(r'\A#([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])\Z'),
    re.compile(r'\A#([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])\Z'),
    re.compile(r'\A#([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F][0-9a-fA-F][0-9a-fA-F])\Z'),
]

class Colormap(resource.Resource):
    __colormap__ = resource.Resource.__resource__

    def free(self, onerror = None):
        request.FreeColormap(display = self.display,
                             onerror = onerror,
                             cmap = self.id)
        self.display.free_resource_id(self.id)

    def copy_colormap_and_free(self, src_cmap):
        mid = self.display.allocate_resource_id()
        request.CopyColormapAndFree(display = self.display,
                                    mid = mid,
                                    src_cmap = src_cmap)
        cls = self.display.get_resource_class('colormap', Colormap)
        return cls(self.display, mid, owner = 1)

    def install_colormap(self, onerror = None):
        request.InstallColormap(display = self.display,
                                onerror = onerror,
                                cmap = self.id)

    def uninstall_colormap(self, onerror = None):
        request.UninstallColormap(display = self.display,
                                  onerror = onerror,
                                  cmap = self.id)

    def alloc_color(self, red, green, blue):
        return request.AllocColor(display = self.display,
                                  cmap = self.id,
                                  red = red,
                                  green = green,
                                  blue = blue)

    def alloc_named_color(self, name):
        for r in rgb_res:
            m = r.match(name)
            if m:
                rs = m.group(1)
                r = string.atoi(rs + '0' * (4 - len(rs)), 16)

                gs = m.group(2)
                g = string.atoi(gs + '0' * (4 - len(gs)), 16)

                bs = m.group(3)
                b = string.atoi(bs + '0' * (4 - len(bs)), 16)

                return self.alloc_color(r, g, b)

        try:
            return request.AllocNamedColor(display = self.display,
                                           cmap = self.id,
                                           name = name)
        except error.BadName:
            return None

    def alloc_color_cells(self, contiguous, colors, planes):
        return request.AllocColorCells(display = self.display,
                                       contiguous = contiguous,
                                       cmap = self.id,
                                       colors = colors,
                                       planes = planes)

    def alloc_color_planes(self, contiguous, colors, red, green, blue):
        return request.AllocColorPlanes(display = self.display,
                                        contiguous = contiguous,
                                        cmap = self.id,
                                        colors = colors,
                                        red = red,
                                        green = green,
                                        blue = blue)

    def free_colors(self, pixels, plane_mask, onerror = None):
        request.FreeColors(display = self.display,
                           onerror = onerror,
                           cmap = self.id,
                           plane_mask = plane_mask,
                           pixels = pixels)

    def store_colors(self, items, onerror = None):
        request.StoreColors(display = self.display, onerror =
onerror, cmap = self.id,
                            items = items)

    def store_named_color(self, name, pixel, flags, onerror = None):
        request.StoreNamedColor(display = self.display,
                                onerror = onerror,
                                flags = flags,
                                cmap = self.id,
                                pixel = pixel,
                                name = name)

    def query_colors(self, pixels):
        r = request.QueryColors(display = self.display,
                                cmap = self.id,
                                pixels = pixels)
        return r.colors

    def lookup_color(self, name):
        return request.LookupColor(display = self.display,
                                   cmap = self.id,
                                   name = name)
// IndirectType returns the reflect.Type that t points to, dereferencing
// pointer types until a non-pointer type is reached.
func IndirectType(t reflect.Type) reflect.Type {
	for reflect.Ptr == t.Kind() {
		t = t.Elem()
	}

	return t
}