content
stringlengths
10
4.9M
import { Request, Response } from "express"; import Controller from "./Controller"; import Project from '../classes/Project' import Organization from "../classes/Organization"; export default class ProjectController implements Controller { public static getAll = async (req: Request, res: Response) => { const ProjectModel = new Project().getModelForClass(Project); try { const projects = await ProjectModel.find(); if (!projects) { res.status(404).send(); } res.send(projects); } catch (error) { res.status(500).send(); } } public static getOneById = async (req: Request, res: Response) => { const ProjectModel = new Project().getModelForClass(Project); try { const projects = await ProjectModel.findById(req.params.id); if (!projects) { res.status(404).send(); } res.send(projects); } catch (error) { res.status(500).send(); } } public static insert = async (req: Request, res: Response) => { const ProjectModel = new Project().getModelForClass(Project); try { const project = await ProjectModel.findOne({title: req.body.title}); if (project) { res.status(403).send({ message: 'Project already exists.' }); return } const newProject = await ProjectModel.create(req.body); const OrgModel = new Organization().getModelForClass(Organization); const { organization } = res.locals.jwtPayload; const org = await OrgModel.findByIdAndUpdate( organization, { $push: { projects: newProject, }, }, { new:true } ); if (!org) res.status(500).send({ message: 'Organization not found!' }); res.send(newProject); } catch (error) { res.status(500).send(); } }; public static update = async (req: Request, res: Response) => { const ProjectModel = new Project().getModelForClass(Project); try { const project = await ProjectModel.findByIdAndUpdate( req.params.id, req.body, { new:true } ); if (!project) res.status(400).send({ message: 'Project not found!' }); res.send({ message: 'Succesfully updated the project!' 
}); } catch (error) { res.status(500).send(); } } public static delete = async (req: Request, res: Response) => { const ProjectModel = new Project().getModelForClass(Project); try { await ProjectModel.deleteOne({ _id: req.params.id }); res.send({ message: 'Succesfully deleted the project!' }); } catch (error) { res.status(500).send(); } } }
def permute_graph(G):
    """Return a randomly perturbed deep copy of graph ``G``.

    With probability 0.5 — or always when the graph has no edges, and
    never when it is complete — an edge between two randomly chosen
    nodes is added; otherwise a randomly chosen existing edge is removed.

    Parameters
    ----------
    G : networkx graph
        Input graph; it is never mutated.

    Returns
    -------
    networkx graph
        A deep copy of ``G`` differing by one added or removed edge.
    """
    # Use the stdlib directly: ``nx.copy.deepcopy`` only worked because
    # networkx happens to re-export the ``copy`` module it imports.
    import copy

    G1 = copy.deepcopy(G)
    d = nx.density(G1)
    r = random.random()
    if (r < 0.5 or d == 0) and d != 1:
        # random.choice needs a sequence; NodeView (networkx >= 2.0)
        # is not one, so materialise the nodes first.
        nodes = list(G.nodes())
        n1 = random.choice(nodes)
        n2 = random.choice(nodes)
        G1.add_edge(n1, n2)
    else:
        # Same for EdgeView: convert to a list before sampling.
        n1, n2 = random.choice(list(G1.edges()))
        G1.remove_edge(n1, n2)
    return G1
// InitCCentralService returns service struct for easier configuration access func InitCCentralService(cc CCApi, serviceID string) *CCentralService { service := CCentralService{ servideID: serviceID, schema: make(map[string]SchemaItem), config: make(map[string]ConfigItem), cc: cc} return &service }
#line 10 "headerlexer.l" /* * This file is part of Wireless Display Software for Linux OS * * Copyright (C) 2015 Intel Corporation. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA */ #include <stdint.h> #define YY_NO_UNISTD_H 1 #define YY_INT_ALIGNED short int /* A lexical scanner generated by flex */ #define FLEX_SCANNER #define YY_FLEX_MAJOR_VERSION 2 #define YY_FLEX_MINOR_VERSION 6 #define YY_FLEX_SUBMINOR_VERSION 4 #if YY_FLEX_SUBMINOR_VERSION > 0 #define FLEX_BETA #endif #ifdef yy_create_buffer #define header__create_buffer_ALREADY_DEFINED #else #define yy_create_buffer header__create_buffer #endif #ifdef yy_delete_buffer #define header__delete_buffer_ALREADY_DEFINED #else #define yy_delete_buffer header__delete_buffer #endif #ifdef yy_scan_buffer #define header__scan_buffer_ALREADY_DEFINED #else #define yy_scan_buffer header__scan_buffer #endif #ifdef yy_scan_string #define header__scan_string_ALREADY_DEFINED #else #define yy_scan_string header__scan_string #endif #ifdef yy_scan_bytes #define header__scan_bytes_ALREADY_DEFINED #else #define yy_scan_bytes header__scan_bytes #endif #ifdef yy_init_buffer #define header__init_buffer_ALREADY_DEFINED #else #define yy_init_buffer header__init_buffer #endif #ifdef yy_flush_buffer #define header__flush_buffer_ALREADY_DEFINED #else #define yy_flush_buffer 
header__flush_buffer #endif #ifdef yy_load_buffer_state #define header__load_buffer_state_ALREADY_DEFINED #else #define yy_load_buffer_state header__load_buffer_state #endif #ifdef yy_switch_to_buffer #define header__switch_to_buffer_ALREADY_DEFINED #else #define yy_switch_to_buffer header__switch_to_buffer #endif #ifdef yypush_buffer_state #define header_push_buffer_state_ALREADY_DEFINED #else #define yypush_buffer_state header_push_buffer_state #endif #ifdef yypop_buffer_state #define header_pop_buffer_state_ALREADY_DEFINED #else #define yypop_buffer_state header_pop_buffer_state #endif #ifdef yyensure_buffer_stack #define header_ensure_buffer_stack_ALREADY_DEFINED #else #define yyensure_buffer_stack header_ensure_buffer_stack #endif #ifdef yylex #define header_lex_ALREADY_DEFINED #else #define yylex header_lex #endif #ifdef yyrestart #define header_restart_ALREADY_DEFINED #else #define yyrestart header_restart #endif #ifdef yylex_init #define header_lex_init_ALREADY_DEFINED #else #define yylex_init header_lex_init #endif #ifdef yylex_init_extra #define header_lex_init_extra_ALREADY_DEFINED #else #define yylex_init_extra header_lex_init_extra #endif #ifdef yylex_destroy #define header_lex_destroy_ALREADY_DEFINED #else #define yylex_destroy header_lex_destroy #endif #ifdef yyget_debug #define header_get_debug_ALREADY_DEFINED #else #define yyget_debug header_get_debug #endif #ifdef yyset_debug #define header_set_debug_ALREADY_DEFINED #else #define yyset_debug header_set_debug #endif #ifdef yyget_extra #define header_get_extra_ALREADY_DEFINED #else #define yyget_extra header_get_extra #endif #ifdef yyset_extra #define header_set_extra_ALREADY_DEFINED #else #define yyset_extra header_set_extra #endif #ifdef yyget_in #define header_get_in_ALREADY_DEFINED #else #define yyget_in header_get_in #endif #ifdef yyset_in #define header_set_in_ALREADY_DEFINED #else #define yyset_in header_set_in #endif #ifdef yyget_out #define header_get_out_ALREADY_DEFINED #else #define 
yyget_out header_get_out #endif #ifdef yyset_out #define header_set_out_ALREADY_DEFINED #else #define yyset_out header_set_out #endif #ifdef yyget_leng #define header_get_leng_ALREADY_DEFINED #else #define yyget_leng header_get_leng #endif #ifdef yyget_text #define header_get_text_ALREADY_DEFINED #else #define yyget_text header_get_text #endif #ifdef yyget_lineno #define header_get_lineno_ALREADY_DEFINED #else #define yyget_lineno header_get_lineno #endif #ifdef yyset_lineno #define header_set_lineno_ALREADY_DEFINED #else #define yyset_lineno header_set_lineno #endif #ifdef yyget_column #define header_get_column_ALREADY_DEFINED #else #define yyget_column header_get_column #endif #ifdef yyset_column #define header_set_column_ALREADY_DEFINED #else #define yyset_column header_set_column #endif #ifdef yywrap #define header_wrap_ALREADY_DEFINED #else #define yywrap header_wrap #endif #ifdef yyget_lval #define header_get_lval_ALREADY_DEFINED #else #define yyget_lval header_get_lval #endif #ifdef yyset_lval #define header_set_lval_ALREADY_DEFINED #else #define yyset_lval header_set_lval #endif #ifdef yyalloc #define header_alloc_ALREADY_DEFINED #else #define yyalloc header_alloc #endif #ifdef yyrealloc #define header_realloc_ALREADY_DEFINED #else #define yyrealloc header_realloc #endif #ifdef yyfree #define header_free_ALREADY_DEFINED #else #define yyfree header_free #endif /* First, we deal with platform-specific or compiler-specific issues. */ /* begin standard C headers. */ #include <stdio.h> #include <string.h> #include <errno.h> #include <stdlib.h> /* end standard C headers. */ /* flex integer type definitions */ #ifndef FLEXINT_H #define FLEXINT_H /* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */ #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 says to define __STDC_LIMIT_MACROS before including stdint.h, * if you want the limit (max/min) macros for int types. 
*/ #ifndef __STDC_LIMIT_MACROS #define __STDC_LIMIT_MACROS 1 #endif #include <inttypes.h> typedef int8_t flex_int8_t; typedef uint8_t flex_uint8_t; typedef int16_t flex_int16_t; typedef uint16_t flex_uint16_t; typedef int32_t flex_int32_t; typedef uint32_t flex_uint32_t; #else typedef signed char flex_int8_t; typedef short int flex_int16_t; typedef int flex_int32_t; typedef unsigned char flex_uint8_t; typedef unsigned short int flex_uint16_t; typedef unsigned int flex_uint32_t; /* Limits of integral types. */ #ifndef INT8_MIN #define INT8_MIN (-128) #endif #ifndef INT16_MIN #define INT16_MIN (-32767-1) #endif #ifndef INT32_MIN #define INT32_MIN (-2147483647-1) #endif #ifndef INT8_MAX #define INT8_MAX (127) #endif #ifndef INT16_MAX #define INT16_MAX (32767) #endif #ifndef INT32_MAX #define INT32_MAX (2147483647) #endif #ifndef UINT8_MAX #define UINT8_MAX (255U) #endif #ifndef UINT16_MAX #define UINT16_MAX (65535U) #endif #ifndef UINT32_MAX #define UINT32_MAX (4294967295U) #endif #ifndef SIZE_MAX #define SIZE_MAX (~(size_t)0) #endif #endif /* ! C99 */ #endif /* ! FLEXINT_H */ /* begin standard C++ headers. */ /* TODO: this is always defined, so inline it */ #define yyconst const #if defined(__GNUC__) && __GNUC__ >= 3 #define yynoreturn __attribute__((__noreturn__)) #else #define yynoreturn #endif /* Returned upon end-of-file. */ #define YY_NULL 0 /* Promotes a possibly negative, possibly signed char to an * integer in range [0..255] for use as an array index. */ #define YY_SC_TO_UI(c) ((YY_CHAR)(c)) /* An opaque pointer. */ #ifndef YY_TYPEDEF_YY_SCANNER_T #define YY_TYPEDEF_YY_SCANNER_T typedef void* yyscan_t; #endif /* For convenience, these vars (plus the bison vars far below) are macros in the reentrant scanner. 
*/ #define yyin yyg->yyin_r #define yyout yyg->yyout_r #define yyextra yyg->yyextra_r #define yyleng yyg->yyleng_r #define yytext yyg->yytext_r #define yylineno (YY_CURRENT_BUFFER_LVALUE->yy_bs_lineno) #define yycolumn (YY_CURRENT_BUFFER_LVALUE->yy_bs_column) #define yy_flex_debug yyg->yy_flex_debug_r /* Enter a start condition. This macro really ought to take a parameter, * but we do it the disgusting crufty way forced on us by the ()-less * definition of BEGIN. */ #define BEGIN yyg->yy_start = 1 + 2 * /* Translate the current start state into a value that can be later handed * to BEGIN to return to the state. The YYSTATE alias is for lex * compatibility. */ #define YY_START ((yyg->yy_start - 1) / 2) #define YYSTATE YY_START /* Action number for EOF rule of a given start state. */ #define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1) /* Special action meaning "start processing a new file". */ #define YY_NEW_FILE yyrestart(yyin, yyscanner) #define YY_END_OF_BUFFER_CHAR 0 /* Size of default input buffer. */ #ifndef YY_BUF_SIZE #ifdef __ia64__ /* On IA-64, the buffer size is 16k, not 8k. * Moreover, YY_BUF_SIZE is 2*YY_READ_BUF_SIZE in the general case. * Ditto for the __ia64__ case accordingly. */ #define YY_BUF_SIZE 32768 #else #define YY_BUF_SIZE 16384 #endif /* __ia64__ */ #endif /* The state buf must be large enough to hold one state per character in the main buffer. */ #define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type)) #ifndef YY_TYPEDEF_YY_BUFFER_STATE #define YY_TYPEDEF_YY_BUFFER_STATE typedef struct yy_buffer_state *YY_BUFFER_STATE; #endif #ifndef YY_TYPEDEF_YY_SIZE_T #define YY_TYPEDEF_YY_SIZE_T typedef size_t yy_size_t; #endif #define EOB_ACT_CONTINUE_SCAN 0 #define EOB_ACT_END_OF_FILE 1 #define EOB_ACT_LAST_MATCH 2 #define YY_LESS_LINENO(n) #define YY_LINENO_REWIND_TO(ptr) /* Return all but the first "n" matched characters back to the input stream. */ #define yyless(n) \ do \ { \ /* Undo effects of setting up yytext. 
*/ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ *yy_cp = yyg->yy_hold_char; \ YY_RESTORE_YY_MORE_OFFSET \ yyg->yy_c_buf_p = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \ YY_DO_BEFORE_ACTION; /* set up yytext again */ \ } \ while ( 0 ) #define unput(c) yyunput( c, yyg->yytext_ptr , yyscanner ) #ifndef YY_STRUCT_YY_BUFFER_STATE #define YY_STRUCT_YY_BUFFER_STATE struct yy_buffer_state { FILE *yy_input_file; char* yy_ch_buf; /* input buffer */ char* yy_buf_pos; /* current position in input buffer */ /* Size of input buffer in bytes, not including room for EOB * characters. */ int yy_buf_size; /* Number of characters read into yy_ch_buf, not including EOB * characters. */ int yy_n_chars; /* Whether we "own" the buffer - i.e., we know we created it, * and can realloc() it to grow it, and should free() it to * delete it. */ int yy_is_our_buffer; /* Whether this is an "interactive" input source; if so, and * if we're using stdio for input, then we want to use getc() * instead of fread(), to make sure we stop fetching input after * each newline. */ int yy_is_interactive; /* Whether we're considered to be at the beginning of a line. * If so, '^' rules will be active on the next match, otherwise * not. */ int yy_at_bol; int yy_bs_lineno; /**< The line count. */ int yy_bs_column; /**< The column count. */ /* Whether to try to fill the input buffer when we reach the * end of it. */ int yy_fill_buffer; int yy_buffer_status; #define YY_BUFFER_NEW 0 #define YY_BUFFER_NORMAL 1 /* When an EOF's been seen but there's still some text to process * then we mark the buffer as YY_EOF_PENDING, to indicate that we * shouldn't try reading from the input source any more. We might * still have a bunch of tokens to match, though, because of * possible backing-up. * * When we actually see the EOF, we change the status to "new" * (via yyrestart()), so that the user can continue scanning by * just pointing yyin at a new input file. 
*/ #define YY_BUFFER_EOF_PENDING 2 }; #endif /* !YY_STRUCT_YY_BUFFER_STATE */ /* We provide macros for accessing buffer states in case in the * future we want to put the buffer states in a more general * "scanner state". * * Returns the top of the stack, or NULL. */ #define YY_CURRENT_BUFFER ( yyg->yy_buffer_stack \ ? yyg->yy_buffer_stack[yyg->yy_buffer_stack_top] \ : NULL) /* Same as previous macro, but useful when we know that the buffer stack is not * NULL or when we need an lvalue. For internal use only. */ #define YY_CURRENT_BUFFER_LVALUE yyg->yy_buffer_stack[yyg->yy_buffer_stack_top] void yyrestart(FILE* input_file, yyscan_t yyscanner); void yy_switch_to_buffer(YY_BUFFER_STATE new_buffer, yyscan_t yyscanner); YY_BUFFER_STATE yy_create_buffer(FILE* file, int size, yyscan_t yyscanner); void yy_delete_buffer(YY_BUFFER_STATE b, yyscan_t yyscanner); void yy_flush_buffer(YY_BUFFER_STATE b, yyscan_t yyscanner); void yypush_buffer_state(YY_BUFFER_STATE new_buffer, yyscan_t yyscanner); void yypop_buffer_state(yyscan_t yyscanner); static void yyensure_buffer_stack(yyscan_t yyscanner); static void yy_load_buffer_state(yyscan_t yyscanner); static void yy_init_buffer(YY_BUFFER_STATE b, FILE* file, yyscan_t yyscanner); #define YY_FLUSH_BUFFER yy_flush_buffer(YY_CURRENT_BUFFER, yyscanner) YY_BUFFER_STATE yy_scan_buffer(char* base, yy_size_t size, yyscan_t yyscanner); YY_BUFFER_STATE yy_scan_string(const char* yy_str, yyscan_t yyscanner); YY_BUFFER_STATE yy_scan_bytes(const char* bytes, int len, yyscan_t yyscanner); void* yyalloc(yy_size_t, yyscan_t yyscanner); void* yyrealloc(void*, yy_size_t, yyscan_t yyscanner); void yyfree(void*, yyscan_t yyscanner); #define yy_new_buffer yy_create_buffer #define yy_set_interactive(is_interactive) \ { \ if (!YY_CURRENT_BUFFER) { \ yyensure_buffer_stack(yyscanner); \ YY_CURRENT_BUFFER_LVALUE = \ yy_create_buffer(yyin, YY_BUF_SIZE, yyscanner); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \ } #define 
yy_set_bol(at_bol) \ { \ if (!YY_CURRENT_BUFFER) { \ yyensure_buffer_stack(yyscanner); \ YY_CURRENT_BUFFER_LVALUE = \ yy_create_buffer(yyin, YY_BUF_SIZE, yyscanner); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \ } #define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol) /* Begin user sect3 */ #define header_wrap(yyscanner) (/*CONSTCOND*/ 1) #define YY_SKIP_YYWRAP typedef flex_uint8_t YY_CHAR; typedef int yy_state_type; #define yytext_ptr yytext_r static yy_state_type yy_get_previous_state(yyscan_t yyscanner); static yy_state_type yy_try_NUL_trans(yy_state_type current_state, yyscan_t yyscanner); static int yy_get_next_buffer(yyscan_t yyscanner); static void yynoreturn yy_fatal_error(const char* msg, yyscan_t yyscanner); /* Done after the current pattern has been matched and before the * corresponding action - sets up yytext. */ #define YY_DO_BEFORE_ACTION \ yyg->yytext_ptr = yy_bp; \ yyleng = (int)(yy_cp - yy_bp); \ yyg->yy_hold_char = *yy_cp; \ *yy_cp = '\0'; \ yyg->yy_c_buf_p = yy_cp; #define YY_NUM_RULES 46 #define YY_END_OF_BUFFER 47 /* This struct is not used in this scanner, but its presence is necessary. 
*/ struct yy_trans_info { flex_int32_t yy_verify; flex_int32_t yy_nxt; }; static const flex_int16_t yy_accept[646] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47, 45, 11, 2, 3, 7, 6, 5, 10, 42, 9, 8, 4, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 31, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 5, 42, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 30, 7, 6, 5, 10, 30, 9, 4, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 11, 1, 42, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 29, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 30, 30, 18, 30, 30, 30, 30, 30, 30, 30, 30, 0, 0, 0, 0, 0, 22, 0, 0, 20, 0, 0, 0, 29, 0, 17, 0, 0, 0, 0, 0, 0, 39, 36, 0, 0, 0, 0, 0, 0, 20, 39, 17, 36, 0, 0, 0, 0, 0, 0, 0, 20, 0, 0, 0, 0, 17, 0, 0, 30, 30, 30, 22, 30, 30, 20, 30, 30, 30, 29, 30, 17, 30, 30, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 0, 0, 0, 0, 41, 0, 0, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 0, 0, 0, 0, 0, 14, 23, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 14, 33, 0, 0, 0, 0, 0, 14, 0, 0, 0, 0, 30, 30, 30, 30, 14, 23, 30, 30, 30, 30, 30, 30, 0, 0, 43, 44, 0, 0, 29, 43, 27, 0, 19, 0, 0, 0, 0, 38, 0, 0, 0, 0, 19, 38, 0, 19, 0, 30, 30, 30, 30, 29, 30, 27, 30, 19, 30, 0, 12, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 30, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 30, 30, 30, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 30, 30, 13, 0, 25, 16, 0, 15, 0, 35, 0, 34, 16, 35, 15, 34, 0, 30, 25, 16, 15, 0, 0, 0, 40, 0, 30, 26, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0}; static const YY_CHAR yy_ec[256] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 1, 1, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, 1, 7, 8, 9, 10, 11, 12, 13, 13, 13, 13, 13, 13, 13, 13, 14, 15, 1, 16, 1, 1, 1, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 26, 39, 26, 1, 1, 1, 1, 40, 1, 41, 42, 43, 44, 45, 46, 47, 48, 49, 26, 26, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 26, 62, 26, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; static const YY_CHAR yy_meta[63] = { 0, 1, 2, 3, 4, 2, 1, 1, 5, 1, 6, 5, 5, 5, 7, 8, 1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 7, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5}; static const flex_int16_t yy_base[659] = { 0, 0, 56, 14, 17, 23, 26, 117, 153, 211, 267, 328, 384, 1713, 2645, 23, 2645, 1704, 2645, 2645, 2645, 2645, 29, 2645, 100, 2645, 91, 1660, 125, 10, 18, 127, 132, 124, 147, 194, 151, 123, 0, 32, 2, 198, 215, 115, 121, 183, 323, 433, 188, 434, 331, 435, 437, 336, 449, 1654, 40, 1647, 442, 224, 474, 226, 462, 489, 497, 499, 505, 507, 514, 520, 0, 0, 0, 0, 0, 180, 0, 0, 127, 357, 576, 587, 564, 633, 692, 606, 750, 652, 664, 709, 193, 2645, 237, 160, 210, 203, 1635, 2645, 481, 536, 227, 333, 225, 229, 341, 443, 522, 584, 575, 241, 334, 1635, 551, 311, 
314, 433, 422, 444, 488, 445, 570, 586, 588, 544, 590, 616, 558, 631, 597, 686, 712, 640, 662, 1627, 0, 684, 673, 672, 738, 718, 739, 758, 763, 721, 770, 775, 777, 786, 791, 799, 818, 0, 624, 630, 835, 0, 846, 860, 876, 887, 903, 914, 926, 940, 951, 1008, 968, 980, 1020, 658, 689, 691, 788, 772, 457, 487, 346, 339, 768, 778, 819, 850, 851, 508, 843, 1625, 1587, 606, 1611, 806, 840, 878, 873, 649, 715, 934, 939, 180, 959, 966, 937, 989, 999, 1005, 977, 998, 0, 1001, 1034, 1036, 952, 1042, 1039, 1049, 1056, 1061, 1067, 1070, 1076, 1085, 1095, 1031, 1122, 1136, 1153, 1165, 1188, 1199, 1210, 1226, 1238, 1249, 1265, 1281, 1293, 1057, 833, 360, 1094, 1574, 351, 538, 34, 1573, 1116, 1139, 924, 1182, 710, 885, 243, 1099, 2645, 1072, 1081, 1117, 934, 2645, 1130, 1145, 1058, 1156, 1187, 1198, 1200, 1137, 1259, 1563, 1552, 1262, 1282, 1284, 1294, 1297, 1298, 991, 1309, 1163, 1310, 1318, 1317, 1319, 1337, 1342, 1350, 1353, 1351, 1360, 1130, 1395, 1412, 1423, 1435, 1446, 1462, 1473, 1485, 1499, 1510, 1522, 1536, 1548, 1564, 1204, 1193, 1367, 1551, 1358, 2645, 719, 1043, 1546, 1369, 1320, 1395, 1537, 1367, 1530, 780, 642, 1312, 1226, 1205, 1264, 2645, 2645, 1386, 1231, 1417, 1429, 1409, 1421, 1525, 1519, 1516, 1511, 1471, 1534, 1488, 1526, 1570, 1562, 1467, 1375, 1575, 1576, 1584, 1589, 1590, 1596, 1591, 1600, 1504, 1634, 0, 1645, 1659, 1671, 1683, 1694, 1612, 1493, 1705, 1722, 1738, 1750, 1761, 1313, 1389, 1484, 1474, 1549, 748, 535, 1461, 1445, 1455, 1624, 805, 1483, 1603, 1471, 1535, 1617, 1614, 1633, 1684, 1716, 1708, 1719, 1749, 1755, 1682, 1723, 1448, 1764, 1769, 1669, 1776, 1405, 1777, 1786, 1788, 1389, 1363, 1824, 1836, 1847, 1861, 1877, 1346, 1894, 1905, 1919, 1933, 1297, 1647, 1782, 0, 445, 854, 1321, 2645, 1307, 1801, 1264, 990, 1579, 1818, 1650, 2645, 1268, 1803, 1689, 1875, 1910, 1254, 1250, 1927, 1931, 1844, 1850, 1922, 1809, 1825, 1940, 1942, 1945, 1953, 536, 1982, 1993, 2005, 0, 2016, 1962, 2027, 2038, 2050, 2061, 1754, 1213, 2645, 0, 2015, 794, 1216, 1117, 2645, 
992, 1178, 1841, 1862, 1856, 1909, 2645, 1953, 1965, 2010, 2021, 1171, 1135, 2064, 2066, 2072, 0, 885, 2130, 2119, 1143, 1181, 0, 2142, 2086, 2162, 1923, 2645, 1963, 1956, 603, 1967, 1390, 663, 1086, 1815, 1863, 1886, 2076, 2087, 2089, 2097, 2145, 2163, 2075, 2185, 2203, 2217, 2234, 2250, 2007, 2017, 2110, 740, 2025, 1079, 1082, 2092, 2118, 2125, 2140, 2147, 2179, 2194, 2218, 2253, 2267, 2281, 2299, 2315, 1055, 2149, 2222, 2201, 949, 2164, 1026, 2183, 2233, 1026, 2240, 2258, 2278, 2296, 2297, 2316, 2322, 2345, 2363, 2379, 2395, 982, 2313, 957, 912, 934, 1256, 2236, 2242, 916, 2281, 2343, 2361, 2364, 2373, 2393, 2400, 2425, 2436, 2447, 2458, 2645, 2367, 2645, 894, 2322, 866, 2359, 2645, 818, 2645, 810, 762, 720, 691, 2452, 2472, 0, 2488, 2500, 690, 2240, 616, 2645, 2409, 2512, 2645, 2263, 2303, 0, 578, 2361, 2386, 2439, 2336, 455, 2392, 2449, 350, 2455, 236, 2480, 190, 184, 2645, 2476, 2486, 2478, 2486, 2500, 2489, 2489, 128, 2506, 2503, 2506, 2511, 2507, 2502, 14, 2508, 2510, 2509, 2508, 30, 2645, 2645, 2566, 2574, 2582, 2590, 2594, 2601, 2605, 2607, 2614, 2621, 4, 2628, 2636}; static const flex_int16_t yy_def[659] = { 0, 646, 645, 647, 647, 646, 646, 646, 2, 648, 645, 649, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 651, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 652, 652, 652, 652, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 654, 654, 654, 654, 654, 654, 654, 654, 654, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 645, 645, 645, 645, 645, 645, 650, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 651, 645, 645, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 652, 656, 652, 652, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 654, 654, 654, 655, 654, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 645, 645, 645, 650, 
650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 645, 645, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 656, 652, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 654, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 645, 645, 645, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 652, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 653, 654, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 655, 645, 645, 645, 645, 650, 645, 650, 650, 650, 650, 650, 645, 645, 650, 650, 650, 650, 650, 645, 645, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 650, 656, 653, 653, 653, 653, 653, 656, 653, 653, 653, 653, 654, 654, 655, 654, 655, 655, 655, 655, 655, 654, 654, 655, 655, 655, 655, 655, 645, 645, 645, 645, 650, 650, 650, 650, 650, 645, 650, 650, 650, 650, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 656, 653, 653, 653, 653, 656, 653, 653, 653, 654, 654, 655, 655, 655, 655, 655, 654, 655, 655, 655, 655, 645, 645, 645, 657, 650, 650, 650, 645, 650, 645, 650, 650, 650, 650, 645, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 650, 650, 653, 653, 653, 653, 653, 653, 654, 658, 655, 655, 655, 654, 655, 654, 655, 655, 655, 655, 645, 645, 645, 657, 650, 650, 645, 645, 645, 650, 650, 650, 645, 645, 645, 645, 650, 650, 650, 650, 650, 650, 653, 653, 653, 654, 658, 655, 655, 654, 654, 654, 655, 655, 655, 645, 645, 650, 650, 650, 645, 645, 650, 650, 645, 645, 645, 650, 650, 650, 650, 653, 653, 653, 655, 655, 655, 655, 655, 645, 650, 650, 650, 645, 650, 645, 645, 645, 645, 650, 650, 650, 650, 653, 653, 655, 655, 655, 655, 654, 645, 650, 650, 650, 645, 650, 645, 645, 645, 645, 650, 650, 650, 650, 653, 653, 655, 655, 655, 655, 645, 650, 650, 650, 645, 650, 645, 645, 645, 645, 650, 650, 650, 
650, 653, 653, 655, 655, 655, 655, 645, 650, 645, 650, 645, 650, 645, 645, 645, 645, 650, 650, 650, 650, 653, 655, 654, 655, 655, 650, 645, 645, 645, 653, 655, 645, 645, 645, 654, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 0, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645}; static const flex_int16_t yy_nxt[2708] = { 0, 645, 15, 16, 17, 15, 18, 19, 20, 206, 21, 22, 22, 22, 23, 24, 25, 16, 17, 14, 16, 17, 14, 113, 97, 90, 16, 17, 90, 16, 17, 100, 97, 26, 39, 39, 39, 39, 39, 39, 92, 92, 92, 112, 112, 112, 644, 113, 97, 101, 134, 135, 135, 135, 639, 311, 26, 14, 15, 16, 17, 15, 18, 19, 20, 14, 21, 22, 22, 22, 23, 24, 25, 27, 27, 28, 27, 27, 27, 29, 27, 27, 27, 27, 27, 27, 30, 31, 27, 32, 33, 34, 27, 27, 27, 27, 14, 27, 27, 28, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 35, 27, 32, 36, 37, 27, 27, 27, 27, 15, 16, 17, 15, 18, 19, 20, 95, 21, 22, 22, 22, 23, 24, 25, 93, 94, 118, 97, 97, 97, 40, 97, 119, 632, 102, 107, 97, 41, 42, 95, 26, 43, 44, 105, 103, 98, 110, 93, 94, 99, 118, 97, 153, 104, 40, 97, 119, 106, 109, 108, 41, 42, 108, 26, 43, 44, 45, 105, 98, 110, 110, 169, 99, 46, 47, 153, 104, 48, 49, 255, 106, 152, 152, 152, 97, 90, 108, 97, 90, 624, 50, 623, 97, 110, 120, 169, 51, 52, 97, 129, 53, 54, 14, 15, 16, 17, 15, 18, 19, 55, 14, 21, 56, 56, 56, 23, 24, 25, 121, 114, 104, 115, 116, 130, 134, 170, 134, 171, 97, 97, 97, 97, 117, 97, 58, 621, 177, 140, 92, 92, 92, 14, 114, 104, 115, 97, 116, 97, 183, 170, 171, 176, 174, 319, 96, 117, 96, 58, 14, 15, 16, 17, 15, 18, 19, 55, 14, 21, 56, 56, 56, 23, 24, 25, 59, 59, 60, 59, 59, 59, 61, 59, 59, 59, 59, 59, 59, 62, 63, 59, 64, 65, 66, 59, 59, 59, 59, 14, 59, 59, 60, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 67, 59, 64, 68, 69, 59, 59, 59, 59, 15, 16, 17, 15, 71, 72, 73, 97, 74, 75, 75, 75, 76, 24, 77, 97, 186, 97, 97, 187, 97, 184, 121, 97, 122, 97, 123, 130, 151, 178, 97, 
78, 619, 151, 151, 97, 151, 151, 175, 186, 305, 155, 187, 151, 306, 184, 121, 124, 243, 123, 242, 130, 309, 178, 78, 70, 15, 16, 17, 15, 71, 72, 73, 70, 74, 75, 75, 75, 76, 24, 77, 79, 79, 80, 79, 79, 79, 81, 79, 79, 79, 79, 79, 79, 82, 83, 79, 84, 85, 86, 79, 79, 79, 79, 70, 79, 79, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 87, 79, 84, 88, 89, 79, 79, 79, 79, 97, 97, 97, 125, 97, 134, 464, 127, 131, 188, 97, 189, 97, 126, 190, 192, 97, 128, 616, 124, 110, 123, 104, 132, 97, 134, 104, 127, 179, 97, 136, 127, 132, 188, 189, 110, 128, 134, 190, 192, 128, 97, 124, 110, 123, 104, 141, 132, 97, 104, 240, 179, 134, 136, 97, 96, 97, 138, 110, 142, 134, 139, 134, 172, 97, 241, 97, 96, 134, 143, 134, 145, 97, 147, 97, 97, 191, 134, 144, 149, 138, 97, 96, 134, 139, 146, 172, 97, 148, 97, 96, 150, 96, 463, 250, 145, 144, 148, 96, 191, 96, 144, 97, 97, 463, 97, 150, 96, 146, 180, 173, 97, 148, 96, 150, 112, 112, 112, 151, 144, 197, 310, 421, 151, 151, 97, 151, 151, 200, 150, 151, 155, 180, 151, 173, 151, 151, 97, 151, 151, 611, 151, 97, 155, 197, 151, 151, 151, 159, 151, 151, 97, 201, 97, 155, 97, 151, 97, 193, 156, 151, 158, 181, 157, 97, 151, 151, 201, 151, 151, 97, 181, 182, 155, 194, 151, 195, 522, 196, 607, 165, 194, 156, 97, 254, 181, 157, 151, 152, 152, 152, 201, 151, 151, 181, 151, 151, 194, 97, 196, 155, 196, 151, 160, 166, 198, 151, 97, 254, 97, 204, 151, 151, 161, 151, 151, 97, 221, 151, 155, 199, 151, 162, 151, 151, 379, 151, 151, 199, 97, 97, 155, 205, 151, 205, 134, 134, 524, 166, 97, 221, 162, 261, 199, 235, 162, 151, 134, 135, 135, 135, 151, 151, 97, 151, 151, 205, 605, 97, 155, 207, 151, 166, 151, 162, 96, 163, 235, 151, 151, 236, 151, 151, 181, 202, 237, 155, 97, 151, 97, 164, 134, 97, 207, 134, 97, 97, 97, 97, 372, 163, 213, 209, 236, 317, 168, 181, 203, 237, 181, 203, 134, 134, 164, 151, 97, 97, 97, 262, 151, 151, 96, 151, 151, 96, 97, 209, 155, 168, 151, 208, 134, 181, 203, 167, 97, 134, 210, 543, 97, 97, 96, 96, 134, 420, 97, 168, 97, 134, 97, 134, 214, 
97, 208, 97, 97, 211, 97, 244, 134, 378, 96, 212, 97, 134, 97, 96, 239, 97, 168, 215, 97, 134, 96, 216, 214, 97, 245, 96, 219, 96, 244, 97, 217, 218, 499, 238, 97, 217, 96, 239, 134, 602, 215, 96, 97, 97, 216, 220, 151, 245, 426, 96, 256, 151, 151, 217, 151, 151, 238, 151, 217, 155, 246, 151, 151, 151, 304, 151, 151, 97, 96, 220, 155, 151, 151, 256, 97, 97, 151, 151, 97, 151, 151, 465, 251, 246, 155, 222, 151, 151, 304, 257, 97, 223, 151, 151, 247, 151, 151, 248, 151, 463, 155, 249, 151, 151, 151, 251, 151, 151, 222, 97, 463, 155, 257, 151, 151, 223, 260, 247, 97, 151, 151, 224, 151, 151, 258, 151, 318, 155, 259, 151, 151, 151, 225, 151, 151, 588, 97, 151, 155, 260, 151, 227, 151, 151, 314, 151, 151, 258, 315, 226, 155, 151, 151, 584, 228, 583, 151, 151, 97, 151, 151, 97, 151, 97, 155, 324, 151, 151, 151, 263, 151, 151, 134, 97, 264, 155, 97, 151, 228, 151, 563, 582, 229, 97, 151, 151, 267, 151, 151, 324, 97, 151, 155, 264, 151, 230, 151, 151, 264, 151, 151, 97, 240, 265, 155, 229, 151, 233, 580, 268, 266, 340, 231, 97, 97, 306, 97, 469, 230, 151, 273, 134, 97, 97, 151, 151, 266, 151, 151, 97, 502, 151, 155, 266, 151, 231, 151, 151, 268, 151, 151, 274, 275, 274, 155, 269, 151, 234, 568, 270, 97, 271, 231, 232, 134, 272, 134, 565, 97, 134, 97, 268, 134, 97, 274, 275, 97, 97, 271, 134, 546, 234, 288, 97, 271, 231, 134, 278, 277, 276, 97, 134, 373, 279, 96, 97, 96, 134, 327, 96, 134, 97, 96, 281, 97, 288, 134, 546, 280, 96, 97, 277, 276, 97, 303, 134, 96, 282, 283, 97, 525, 96, 327, 321, 284, 134, 281, 96, 97, 97, 96, 322, 285, 97, 545, 307, 249, 303, 286, 501, 282, 283, 501, 151, 287, 96, 321, 284, 151, 151, 97, 151, 151, 320, 322, 96, 155, 151, 151, 307, 351, 312, 151, 151, 352, 151, 151, 287, 500, 97, 155, 97, 151, 97, 151, 323, 320, 289, 332, 151, 151, 325, 151, 151, 313, 312, 151, 155, 290, 151, 97, 151, 151, 134, 151, 151, 326, 308, 323, 155, 289, 151, 333, 501, 325, 97, 501, 328, 313, 151, 292, 290, 97, 291, 151, 151, 97, 151, 151, 326, 151, 97, 155, 96, 151, 
151, 151, 316, 151, 151, 329, 151, 97, 155, 97, 151, 151, 151, 329, 151, 151, 500, 293, 368, 155, 367, 151, 151, 330, 496, 331, 316, 151, 151, 382, 151, 151, 295, 294, 151, 155, 329, 151, 381, 151, 151, 368, 151, 151, 367, 151, 331, 155, 331, 151, 151, 151, 382, 151, 151, 295, 385, 296, 155, 97, 151, 151, 381, 97, 297, 97, 151, 151, 97, 151, 151, 97, 473, 468, 155, 333, 151, 151, 298, 385, 296, 383, 151, 151, 585, 151, 151, 297, 334, 151, 155, 97, 151, 97, 151, 151, 299, 151, 151, 333, 300, 298, 155, 97, 151, 383, 97, 97, 336, 301, 335, 335, 338, 339, 134, 134, 466, 302, 97, 97, 337, 97, 134, 134, 134, 341, 97, 97, 97, 97, 97, 337, 460, 335, 343, 342, 339, 339, 380, 344, 302, 415, 134, 337, 96, 96, 97, 346, 375, 341, 455, 315, 96, 96, 96, 134, 134, 345, 134, 97, 97, 380, 97, 344, 415, 134, 349, 97, 449, 97, 347, 375, 96, 369, 369, 369, 97, 96, 97, 348, 134, 345, 371, 374, 97, 96, 96, 501, 96, 350, 501, 151, 377, 448, 347, 96, 151, 151, 384, 151, 151, 376, 376, 376, 155, 371, 151, 374, 151, 424, 96, 353, 350, 151, 151, 377, 151, 151, 97, 151, 416, 354, 384, 151, 151, 151, 97, 151, 151, 386, 97, 151, 155, 388, 151, 353, 151, 151, 97, 151, 151, 387, 151, 416, 155, 389, 151, 151, 151, 355, 151, 151, 417, 387, 97, 155, 389, 151, 151, 424, 356, 423, 357, 151, 151, 387, 151, 151, 389, 151, 422, 155, 134, 151, 151, 151, 97, 151, 151, 418, 97, 151, 155, 390, 151, 423, 151, 151, 417, 151, 151, 396, 97, 358, 155, 151, 151, 97, 404, 429, 151, 151, 96, 151, 360, 359, 151, 391, 361, 404, 151, 151, 151, 392, 151, 151, 427, 358, 151, 155, 97, 151, 429, 151, 151, 97, 151, 151, 97, 359, 362, 155, 151, 151, 97, 97, 393, 151, 151, 97, 151, 151, 370, 97, 151, 155, 391, 151, 363, 151, 151, 393, 151, 151, 362, 97, 370, 155, 97, 151, 151, 97, 364, 365, 430, 151, 151, 134, 151, 151, 391, 97, 97, 155, 393, 151, 394, 394, 394, 419, 134, 134, 97, 308, 97, 97, 395, 430, 97, 397, 399, 399, 399, 366, 134, 134, 134, 96, 97, 97, 97, 134, 419, 470, 398, 97, 403, 403, 403, 395, 96, 96, 97, 397, 400, 
255, 366, 402, 410, 410, 410, 401, 253, 252, 96, 96, 96, 398, 428, 431, 151, 96, 134, 97, 185, 151, 151, 400, 151, 151, 402, 151, 432, 155, 97, 151, 151, 151, 425, 151, 151, 428, 134, 431, 155, 151, 151, 406, 405, 134, 151, 151, 472, 151, 151, 432, 433, 151, 155, 97, 151, 425, 151, 151, 134, 151, 151, 461, 422, 151, 155, 405, 151, 407, 151, 151, 472, 151, 151, 433, 151, 97, 155, 97, 151, 151, 151, 408, 151, 151, 461, 151, 91, 155, 96, 151, 151, 151, 645, 151, 151, 645, 434, 475, 155, 440, 151, 97, 151, 645, 645, 408, 409, 151, 151, 97, 151, 151, 97, 645, 411, 155, 97, 151, 151, 435, 475, 436, 441, 151, 151, 645, 151, 151, 435, 409, 151, 155, 437, 151, 412, 151, 151, 411, 151, 151, 441, 151, 97, 155, 437, 151, 151, 151, 97, 151, 151, 435, 645, 134, 155, 437, 151, 97, 134, 413, 645, 438, 97, 441, 495, 134, 134, 439, 645, 97, 97, 414, 462, 462, 462, 134, 444, 134, 442, 97, 645, 97, 443, 96, 439, 445, 645, 495, 96, 645, 439, 467, 467, 467, 414, 96, 96, 447, 134, 474, 444, 442, 97, 446, 151, 96, 645, 96, 445, 151, 151, 97, 151, 151, 134, 526, 151, 155, 466, 151, 447, 151, 151, 474, 151, 151, 645, 151, 96, 155, 471, 151, 151, 151, 97, 151, 151, 97, 450, 526, 155, 151, 151, 97, 96, 645, 151, 151, 451, 151, 151, 645, 480, 471, 453, 503, 151, 151, 481, 645, 452, 450, 151, 151, 527, 151, 151, 645, 97, 504, 155, 476, 151, 505, 151, 481, 645, 454, 503, 151, 151, 481, 151, 151, 645, 151, 528, 155, 527, 151, 151, 151, 504, 151, 151, 477, 505, 645, 155, 151, 151, 454, 456, 97, 151, 151, 477, 151, 151, 482, 528, 134, 155, 151, 151, 97, 506, 457, 151, 151, 97, 151, 151, 478, 97, 456, 155, 479, 151, 134, 477, 134, 519, 468, 134, 97, 458, 645, 97, 506, 645, 96, 459, 485, 485, 485, 97, 479, 645, 97, 483, 479, 490, 490, 490, 519, 97, 484, 97, 96, 507, 96, 151, 520, 96, 459, 645, 151, 151, 487, 151, 151, 508, 151, 521, 155, 523, 151, 151, 151, 484, 151, 151, 508, 645, 151, 155, 520, 151, 488, 151, 151, 645, 151, 151, 508, 151, 521, 155, 523, 151, 151, 151, 97, 151, 151, 645, 151, 97, 
489, 97, 151, 151, 151, 97, 151, 151, 509, 151, 540, 491, 497, 151, 151, 151, 541, 151, 151, 510, 498, 151, 155, 645, 151, 492, 151, 151, 544, 151, 151, 510, 151, 540, 155, 497, 151, 151, 151, 541, 151, 151, 510, 498, 134, 155, 134, 151, 97, 493, 97, 544, 134, 645, 645, 134, 97, 151, 645, 525, 97, 511, 151, 151, 494, 151, 151, 529, 645, 512, 155, 97, 151, 97, 96, 645, 96, 513, 530, 645, 531, 97, 96, 645, 511, 96, 645, 494, 532, 645, 151, 530, 512, 645, 97, 151, 151, 547, 151, 151, 513, 151, 530, 155, 532, 151, 151, 151, 548, 151, 151, 542, 532, 151, 155, 645, 151, 516, 151, 151, 547, 151, 151, 645, 97, 134, 155, 514, 151, 97, 549, 97, 548, 151, 542, 515, 533, 645, 151, 151, 517, 151, 151, 134, 645, 550, 155, 97, 151, 645, 514, 645, 551, 549, 560, 96, 151, 564, 515, 645, 533, 151, 151, 97, 151, 151, 645, 518, 551, 155, 645, 151, 534, 96, 151, 551, 535, 560, 97, 151, 151, 564, 151, 151, 552, 97, 566, 155, 151, 151, 518, 645, 562, 151, 151, 534, 151, 151, 134, 553, 535, 155, 97, 151, 645, 151, 97, 553, 537, 566, 151, 151, 536, 151, 151, 561, 562, 554, 155, 645, 151, 151, 553, 645, 567, 538, 151, 151, 96, 151, 151, 569, 606, 134, 539, 536, 151, 97, 151, 561, 554, 586, 97, 151, 151, 587, 151, 151, 567, 570, 609, 155, 151, 151, 555, 569, 606, 151, 151, 645, 151, 151, 97, 96, 586, 155, 556, 151, 587, 571, 151, 645, 645, 571, 609, 151, 151, 555, 151, 151, 97, 97, 557, 155, 589, 151, 151, 572, 573, 556, 610, 151, 151, 571, 151, 151, 134, 97, 645, 155, 97, 151, 134, 645, 558, 557, 97, 589, 645, 574, 645, 573, 573, 575, 610, 645, 151, 645, 581, 645, 559, 151, 151, 645, 151, 151, 96, 97, 615, 155, 600, 151, 96, 574, 151, 645, 645, 575, 576, 151, 151, 581, 151, 151, 645, 97, 590, 155, 97, 151, 151, 97, 615, 600, 577, 151, 151, 97, 151, 151, 601, 599, 576, 155, 591, 151, 151, 592, 612, 591, 578, 151, 151, 134, 151, 151, 593, 97, 577, 155, 134, 151, 617, 601, 582, 599, 579, 591, 645, 134, 593, 612, 645, 605, 613, 645, 151, 645, 594, 593, 645, 151, 151, 96, 151, 151, 617, 151, 
645, 155, 96, 151, 151, 151, 645, 151, 151, 613, 151, 96, 596, 594, 151, 151, 151, 645, 151, 151, 645, 151, 595, 155, 134, 151, 151, 151, 97, 151, 151, 645, 614, 645, 155, 151, 151, 620, 603, 645, 151, 151, 597, 151, 151, 595, 645, 618, 155, 645, 151, 151, 645, 598, 96, 614, 151, 151, 604, 151, 151, 620, 603, 151, 155, 627, 151, 628, 151, 151, 618, 151, 151, 622, 625, 151, 155, 626, 151, 629, 151, 151, 604, 151, 151, 630, 631, 633, 608, 627, 151, 628, 634, 635, 636, 645, 622, 625, 637, 638, 626, 640, 641, 629, 642, 643, 645, 645, 630, 631, 645, 633, 645, 645, 645, 634, 645, 635, 636, 645, 645, 637, 638, 645, 640, 641, 645, 642, 643, 14, 14, 14, 14, 14, 14, 14, 14, 38, 38, 38, 38, 38, 38, 38, 38, 57, 57, 57, 57, 57, 57, 57, 57, 70, 70, 70, 70, 70, 70, 70, 70, 96, 645, 96, 111, 111, 645, 645, 111, 111, 111, 111, 133, 133, 137, 137, 137, 151, 645, 645, 645, 151, 151, 151, 154, 645, 645, 645, 154, 154, 154, 463, 645, 645, 463, 463, 463, 463, 463, 486, 645, 645, 486, 486, 486, 486, 486, 13, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645}; static const flex_int16_t yy_chk[2708] = { 0, 0, 1, 1, 1, 1, 1, 1, 1, 656, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 4, 4, 4, 40, 29, 15, 5, 5, 15, 6, 6, 29, 30, 1, 5, 5, 5, 6, 6, 6, 22, 22, 22, 39, 39, 39, 643, 40, 242, 30, 56, 56, 56, 56, 638, 242, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 7, 7, 7, 7, 7, 7, 7, 26, 7, 7, 7, 7, 7, 7, 7, 24, 24, 43, 37, 33, 28, 7, 31, 44, 631, 31, 33, 32, 7, 7, 26, 7, 7, 7, 32, 31, 28, 37, 24, 24, 28, 43, 34, 78, 31, 7, 36, 44, 32, 34, 33, 7, 7, 36, 7, 7, 7, 8, 32, 28, 37, 34, 93, 28, 8, 8, 78, 31, 8, 8, 197, 32, 
75, 75, 75, 197, 90, 36, 45, 90, 622, 8, 621, 48, 34, 45, 93, 8, 8, 35, 48, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 45, 41, 35, 41, 42, 48, 59, 94, 61, 95, 59, 102, 61, 100, 42, 103, 9, 619, 103, 61, 92, 92, 92, 9, 41, 35, 41, 109, 42, 250, 109, 94, 95, 102, 100, 250, 59, 42, 61, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 46, 11, 11, 11, 11, 11, 11, 11, 50, 113, 101, 110, 114, 53, 110, 50, 177, 46, 104, 46, 53, 79, 104, 176, 11, 617, 79, 79, 240, 79, 79, 101, 113, 237, 79, 114, 79, 237, 110, 50, 46, 177, 46, 176, 53, 240, 104, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 47, 49, 51, 47, 52, 58, 419, 52, 49, 115, 105, 116, 419, 47, 117, 119, 54, 52, 614, 51, 49, 51, 47, 54, 174, 62, 52, 47, 105, 62, 58, 52, 49, 115, 116, 54, 47, 60, 117, 119, 52, 60, 51, 49, 51, 47, 62, 54, 98, 52, 174, 105, 63, 58, 175, 62, 63, 60, 54, 63, 64, 60, 65, 98, 64, 175, 65, 60, 66, 63, 67, 64, 66, 65, 67, 183, 118, 68, 63, 66, 60, 68, 63, 69, 60, 64, 98, 69, 68, 106, 64, 66, 65, 449, 183, 64, 67, 65, 66, 118, 67, 63, 373, 99, 449, 241, 69, 68, 64, 106, 99, 123, 68, 69, 66, 112, 112, 112, 82, 67, 123, 241, 373, 82, 82, 126, 82, 82, 126, 69, 80, 82, 106, 82, 99, 80, 80, 120, 80, 80, 609, 81, 108, 80, 123, 80, 81, 81, 82, 81, 81, 107, 126, 121, 81, 122, 81, 124, 120, 80, 85, 81, 108, 80, 128, 85, 85, 128, 85, 85, 499, 107, 107, 85, 121, 85, 122, 499, 124, 601, 85, 120, 80, 125, 187, 108, 80, 83, 152, 152, 152, 128, 83, 83, 107, 83, 83, 121, 127, 122, 83, 124, 83, 83, 85, 125, 87, 131, 187, 319, 131, 87, 87, 83, 87, 87, 193, 153, 88, 87, 127, 
87, 83, 88, 88, 319, 88, 88, 125, 132, 502, 88, 132, 88, 131, 137, 136, 502, 88, 137, 153, 87, 193, 127, 169, 83, 84, 135, 135, 135, 135, 84, 84, 129, 84, 84, 132, 599, 593, 84, 136, 84, 88, 89, 87, 137, 84, 169, 89, 89, 170, 89, 89, 129, 129, 171, 89, 248, 89, 130, 84, 139, 194, 136, 143, 139, 309, 592, 143, 309, 84, 143, 139, 170, 248, 89, 129, 129, 171, 130, 130, 138, 140, 84, 86, 138, 140, 522, 194, 86, 86, 139, 86, 86, 143, 372, 139, 86, 89, 86, 138, 141, 130, 130, 86, 141, 142, 140, 522, 591, 142, 138, 140, 144, 372, 178, 86, 144, 145, 173, 146, 144, 145, 138, 146, 179, 141, 318, 178, 147, 318, 141, 142, 147, 148, 172, 142, 173, 148, 86, 145, 465, 149, 144, 146, 144, 149, 179, 145, 149, 146, 178, 378, 147, 147, 465, 172, 590, 148, 147, 173, 150, 588, 145, 148, 150, 180, 146, 150, 154, 179, 378, 149, 189, 154, 154, 147, 154, 154, 172, 156, 148, 154, 180, 154, 156, 156, 236, 156, 156, 184, 150, 150, 156, 157, 156, 189, 181, 182, 157, 157, 420, 157, 157, 420, 184, 180, 157, 156, 157, 158, 236, 190, 585, 157, 158, 158, 181, 158, 158, 182, 159, 486, 158, 182, 158, 159, 159, 184, 159, 159, 156, 249, 486, 159, 190, 159, 160, 157, 192, 181, 583, 160, 160, 158, 160, 160, 191, 161, 249, 160, 191, 160, 161, 161, 159, 161, 161, 568, 563, 162, 161, 192, 161, 161, 162, 162, 246, 162, 162, 191, 246, 160, 162, 163, 162, 564, 162, 563, 163, 163, 195, 163, 163, 200, 164, 196, 163, 256, 163, 164, 164, 195, 164, 164, 210, 543, 196, 164, 210, 164, 162, 166, 543, 562, 163, 198, 166, 166, 200, 166, 166, 256, 199, 167, 166, 195, 166, 164, 167, 167, 196, 167, 167, 204, 210, 198, 167, 163, 167, 167, 560, 200, 199, 275, 166, 201, 426, 275, 469, 426, 164, 165, 204, 207, 205, 202, 165, 165, 198, 165, 165, 203, 469, 168, 165, 199, 165, 166, 168, 168, 201, 168, 168, 205, 207, 204, 168, 202, 168, 168, 548, 202, 545, 203, 165, 165, 208, 203, 209, 545, 208, 212, 209, 201, 211, 212, 205, 207, 211, 310, 202, 213, 539, 168, 221, 213, 203, 165, 214, 211, 209, 208, 214, 215, 310, 212, 208, 215, 
209, 216, 260, 212, 217, 216, 211, 214, 217, 221, 218, 525, 213, 213, 218, 209, 208, 524, 235, 219, 214, 215, 216, 219, 503, 215, 260, 253, 217, 220, 214, 216, 238, 220, 217, 254, 218, 251, 524, 238, 218, 235, 219, 467, 215, 216, 467, 222, 220, 219, 253, 217, 222, 222, 244, 222, 222, 251, 254, 220, 222, 223, 222, 238, 288, 244, 223, 223, 288, 223, 223, 220, 489, 481, 223, 265, 223, 245, 224, 255, 251, 222, 265, 224, 224, 258, 224, 224, 245, 244, 225, 224, 223, 224, 261, 225, 225, 277, 225, 225, 259, 277, 255, 225, 222, 225, 265, 490, 258, 480, 490, 261, 245, 226, 225, 223, 470, 224, 226, 226, 247, 226, 226, 259, 227, 262, 226, 277, 226, 227, 227, 247, 227, 227, 261, 228, 263, 227, 264, 227, 228, 228, 262, 228, 228, 466, 226, 304, 228, 303, 228, 229, 263, 461, 264, 247, 229, 229, 322, 229, 229, 228, 227, 230, 229, 262, 229, 321, 230, 230, 304, 230, 230, 303, 231, 263, 230, 264, 230, 231, 231, 322, 231, 231, 228, 327, 229, 231, 437, 231, 232, 321, 436, 230, 565, 232, 232, 266, 232, 232, 269, 431, 425, 232, 266, 232, 233, 231, 327, 229, 323, 233, 233, 565, 233, 233, 230, 269, 234, 233, 270, 233, 271, 234, 234, 232, 234, 234, 266, 232, 231, 234, 272, 234, 323, 273, 274, 270, 233, 271, 269, 273, 274, 276, 278, 423, 234, 276, 278, 272, 320, 280, 279, 281, 276, 280, 279, 281, 313, 421, 270, 415, 271, 279, 278, 273, 274, 320, 281, 234, 367, 282, 272, 276, 278, 282, 283, 313, 276, 410, 283, 280, 279, 281, 284, 286, 282, 285, 284, 286, 320, 285, 281, 367, 287, 286, 307, 404, 287, 284, 313, 282, 305, 305, 305, 316, 283, 312, 285, 343, 282, 307, 312, 343, 284, 286, 501, 285, 287, 501, 289, 316, 403, 284, 287, 289, 289, 326, 289, 289, 314, 314, 314, 289, 307, 289, 312, 290, 399, 343, 289, 287, 290, 290, 316, 290, 290, 330, 291, 368, 290, 326, 290, 291, 291, 328, 291, 291, 328, 331, 292, 291, 330, 291, 289, 292, 292, 329, 292, 292, 329, 293, 368, 292, 331, 292, 293, 293, 291, 293, 293, 394, 328, 375, 293, 330, 293, 294, 376, 292, 375, 293, 294, 294, 329, 294, 294, 331, 295, 374, 
294, 342, 294, 295, 295, 342, 295, 295, 370, 336, 296, 295, 336, 295, 375, 296, 296, 369, 296, 296, 342, 379, 295, 296, 297, 296, 338, 361, 381, 297, 297, 342, 297, 297, 296, 298, 336, 297, 352, 297, 298, 298, 338, 298, 298, 379, 295, 299, 298, 335, 298, 381, 299, 299, 334, 299, 299, 333, 296, 298, 299, 300, 299, 332, 339, 338, 300, 300, 317, 300, 300, 315, 337, 301, 300, 337, 300, 299, 301, 301, 339, 301, 301, 298, 311, 306, 301, 371, 301, 302, 268, 300, 301, 382, 302, 302, 341, 302, 302, 337, 341, 267, 302, 339, 302, 340, 340, 340, 371, 344, 345, 243, 239, 344, 345, 341, 382, 427, 344, 346, 346, 346, 302, 347, 348, 350, 341, 347, 348, 350, 349, 371, 427, 345, 349, 351, 351, 351, 341, 344, 345, 380, 344, 347, 188, 302, 350, 360, 360, 360, 349, 186, 185, 347, 348, 350, 345, 380, 383, 353, 349, 133, 377, 111, 353, 353, 347, 353, 353, 350, 355, 384, 353, 96, 353, 355, 355, 377, 355, 355, 380, 57, 383, 355, 356, 355, 355, 353, 55, 356, 356, 429, 356, 356, 384, 385, 357, 356, 27, 356, 377, 357, 357, 397, 357, 357, 416, 397, 358, 357, 353, 357, 356, 358, 358, 429, 358, 358, 385, 359, 392, 358, 386, 358, 359, 359, 358, 359, 359, 416, 362, 17, 359, 397, 359, 362, 362, 13, 362, 362, 0, 386, 433, 362, 392, 362, 388, 363, 0, 0, 358, 359, 363, 363, 387, 363, 363, 389, 0, 362, 363, 393, 363, 364, 386, 433, 388, 392, 364, 364, 0, 364, 364, 387, 359, 365, 364, 389, 364, 364, 365, 365, 362, 365, 365, 393, 366, 390, 365, 388, 365, 366, 366, 391, 366, 366, 387, 0, 395, 366, 389, 366, 395, 396, 365, 0, 390, 396, 393, 460, 398, 400, 391, 0, 398, 400, 366, 417, 417, 417, 401, 398, 402, 395, 401, 0, 402, 396, 395, 390, 400, 0, 460, 396, 0, 391, 424, 424, 424, 366, 398, 400, 402, 443, 432, 398, 395, 443, 401, 405, 401, 0, 402, 400, 405, 405, 428, 405, 405, 444, 504, 406, 405, 444, 405, 402, 406, 406, 432, 406, 406, 0, 407, 443, 406, 428, 406, 407, 407, 471, 407, 407, 440, 405, 504, 407, 408, 407, 441, 444, 0, 408, 408, 406, 408, 408, 0, 440, 428, 408, 471, 408, 409, 441, 0, 407, 405, 
409, 409, 505, 409, 409, 0, 434, 472, 409, 434, 409, 473, 411, 440, 0, 409, 471, 411, 411, 441, 411, 411, 0, 412, 506, 411, 505, 411, 412, 412, 472, 412, 412, 434, 473, 0, 412, 413, 412, 409, 411, 435, 413, 413, 435, 413, 413, 442, 506, 442, 413, 414, 413, 442, 474, 412, 414, 414, 438, 414, 414, 438, 439, 411, 414, 439, 414, 445, 435, 446, 495, 445, 447, 446, 413, 0, 447, 474, 0, 442, 414, 448, 448, 448, 476, 438, 0, 498, 446, 439, 455, 455, 455, 495, 497, 447, 477, 445, 476, 446, 450, 497, 447, 414, 0, 450, 450, 450, 450, 450, 477, 451, 498, 450, 500, 450, 451, 451, 447, 451, 451, 476, 0, 452, 451, 497, 451, 451, 452, 452, 0, 452, 452, 477, 454, 498, 452, 500, 452, 454, 454, 478, 454, 454, 0, 456, 464, 454, 520, 454, 456, 456, 479, 456, 456, 478, 457, 519, 456, 464, 456, 457, 457, 520, 457, 457, 479, 464, 458, 457, 0, 457, 457, 458, 458, 523, 458, 458, 478, 459, 519, 458, 464, 458, 459, 459, 520, 459, 459, 479, 464, 482, 459, 483, 459, 482, 458, 483, 523, 484, 0, 0, 513, 484, 493, 0, 513, 507, 482, 493, 493, 459, 493, 493, 507, 0, 482, 493, 508, 493, 509, 482, 0, 483, 484, 508, 0, 509, 510, 484, 0, 482, 513, 0, 459, 510, 0, 488, 507, 482, 0, 521, 488, 488, 526, 488, 488, 484, 487, 508, 488, 509, 488, 487, 487, 527, 487, 487, 521, 510, 492, 487, 0, 487, 488, 492, 492, 526, 492, 492, 0, 529, 511, 492, 487, 492, 511, 528, 530, 527, 494, 521, 487, 511, 0, 494, 494, 492, 494, 494, 512, 0, 529, 494, 512, 494, 0, 487, 0, 530, 528, 540, 511, 514, 544, 487, 0, 511, 514, 514, 531, 514, 514, 0, 494, 529, 514, 0, 514, 512, 512, 515, 530, 514, 540, 532, 515, 515, 544, 515, 515, 531, 542, 546, 515, 516, 515, 494, 0, 542, 516, 516, 512, 516, 516, 533, 532, 514, 516, 533, 516, 0, 517, 541, 531, 516, 546, 517, 517, 515, 517, 517, 541, 542, 533, 517, 0, 517, 518, 532, 0, 547, 517, 518, 518, 533, 518, 518, 549, 600, 534, 518, 515, 518, 534, 535, 541, 533, 566, 550, 535, 535, 567, 535, 535, 547, 550, 606, 535, 536, 535, 534, 549, 600, 536, 536, 0, 536, 536, 551, 534, 566, 536, 535, 
536, 567, 551, 537, 0, 0, 550, 606, 537, 537, 534, 537, 537, 552, 553, 536, 537, 569, 537, 538, 552, 553, 535, 607, 538, 538, 551, 538, 538, 554, 561, 0, 538, 554, 538, 555, 0, 537, 536, 555, 569, 0, 554, 0, 552, 553, 555, 607, 0, 556, 0, 561, 0, 538, 556, 556, 0, 556, 556, 554, 570, 613, 556, 584, 556, 555, 554, 557, 0, 0, 555, 556, 557, 557, 561, 557, 557, 0, 571, 570, 557, 572, 557, 558, 581, 613, 584, 557, 558, 558, 573, 558, 558, 586, 581, 556, 558, 571, 558, 559, 572, 610, 570, 558, 559, 559, 574, 559, 559, 573, 574, 557, 559, 575, 559, 615, 586, 575, 581, 559, 571, 0, 603, 572, 610, 0, 603, 611, 0, 576, 0, 574, 573, 0, 576, 576, 574, 576, 576, 615, 577, 0, 576, 575, 576, 577, 577, 0, 577, 577, 611, 578, 603, 577, 574, 577, 578, 578, 0, 578, 578, 0, 579, 576, 578, 594, 578, 579, 579, 594, 579, 579, 0, 612, 0, 579, 595, 579, 618, 594, 0, 595, 595, 578, 595, 595, 576, 0, 616, 595, 0, 595, 597, 0, 579, 594, 612, 597, 597, 595, 597, 597, 618, 594, 598, 597, 626, 597, 627, 598, 598, 616, 598, 598, 620, 624, 604, 598, 625, 598, 628, 604, 604, 595, 604, 604, 629, 630, 632, 604, 626, 604, 627, 633, 634, 635, 0, 620, 624, 636, 637, 625, 639, 640, 628, 641, 642, 0, 0, 629, 630, 0, 632, 0, 0, 0, 633, 0, 634, 635, 0, 0, 636, 637, 0, 639, 640, 0, 641, 642, 646, 646, 646, 646, 646, 646, 646, 646, 647, 647, 647, 647, 647, 647, 647, 647, 648, 648, 648, 648, 648, 648, 648, 648, 649, 649, 649, 649, 649, 649, 649, 649, 650, 0, 650, 651, 651, 0, 0, 651, 651, 651, 651, 652, 652, 653, 653, 653, 654, 0, 0, 0, 654, 654, 654, 655, 0, 0, 0, 655, 655, 655, 657, 0, 0, 657, 657, 657, 657, 657, 658, 0, 0, 658, 658, 658, 658, 658, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645, 645}; /* The intent behind this definition is that 
it'll catch * any uses of REJECT which flex missed. */ #define REJECT reject_used_but_not_detected #define yymore() yymore_used_but_not_detected #define YY_MORE_ADJ 0 #define YY_RESTORE_YY_MORE_OFFSET #include <string> #include "parser.h" #define yyterminate() return (END) #define INITIAL 0 #define MATCH_STRING_STATE 1 #define MATCH_RESPONSE_CODE_STATE 2 #define SUPPORTED_METHODS_STATE 3 #define CONTENT_TYPE_STATE 4 #define SESSION_STATE 5 #ifndef YY_NO_UNISTD_H /* Special case for "unistd.h", since it is non-ANSI. We include it way * down here because we want the user's section 1 to have been scanned * first. The user has a chance to override it with an option. */ #include <unistd.h> #endif #define YY_EXTRA_TYPE bool /* Holds the entire state of the reentrant scanner. */ struct yyguts_t { /* User-defined. Not touched by flex. */ YY_EXTRA_TYPE yyextra_r; /* The rest are the same as the globals declared in the non-reentrant * scanner. */ FILE *yyin_r, *yyout_r; size_t yy_buffer_stack_top; /**< index of top of stack. */ size_t yy_buffer_stack_max; /**< capacity of stack. */ YY_BUFFER_STATE* yy_buffer_stack; /**< Stack as an array. */ char yy_hold_char; int yy_n_chars; int yyleng_r; char* yy_c_buf_p; int yy_init; int yy_start; int yy_did_buffer_switch_on_eof; int yy_start_stack_ptr; int yy_start_stack_depth; int* yy_start_stack; yy_state_type yy_last_accepting_state; char* yy_last_accepting_cpos; int yylineno_r; int yy_flex_debug_r; char* yytext_r; int yy_more_flag; int yy_more_len; YYSTYPE* yylval_r; }; /* end struct yyguts_t */ static int yy_init_globals(yyscan_t yyscanner); /* This must go here because YYSTYPE and YYLTYPE are included * from bison output in section 1.*/ #define yylval yyg->yylval_r int yylex_init(yyscan_t* scanner); int yylex_init_extra(YY_EXTRA_TYPE user_defined, yyscan_t* scanner); /* Accessor methods to globals. These are made visible to non-reentrant scanners for convenience. 
*/ int yylex_destroy(yyscan_t yyscanner); int yyget_debug(yyscan_t yyscanner); void yyset_debug(int debug_flag, yyscan_t yyscanner); YY_EXTRA_TYPE yyget_extra(yyscan_t yyscanner); void yyset_extra(YY_EXTRA_TYPE user_defined, yyscan_t yyscanner); FILE* yyget_in(yyscan_t yyscanner); void yyset_in(FILE* _in_str, yyscan_t yyscanner); FILE* yyget_out(yyscan_t yyscanner); void yyset_out(FILE* _out_str, yyscan_t yyscanner); int yyget_leng(yyscan_t yyscanner); char* yyget_text(yyscan_t yyscanner); int yyget_lineno(yyscan_t yyscanner); void yyset_lineno(int _line_number, yyscan_t yyscanner); int yyget_column(yyscan_t yyscanner); void yyset_column(int _column_no, yyscan_t yyscanner); YYSTYPE* yyget_lval(yyscan_t yyscanner); void yyset_lval(YYSTYPE* yylval_param, yyscan_t yyscanner); /* Macros after this point can all be overridden by user definitions in * section 1. */ #ifndef YY_SKIP_YYWRAP #ifdef __cplusplus extern "C" int yywrap(yyscan_t yyscanner); #else extern int yywrap(yyscan_t yyscanner); #endif #endif #ifndef YY_NO_UNPUT #endif #ifndef yytext_ptr static void yy_flex_strncpy(char*, const char*, int, yyscan_t yyscanner); #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen(const char*, yyscan_t yyscanner); #endif #ifndef YY_NO_INPUT #ifdef __cplusplus static int yyinput(yyscan_t yyscanner); #else static int input(yyscan_t yyscanner); #endif #endif /* Amount of stuff to slurp up with each read. */ #ifndef YY_READ_BUF_SIZE #ifdef __ia64__ /* On IA-64, the buffer size is 16k, not 8k */ #define YY_READ_BUF_SIZE 16384 #else #define YY_READ_BUF_SIZE 8192 #endif /* __ia64__ */ #endif /* Copy whatever the last rule matched to the standard output. */ #ifndef ECHO /* This used to be an fputs(), but since the string might contain NUL's, * we now use fwrite(). */ #define ECHO \ do { \ if (fwrite(yytext, (size_t)yyleng, 1, yyout)) { \ } \ } while (0) #endif /* Gets input and stuffs it into "buf". number of characters read, or YY_NULL, * is returned in "result". 
*/ #ifndef YY_INPUT #define YY_INPUT(buf, result, max_size) \ if (YY_CURRENT_BUFFER_LVALUE->yy_is_interactive) { \ int c = '*'; \ int n; \ for (n = 0; n < max_size && (c = getc(yyin)) != EOF && c != '\n'; ++n) \ buf[n] = (char)c; \ if (c == '\n') \ buf[n++] = (char)c; \ if (c == EOF && ferror(yyin)) \ YY_FATAL_ERROR("input in flex scanner failed"); \ result = n; \ } else { \ errno = 0; \ while ((result = (int)fread(buf, 1, (yy_size_t)max_size, yyin)) == 0 && \ ferror(yyin)) { \ if (errno != EINTR) { \ YY_FATAL_ERROR("input in flex scanner failed"); \ break; \ } \ errno = 0; \ clearerr(yyin); \ } \ } #endif /* No semi-colon after return; correct usage is to write "yyterminate();" - * we don't want an extra ';' after the "return" because that will cause * some compilers to complain about unreachable statements. */ #ifndef yyterminate #define yyterminate() return YY_NULL #endif /* Number of entries by which start-condition stack grows. */ #ifndef YY_START_STACK_INCR #define YY_START_STACK_INCR 25 #endif /* Report a fatal error. */ #ifndef YY_FATAL_ERROR #define YY_FATAL_ERROR(msg) yy_fatal_error( msg , yyscanner) #endif /* end tables serialization structures and prototypes */ /* Default declaration of generated scanner - a define so the user can * easily add parameters. */ #ifndef YY_DECL #define YY_DECL_IS_OURS 1 extern int yylex(YYSTYPE* yylval_param, yyscan_t yyscanner); #define YY_DECL int yylex(YYSTYPE* yylval_param, yyscan_t yyscanner) #endif /* !YY_DECL */ /* Code executed at the beginning of each rule, after yytext and yyleng * have been set up. */ #ifndef YY_USER_ACTION #define YY_USER_ACTION #endif /* Code executed at the end of each rule. */ #ifndef YY_BREAK #define YY_BREAK /*LINTED*/ break; #endif #define YY_RULE_SETUP \ if ( yyleng > 0 ) \ YY_CURRENT_BUFFER_LVALUE->yy_at_bol = \ (yytext[yyleng - 1] == '\n'); \ YY_USER_ACTION /** The main scanner function which does all the work. 
*/ YY_DECL { yy_state_type yy_current_state; char *yy_cp, *yy_bp; int yy_act; struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yylval = yylval_param; if (!yyg->yy_init) { yyg->yy_init = 1; #ifdef YY_USER_INIT YY_USER_INIT; #endif if (!yyg->yy_start) yyg->yy_start = 1; /* first start state */ if (!yyin) yyin = stdin; if (!yyout) yyout = stdout; if (!YY_CURRENT_BUFFER) { yyensure_buffer_stack(yyscanner); YY_CURRENT_BUFFER_LVALUE = yy_create_buffer(yyin, YY_BUF_SIZE, yyscanner); } yy_load_buffer_state(yyscanner); } { while ( /*CONSTCOND*/ 1) /* loops until end-of-file is reached */ { yy_cp = yyg->yy_c_buf_p; /* Support of yytext. */ *yy_cp = yyg->yy_hold_char; /* yy_bp points to the position in yy_ch_buf of the start of * the current run. */ yy_bp = yy_cp; yy_current_state = yyg->yy_start; yy_current_state += YY_AT_BOL(); yy_match: do { YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)]; if (yy_accept[yy_current_state]) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while (yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state) { yy_current_state = (int)yy_def[yy_current_state]; if (yy_current_state >= 646) yy_c = yy_meta[yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + yy_c]; ++yy_cp; } while (yy_current_state != 645); yy_cp = yyg->yy_last_accepting_cpos; yy_current_state = yyg->yy_last_accepting_state; yy_find_action: yy_act = yy_accept[yy_current_state]; YY_DO_BEFORE_ACTION; do_action: /* This label is used only to access EOF actions. 
*/ switch (yy_act) { /* beginning of action switch */ case 0: /* must back up */ /* undo the effects of YY_DO_BEFORE_ACTION */ *yy_cp = yyg->yy_hold_char; yy_cp = yyg->yy_last_accepting_cpos; yy_current_state = yyg->yy_last_accepting_state; goto yy_find_action; case 1: /* rule 1 can match eol */ YY_RULE_SETUP { BEGIN(INITIAL); } YY_BREAK case 2: /* rule 2 can match eol */ YY_RULE_SETUP { BEGIN(INITIAL); } YY_BREAK case 3: YY_RULE_SETUP { BEGIN(INITIAL); } YY_BREAK /* Convert these */ case 4: YY_RULE_SETUP { return '='; } YY_BREAK case 5: YY_RULE_SETUP { return '-'; } YY_BREAK case 6: YY_RULE_SETUP { return ','; } YY_BREAK case 7: YY_RULE_SETUP { return '*'; } YY_BREAK case 8: YY_RULE_SETUP { return ';'; } YY_BREAK case 9: YY_RULE_SETUP { return ':'; } YY_BREAK case 10: YY_RULE_SETUP { return '/'; } YY_BREAK case 11: YY_RULE_SETUP return WFD_SP; YY_BREAK case 12: YY_RULE_SETUP return WFD_TIMEOUT; YY_BREAK case 13: YY_RULE_SETUP return WFD_SERVER_PORT; YY_BREAK case 14: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_OPTIONS; } YY_BREAK case 15: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_SET_PARAMETER; } YY_BREAK case 16: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_GET_PARAMETER; } YY_BREAK case 17: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_SETUP; } YY_BREAK case 18: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_PLAY; } YY_BREAK case 19: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_TEARDOWN; } YY_BREAK case 20: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_PAUSE; } YY_BREAK /* RTSP response, get reply code, RTSP/1.0 200 OK */ case 21: YY_RULE_SETUP { BEGIN(MATCH_RESPONSE_CODE_STATE); return WFD_RESPONSE; } YY_BREAK /* CSeq: i */ case 22: YY_RULE_SETUP { BEGIN(INITIAL); return WFD_CSEQ; } YY_BREAK case 23: YY_RULE_SETUP { BEGIN(SUPPORTED_METHODS_STATE); return WFD_RESPONSE_METHODS; } YY_BREAK case 24: YY_RULE_SETUP { return WFD_SUPPORT_CHECK; } YY_BREAK case 25: YY_RULE_SETUP { BEGIN(CONTENT_TYPE_STATE); return WFD_CONTENT_TYPE; } YY_BREAK case 26: YY_RULE_SETUP { BEGIN(INITIAL); 
return WFD_CONTENT_LENGTH; } YY_BREAK case 27: YY_RULE_SETUP { BEGIN(SESSION_STATE); return WFD_SESSION; } YY_BREAK case 28: YY_RULE_SETUP { return WFD_TRANSPORT; } YY_BREAK case 29: YY_RULE_SETUP { BEGIN(MATCH_STRING_STATE); yylval->sval = new std::string(yytext, yyleng - 1); return WFD_HEADER; } YY_BREAK case 30: YY_RULE_SETUP { BEGIN(INITIAL); yylval->sval = new std::string(yytext, yyleng); return WFD_SESSION_ID; } YY_BREAK case 31: YY_RULE_SETUP { BEGIN(MATCH_STRING_STATE); yylval->nval = atoi(yytext); return WFD_RESPONSE_CODE; } YY_BREAK case 32: /* rule 32 can match eol */ *yy_cp = yyg->yy_hold_char; /* undo effects of setting up yytext */ YY_LINENO_REWIND_TO(yy_cp - 2); yyg->yy_c_buf_p = yy_cp -= 2; YY_DO_BEFORE_ACTION; /* set up yytext again */ YY_RULE_SETUP { BEGIN(INITIAL); yylval->sval = new std::string(yytext); return WFD_STRING; } YY_BREAK case 33: YY_RULE_SETUP { return WFD_OPTIONS; } YY_BREAK case 34: YY_RULE_SETUP { return WFD_SET_PARAMETER; } YY_BREAK case 35: YY_RULE_SETUP { return WFD_GET_PARAMETER; } YY_BREAK case 36: YY_RULE_SETUP { return WFD_SETUP; } YY_BREAK case 37: YY_RULE_SETUP { return WFD_PLAY; } YY_BREAK case 38: YY_RULE_SETUP { return WFD_TEARDOWN; } YY_BREAK case 39: YY_RULE_SETUP { return WFD_PAUSE; } YY_BREAK case 40: YY_RULE_SETUP { return WFD_TAG; } YY_BREAK case 41: YY_RULE_SETUP { BEGIN(INITIAL); yylval->sval = new std::string(yytext); return WFD_MIME; } YY_BREAK case 42: YY_RULE_SETUP { std::string str(yytext, yyleng); str += '\0'; errno = 0; yylval->nval = strtoull(str.c_str(), NULL, 10); if (errno) yyterminate(); return WFD_NUM; } YY_BREAK /* RTSP request rule, e.g., OPTIONS * RTSP/1.0 */ case 43: YY_RULE_SETUP { return WFD_END; } YY_BREAK /* GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0 */ case 44: YY_RULE_SETUP { yylval->sval = new std::string(yytext); return WFD_REQUEST_URI; } YY_BREAK /* all unmatched */ case 45: YY_RULE_SETUP {} YY_BREAK case 46: YY_RULE_SETUP YY_FATAL_ERROR("flex scanner jammed"); YY_BREAK case 
YY_STATE_EOF(INITIAL): case YY_STATE_EOF(MATCH_STRING_STATE): case YY_STATE_EOF(MATCH_RESPONSE_CODE_STATE): case YY_STATE_EOF(SUPPORTED_METHODS_STATE): case YY_STATE_EOF(CONTENT_TYPE_STATE): case YY_STATE_EOF(SESSION_STATE): yyterminate(); case YY_END_OF_BUFFER: { /* Amount of text matched not including the EOB char. */ int yy_amount_of_matched_text = (int)(yy_cp - yyg->yytext_ptr) - 1; /* Undo the effects of YY_DO_BEFORE_ACTION. */ *yy_cp = yyg->yy_hold_char; YY_RESTORE_YY_MORE_OFFSET if (YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW) { /* We're scanning a new file or input source. It's * possible that this happened because the user * just pointed yyin at a new source and called * yylex(). If so, then we have to assure * consistency between YY_CURRENT_BUFFER and our * globals. Here is the right place to do so, because * this is the first action (other than possibly a * back-up) that will match for the new input source. */ yyg->yy_n_chars = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; YY_CURRENT_BUFFER_LVALUE->yy_input_file = yyin; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL; } /* Note that here we test for yy_c_buf_p "<=" to the position * of the first EOB in the buffer, since yy_c_buf_p will * already have been incremented past the NUL character * (since all states make transitions on EOB to the * end-of-buffer state). Contrast this with the test * in input(). */ if (yyg->yy_c_buf_p <= &YY_CURRENT_BUFFER_LVALUE ->yy_ch_buf[yyg->yy_n_chars]) { /* This was really a NUL. */ yy_state_type yy_next_state; yyg->yy_c_buf_p = yyg->yytext_ptr + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state(yyscanner); /* Okay, we're now positioned to make the NUL * transition. We couldn't have * yy_get_previous_state() go ahead and do it * for us because it doesn't know how to deal * with the possibility of jamming (and we don't * want to build jamming into it because then it * will run more slowly). 
*/ yy_next_state = yy_try_NUL_trans(yy_current_state, yyscanner); yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; if (yy_next_state) { /* Consume the NUL. */ yy_cp = ++yyg->yy_c_buf_p; yy_current_state = yy_next_state; goto yy_match; } else { yy_cp = yyg->yy_last_accepting_cpos; yy_current_state = yyg->yy_last_accepting_state; goto yy_find_action; } } else switch (yy_get_next_buffer(yyscanner)) { case EOB_ACT_END_OF_FILE: { yyg->yy_did_buffer_switch_on_eof = 0; if (yywrap(yyscanner)) { /* Note: because we've taken care in * yy_get_next_buffer() to have set up * yytext, we can now set up * yy_c_buf_p so that if some total * hoser (like flex itself) wants to * call the scanner after we return the * YY_NULL, it'll still work - another * YY_NULL will get returned. */ yyg->yy_c_buf_p = yyg->yytext_ptr + YY_MORE_ADJ; yy_act = YY_STATE_EOF(YY_START); goto do_action; } else { if (!yyg->yy_did_buffer_switch_on_eof) YY_NEW_FILE; } break; } case EOB_ACT_CONTINUE_SCAN: yyg->yy_c_buf_p = yyg->yytext_ptr + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state(yyscanner); yy_cp = yyg->yy_c_buf_p; yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; goto yy_match; case EOB_ACT_LAST_MATCH: yyg->yy_c_buf_p = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars]; yy_current_state = yy_get_previous_state(yyscanner); yy_cp = yyg->yy_c_buf_p; yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; goto yy_find_action; } break; } default: YY_FATAL_ERROR("fatal flex scanner internal error--no action found"); } /* end of action switch */ } /* end of scanning one token */ } /* end of user's declarations */ } /* end of yylex */ /* yy_get_next_buffer - try to read in a new buffer * * Returns a code representing an action: * EOB_ACT_LAST_MATCH - * EOB_ACT_CONTINUE_SCAN - continue scanning from current position * EOB_ACT_END_OF_FILE - end of file */ static int yy_get_next_buffer (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; char* dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf; char* source = 
yyg->yytext_ptr; int number_to_move, i; int ret_val; if (yyg->yy_c_buf_p > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars + 1]) YY_FATAL_ERROR("fatal flex scanner internal error--end of buffer missed"); if (YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0) { /* Don't try to fill the buffer, so this is an EOF. */ if (yyg->yy_c_buf_p - yyg->yytext_ptr - YY_MORE_ADJ == 1) { /* We matched a single character, the EOB, so * treat this as a final EOF. */ return EOB_ACT_END_OF_FILE; } else { /* We matched some text prior to the EOB, first * process it. */ return EOB_ACT_LAST_MATCH; } } /* Try to read more data. */ /* First move last chars to start of buffer. */ number_to_move = (int)(yyg->yy_c_buf_p - yyg->yytext_ptr - 1); for (i = 0; i < number_to_move; ++i) *(dest++) = *(source++); if (YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING) /* don't do the read, it's not guaranteed to return an EOF, * just force an EOF */ YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars = 0; else { int num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; while (num_to_read <= 0) { /* Not enough room in the buffer - grow it. */ /* just a shorter name for the current buffer */ YY_BUFFER_STATE b = YY_CURRENT_BUFFER_LVALUE; int yy_c_buf_p_offset = (int)(yyg->yy_c_buf_p - b->yy_ch_buf); if (b->yy_is_our_buffer) { int new_size = b->yy_buf_size * 2; if (new_size <= 0) b->yy_buf_size += b->yy_buf_size / 8; else b->yy_buf_size *= 2; b->yy_ch_buf = (char*) /* Include room in for 2 EOB chars. */ yyrealloc((void*)b->yy_ch_buf, (yy_size_t)(b->yy_buf_size + 2), yyscanner); } else /* Can't grow it, we don't own it. */ b->yy_ch_buf = NULL; if (!b->yy_ch_buf) YY_FATAL_ERROR("fatal error - scanner input buffer overflow"); yyg->yy_c_buf_p = &b->yy_ch_buf[yy_c_buf_p_offset]; num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; } if (num_to_read > YY_READ_BUF_SIZE) num_to_read = YY_READ_BUF_SIZE; /* Read in more data. 
*/ YY_INPUT((&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]), yyg->yy_n_chars, num_to_read); YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } if (yyg->yy_n_chars == 0) { if (number_to_move == YY_MORE_ADJ) { ret_val = EOB_ACT_END_OF_FILE; yyrestart(yyin, yyscanner); } else { ret_val = EOB_ACT_LAST_MATCH; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_EOF_PENDING; } } else ret_val = EOB_ACT_CONTINUE_SCAN; if ((yyg->yy_n_chars + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) { /* Extend the array by 50%, plus the number we really need. */ int new_size = yyg->yy_n_chars + number_to_move + (yyg->yy_n_chars >> 1); YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char*)yyrealloc((void*)YY_CURRENT_BUFFER_LVALUE->yy_ch_buf, (yy_size_t)new_size, yyscanner); if (!YY_CURRENT_BUFFER_LVALUE->yy_ch_buf) YY_FATAL_ERROR("out of dynamic memory in yy_get_next_buffer()"); /* "- 2" to take care of EOB's */ YY_CURRENT_BUFFER_LVALUE->yy_buf_size = (int)(new_size - 2); } yyg->yy_n_chars += number_to_move; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars] = YY_END_OF_BUFFER_CHAR; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars + 1] = YY_END_OF_BUFFER_CHAR; yyg->yytext_ptr = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0]; return ret_val; } /* yy_get_previous_state - get the state just before the EOB char was reached */ static yy_state_type yy_get_previous_state (yyscan_t yyscanner) { yy_state_type yy_current_state; char* yy_cp; struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yy_current_state = yyg->yy_start; yy_current_state += YY_AT_BOL(); for (yy_cp = yyg->yytext_ptr + YY_MORE_ADJ; yy_cp < yyg->yy_c_buf_p; ++yy_cp) { YY_CHAR yy_c = (*yy_cp ? 
yy_ec[YY_SC_TO_UI(*yy_cp)] : 1); if (yy_accept[yy_current_state]) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while (yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state) { yy_current_state = (int)yy_def[yy_current_state]; if (yy_current_state >= 646) yy_c = yy_meta[yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + yy_c]; } return yy_current_state; } /* yy_try_NUL_trans - try to make a transition on the NUL character * * synopsis * next_state = yy_try_NUL_trans( current_state ); */ static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state , yyscan_t yyscanner) { int yy_is_jam; struct yyguts_t* yyg = (struct yyguts_t*) yyscanner; /* This var may be unused depending upon options. */ char* yy_cp = yyg->yy_c_buf_p; YY_CHAR yy_c = 1; if (yy_accept[yy_current_state]) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while (yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state) { yy_current_state = (int)yy_def[yy_current_state]; if (yy_current_state >= 646) yy_c = yy_meta[yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + yy_c]; yy_is_jam = (yy_current_state == 645); (void)yyg; return yy_is_jam ? 0 : yy_current_state; } #ifndef YY_NO_UNPUT #endif #ifndef YY_NO_INPUT #ifdef __cplusplus static int yyinput (yyscan_t yyscanner) #else static int input (yyscan_t yyscanner) #endif { int c; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; *yyg->yy_c_buf_p = yyg->yy_hold_char; if ( *yyg->yy_c_buf_p == YY_END_OF_BUFFER_CHAR ) { /* yy_c_buf_p now points to the character we want to return. * If this occurs *before* the EOB characters, then it's a * valid NUL; if not, then we've hit the end of the buffer. */ if ( yyg->yy_c_buf_p < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars] ) /* This was really a NUL. 
*/ *yyg->yy_c_buf_p = '\0'; else { /* need more input */ int offset = (int)(yyg->yy_c_buf_p - yyg->yytext_ptr); ++yyg->yy_c_buf_p; switch (yy_get_next_buffer(yyscanner)) { case EOB_ACT_LAST_MATCH: /* This happens because yy_g_n_b() * sees that we've accumulated a * token and flags that we need to * try matching the token before * proceeding. But for input(), * there's no matching to consider. * So convert the EOB_ACT_LAST_MATCH * to EOB_ACT_END_OF_FILE. */ /* Reset buffer status. */ yyrestart(yyin, yyscanner); /*FALLTHROUGH*/ case EOB_ACT_END_OF_FILE: { if (yywrap(yyscanner)) return 0; if (!yyg->yy_did_buffer_switch_on_eof) YY_NEW_FILE; #ifdef __cplusplus return yyinput(yyscanner); #else return input(yyscanner); #endif } case EOB_ACT_CONTINUE_SCAN: yyg->yy_c_buf_p = yyg->yytext_ptr + offset; break; } } } c = *(unsigned char*)yyg->yy_c_buf_p; /* cast for 8-bit char's */ *yyg->yy_c_buf_p = '\0'; /* preserve yytext */ yyg->yy_hold_char = *++yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_at_bol = (c == '\n'); return c; } #endif /* ifndef YY_NO_INPUT */ /** Immediately switch to a different input stream. * @param input_file A readable stream. * @param yyscanner The scanner object. * @note This function does not reset the start condition to @c INITIAL . */ void yyrestart(FILE* input_file, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!YY_CURRENT_BUFFER) { yyensure_buffer_stack(yyscanner); YY_CURRENT_BUFFER_LVALUE = yy_create_buffer(yyin, YY_BUF_SIZE, yyscanner); } yy_init_buffer(YY_CURRENT_BUFFER, input_file, yyscanner); yy_load_buffer_state(yyscanner); } /** Switch to a different input buffer. * @param new_buffer The new input buffer. * @param yyscanner The scanner object. */ void yy_switch_to_buffer(YY_BUFFER_STATE new_buffer, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; /* TODO. 
We should be able to replace this entire function body * with * yypop_buffer_state(); * yypush_buffer_state(new_buffer); */ yyensure_buffer_stack(yyscanner); if (YY_CURRENT_BUFFER == new_buffer) return; if (YY_CURRENT_BUFFER) { /* Flush out information for old buffer. */ *yyg->yy_c_buf_p = yyg->yy_hold_char; YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } YY_CURRENT_BUFFER_LVALUE = new_buffer; yy_load_buffer_state(yyscanner); /* We don't actually know whether we did this switch during * EOF (yywrap()) processing, but the only time this flag * is looked at is after yywrap() is called, so it's safe * to go ahead and always set it. */ yyg->yy_did_buffer_switch_on_eof = 1; } static void yy_load_buffer_state(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yyg->yy_n_chars = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; yyg->yytext_ptr = yyg->yy_c_buf_p = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos; yyin = YY_CURRENT_BUFFER_LVALUE->yy_input_file; yyg->yy_hold_char = *yyg->yy_c_buf_p; } /** Allocate and initialize an input buffer state. * @param file A readable stream. * @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE. * @param yyscanner The scanner object. * @return the allocated buffer state. */ YY_BUFFER_STATE yy_create_buffer(FILE* file, int size, yyscan_t yyscanner) { YY_BUFFER_STATE b; b = (YY_BUFFER_STATE)yyalloc(sizeof(struct yy_buffer_state), yyscanner); if (!b) YY_FATAL_ERROR("out of dynamic memory in yy_create_buffer()"); b->yy_buf_size = size; /* yy_ch_buf has to be 2 characters longer than the size given because * we need to put in 2 end-of-buffer characters. */ b->yy_ch_buf = (char*)yyalloc((yy_size_t)(b->yy_buf_size + 2), yyscanner); if (!b->yy_ch_buf) YY_FATAL_ERROR("out of dynamic memory in yy_create_buffer()"); b->yy_is_our_buffer = 1; yy_init_buffer(b, file, yyscanner); return b; } /** Destroy the buffer. 
* @param b a buffer created with yy_create_buffer() * @param yyscanner The scanner object. */ void yy_delete_buffer(YY_BUFFER_STATE b, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!b) return; if (b == YY_CURRENT_BUFFER) /* Not sure if we should pop here. */ YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE)0; if (b->yy_is_our_buffer) yyfree((void*)b->yy_ch_buf, yyscanner); yyfree((void*)b, yyscanner); } /* Initializes or reinitializes a buffer. * This function is sometimes called more than once on the same buffer, * such as during a yyrestart() or at EOF. */ static void yy_init_buffer(YY_BUFFER_STATE b, FILE* file, yyscan_t yyscanner) { int oerrno = errno; struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yy_flush_buffer(b, yyscanner); b->yy_input_file = file; b->yy_fill_buffer = 1; /* If b is the current buffer, then yy_init_buffer was _probably_ * called from yyrestart() or through yy_get_next_buffer. * In that case, we don't want to reset the lineno or column. */ if (b != YY_CURRENT_BUFFER) { b->yy_bs_lineno = 1; b->yy_bs_column = 0; } b->yy_is_interactive = 0; errno = oerrno; } /** Discard all buffered characters. On the next scan, YY_INPUT will be called. * @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER. * @param yyscanner The scanner object. */ void yy_flush_buffer(YY_BUFFER_STATE b, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!b) return; b->yy_n_chars = 0; /* We always need two end-of-buffer characters. The first causes * a transition to the end-of-buffer state. The second causes * a jam in that state. */ b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR; b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR; b->yy_buf_pos = &b->yy_ch_buf[0]; b->yy_at_bol = 1; b->yy_buffer_status = YY_BUFFER_NEW; if (b == YY_CURRENT_BUFFER) yy_load_buffer_state(yyscanner); } /** Pushes the new state onto the stack. The new state becomes * the current state. This function will allocate the stack * if necessary. 
* @param new_buffer The new state. * @param yyscanner The scanner object. */ void yypush_buffer_state(YY_BUFFER_STATE new_buffer, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (new_buffer == NULL) return; yyensure_buffer_stack(yyscanner); /* This block is copied from yy_switch_to_buffer. */ if (YY_CURRENT_BUFFER) { /* Flush out information for old buffer. */ *yyg->yy_c_buf_p = yyg->yy_hold_char; YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } /* Only push if top exists. Otherwise, replace top. */ if (YY_CURRENT_BUFFER) yyg->yy_buffer_stack_top++; YY_CURRENT_BUFFER_LVALUE = new_buffer; /* copied from yy_switch_to_buffer. */ yy_load_buffer_state(yyscanner); yyg->yy_did_buffer_switch_on_eof = 1; } /** Removes and deletes the top of the stack, if present. * The next element becomes the new top. * @param yyscanner The scanner object. */ void yypop_buffer_state(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!YY_CURRENT_BUFFER) return; yy_delete_buffer(YY_CURRENT_BUFFER, yyscanner); YY_CURRENT_BUFFER_LVALUE = NULL; if (yyg->yy_buffer_stack_top > 0) --yyg->yy_buffer_stack_top; if (YY_CURRENT_BUFFER) { yy_load_buffer_state(yyscanner); yyg->yy_did_buffer_switch_on_eof = 1; } } /* Allocates the stack if it does not exist. * Guarantees space for at least one push. */ static void yyensure_buffer_stack(yyscan_t yyscanner) { yy_size_t num_to_alloc; struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!yyg->yy_buffer_stack) { /* First allocation is just for 2 elements, since we don't know if this * scanner will even need a stack. We use 2 instead of 1 to avoid an * immediate realloc on the next call. */ num_to_alloc = 1; /* After all that talk, this was set to 1 anyways... 
*/ yyg->yy_buffer_stack = (struct yy_buffer_state**)yyalloc( num_to_alloc * sizeof(struct yy_buffer_state*), yyscanner); if (!yyg->yy_buffer_stack) YY_FATAL_ERROR("out of dynamic memory in yyensure_buffer_stack()"); memset(yyg->yy_buffer_stack, 0, num_to_alloc * sizeof(struct yy_buffer_state*)); yyg->yy_buffer_stack_max = num_to_alloc; yyg->yy_buffer_stack_top = 0; return; } if (yyg->yy_buffer_stack_top >= (yyg->yy_buffer_stack_max) - 1) { /* Increase the buffer to prepare for a possible push. */ yy_size_t grow_size = 8 /* arbitrary grow size */; num_to_alloc = yyg->yy_buffer_stack_max + grow_size; yyg->yy_buffer_stack = (struct yy_buffer_state**)yyrealloc( yyg->yy_buffer_stack, num_to_alloc * sizeof(struct yy_buffer_state*), yyscanner); if (!yyg->yy_buffer_stack) YY_FATAL_ERROR("out of dynamic memory in yyensure_buffer_stack()"); /* zero only the new slots.*/ memset(yyg->yy_buffer_stack + yyg->yy_buffer_stack_max, 0, grow_size * sizeof(struct yy_buffer_state*)); yyg->yy_buffer_stack_max = num_to_alloc; } } /** Setup the input buffer state to scan directly from a user-specified * character buffer. * @param base the character buffer * @param size the size in bytes of the character buffer * @param yyscanner The scanner object. * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_scan_buffer(char* base, yy_size_t size, yyscan_t yyscanner) { YY_BUFFER_STATE b; if (size < 2 || base[size - 2] != YY_END_OF_BUFFER_CHAR || base[size - 1] != YY_END_OF_BUFFER_CHAR) /* They forgot to leave room for the EOB's. 
*/ return NULL; b = (YY_BUFFER_STATE)yyalloc(sizeof(struct yy_buffer_state), yyscanner); if (!b) YY_FATAL_ERROR("out of dynamic memory in yy_scan_buffer()"); b->yy_buf_size = (int)(size - 2); /* "- 2" to take care of EOB's */ b->yy_buf_pos = b->yy_ch_buf = base; b->yy_is_our_buffer = 0; b->yy_input_file = NULL; b->yy_n_chars = b->yy_buf_size; b->yy_is_interactive = 0; b->yy_at_bol = 1; b->yy_fill_buffer = 0; b->yy_buffer_status = YY_BUFFER_NEW; yy_switch_to_buffer(b, yyscanner); return b; } /** Setup the input buffer state to scan a string. The next call to yylex() will * scan from a @e copy of @a str. * @param yystr a NUL-terminated string to scan * @param yyscanner The scanner object. * @return the newly allocated buffer state object. * @note If you want to scan bytes that may contain NUL values, then use * yy_scan_bytes() instead. */ YY_BUFFER_STATE yy_scan_string(const char* yystr, yyscan_t yyscanner) { return yy_scan_bytes(yystr, (int)strlen(yystr), yyscanner); } /** Setup the input buffer state to scan the given bytes. The next call to * yylex() will scan from a @e copy of @a bytes. * @param yybytes the byte buffer to scan * @param _yybytes_len the number of bytes in the buffer pointed to by @a bytes. * @param yyscanner The scanner object. * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_scan_bytes(const char* yybytes, int _yybytes_len, yyscan_t yyscanner) { YY_BUFFER_STATE b; char* buf; yy_size_t n; int i; /* Get memory for full buffer, including space for trailing EOB's. */ n = (yy_size_t)(_yybytes_len + 2); buf = (char*)yyalloc(n, yyscanner); if (!buf) YY_FATAL_ERROR("out of dynamic memory in yy_scan_bytes()"); for (i = 0; i < _yybytes_len; ++i) buf[i] = yybytes[i]; buf[_yybytes_len] = buf[_yybytes_len + 1] = YY_END_OF_BUFFER_CHAR; b = yy_scan_buffer(buf, n, yyscanner); if (!b) YY_FATAL_ERROR("bad buffer in yy_scan_bytes()"); /* It's okay to grow etc. this buffer, and we should throw it * away when we're done. 
*/ b->yy_is_our_buffer = 1; return b; } #ifndef YY_EXIT_FAILURE #define YY_EXIT_FAILURE 2 #endif static void yynoreturn yy_fatal_error(const char* msg, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; (void)yyg; fprintf(stderr, "%s\n", msg); exit(YY_EXIT_FAILURE); } /* Redefine yyless() so it works in section 3 code. */ #undef yyless #define yyless(n) \ do \ { \ /* Undo effects of setting up yytext. */ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ yytext[yyleng] = yyg->yy_hold_char; \ yyg->yy_c_buf_p = yytext + yyless_macro_arg; \ yyg->yy_hold_char = *yyg->yy_c_buf_p; \ *yyg->yy_c_buf_p = '\0'; \ yyleng = yyless_macro_arg; \ } \ while ( 0 ) /* Accessor methods (get/set functions) to struct members. */ /** Get the user-defined data for this scanner. * @param yyscanner The scanner object. */ YY_EXTRA_TYPE yyget_extra(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yyextra; } /** Get the current line number. * @param yyscanner The scanner object. */ int yyget_lineno(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!YY_CURRENT_BUFFER) return 0; return yylineno; } /** Get the current column number. * @param yyscanner The scanner object. */ int yyget_column(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; if (!YY_CURRENT_BUFFER) return 0; return yycolumn; } /** Get the input stream. * @param yyscanner The scanner object. */ FILE* yyget_in(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yyin; } /** Get the output stream. * @param yyscanner The scanner object. */ FILE* yyget_out(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yyout; } /** Get the length of the current token. * @param yyscanner The scanner object. */ int yyget_leng(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yyleng; } /** Get the current token. 
* @param yyscanner The scanner object. */ char* yyget_text(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yytext; } /** Set the user-defined data. This data is never touched by the scanner. * @param user_defined The data to be associated with this scanner. * @param yyscanner The scanner object. */ void yyset_extra(YY_EXTRA_TYPE user_defined, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yyextra = user_defined; } /** Set the current line number. * @param _line_number line number * @param yyscanner The scanner object. */ void yyset_lineno(int _line_number, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; /* lineno is only valid if an input buffer exists. */ if (!YY_CURRENT_BUFFER) YY_FATAL_ERROR("yyset_lineno called with no buffer"); yylineno = _line_number; } /** Set the current column. * @param _column_no column number * @param yyscanner The scanner object. */ void yyset_column(int _column_no, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; /* column is only valid if an input buffer exists. */ if (!YY_CURRENT_BUFFER) YY_FATAL_ERROR("yyset_column called with no buffer"); yycolumn = _column_no; } /** Set the input stream. This does not discard the current * input buffer. * @param _in_str A readable stream. * @param yyscanner The scanner object. 
* @see yy_switch_to_buffer */ void yyset_in(FILE* _in_str, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yyin = _in_str; } void yyset_out(FILE* _out_str, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yyout = _out_str; } int yyget_debug(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yy_flex_debug; } void yyset_debug(int _bdebug, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yy_flex_debug = _bdebug; } /* Accessor methods for yylval and yylloc */ YYSTYPE* yyget_lval(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; return yylval; } void yyset_lval(YYSTYPE* yylval_param, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; yylval = yylval_param; } /* User-visible API */ /* yylex_init is special because it creates the scanner itself, so it is * the ONLY reentrant function that doesn't take the scanner as the last * argument. That's why we explicitly handle the declaration, instead of using * our macros. */ int yylex_init(yyscan_t* ptr_yy_globals) { if (ptr_yy_globals == NULL) { errno = EINVAL; return 1; } *ptr_yy_globals = (yyscan_t)yyalloc(sizeof(struct yyguts_t), NULL); if (*ptr_yy_globals == NULL) { errno = ENOMEM; return 1; } /* By setting to 0xAA, we expose bugs in yy_init_globals. Leave at 0x00 for * releases. */ memset(*ptr_yy_globals, 0x00, sizeof(struct yyguts_t)); return yy_init_globals(*ptr_yy_globals); } /* yylex_init_extra has the same functionality as yylex_init, but follows the * convention of taking the scanner as the last argument. Note however, that * this is a *pointer* to a scanner, as it will be allocated by this call (and * is the reason, too, why this function also must handle its own declaration). * The user defined value in the first argument will be available to yyalloc in * the yyextra field. 
*/ int yylex_init_extra(YY_EXTRA_TYPE yy_user_defined, yyscan_t* ptr_yy_globals) { struct yyguts_t dummy_yyguts; yyset_extra(yy_user_defined, &dummy_yyguts); if (ptr_yy_globals == NULL) { errno = EINVAL; return 1; } *ptr_yy_globals = (yyscan_t)yyalloc(sizeof(struct yyguts_t), &dummy_yyguts); if (*ptr_yy_globals == NULL) { errno = ENOMEM; return 1; } /* By setting to 0xAA, we expose bugs in yy_init_globals. Leave at 0x00 for releases. */ memset(*ptr_yy_globals, 0x00, sizeof(struct yyguts_t)); yyset_extra(yy_user_defined, *ptr_yy_globals); return yy_init_globals(*ptr_yy_globals); } static int yy_init_globals (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* Initialization is the same as for the non-reentrant scanner. * This function is called from yylex_destroy(), so don't allocate here. */ yyg->yy_buffer_stack = NULL; yyg->yy_buffer_stack_top = 0; yyg->yy_buffer_stack_max = 0; yyg->yy_c_buf_p = NULL; yyg->yy_init = 0; yyg->yy_start = 0; yyg->yy_start_stack_ptr = 0; yyg->yy_start_stack_depth = 0; yyg->yy_start_stack = NULL; /* Defined in main.c */ #ifdef YY_STDINIT yyin = stdin; yyout = stdout; #else yyin = NULL; yyout = NULL; #endif /* For future reference: Set errno on error, since we are called by * yylex_init() */ return 0; } /* yylex_destroy is for both reentrant and non-reentrant scanners. */ int yylex_destroy(yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; /* Pop the buffer stack, destroying each element. */ while (YY_CURRENT_BUFFER) { yy_delete_buffer(YY_CURRENT_BUFFER, yyscanner); YY_CURRENT_BUFFER_LVALUE = NULL; yypop_buffer_state(yyscanner); } /* Destroy the stack itself. */ yyfree(yyg->yy_buffer_stack, yyscanner); yyg->yy_buffer_stack = NULL; /* Destroy the start condition stack. */ yyfree(yyg->yy_start_stack, yyscanner); yyg->yy_start_stack = NULL; /* Reset the globals. This is important in a non-reentrant scanner so the next * time yylex() is called, initialization will occur. 
*/ yy_init_globals(yyscanner); /* Destroy the main struct (reentrant only). */ yyfree(yyscanner, yyscanner); yyscanner = NULL; return 0; } /* * Internal utility routines. */ #ifndef yytext_ptr static void yy_flex_strncpy(char* s1, const char* s2, int n, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; (void)yyg; int i; for (i = 0; i < n; ++i) s1[i] = s2[i]; } #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen(const char* s, yyscan_t yyscanner) { int n; for (n = 0; s[n]; ++n) ; return n; } #endif void* yyalloc(yy_size_t size, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; (void)yyg; return malloc(size); } void* yyrealloc(void* ptr, yy_size_t size, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; (void)yyg; /* The cast to (char *) in the following accommodates both * implementations that use char* generic pointers, and those * that use void* generic pointers. It works with the latter * because both ANSI C and C++ allow castless assignment from * any pointer type to void*, and deal with argument conversions * as though doing an assignment. */ return realloc(ptr, size); } void yyfree(void* ptr, yyscan_t yyscanner) { struct yyguts_t* yyg = (struct yyguts_t*)yyscanner; (void)yyg; free((char*)ptr); /* see yyrealloc() for (char *) cast */ } #define YYTABLES_NAME "yytables"
Smartphone photography for smart assessment of post-surgical wounds - an experience during the COVID-19 pandemic. Aim: This study was undertaken with the aim of assessing the feasibility of monitoring wound healing by replacing the traditional real-life clinical follow-up with a review of smartphone-captured digital photographs of post-surgical wounds by a clinician. Method: Postoperative patients who understood the objectives were enrolled in the prospective study and were followed up. They were requested to capture digital photographs of the wounds using smartphones and send them through WhatsApp on postoperative days 3, 5, 7, 15 and 30, or else whenever they felt the need for a review because of symptoms such as fever, pain, redness or swelling. Results: 24 abdominal, 2 extremity and 1 neck wound were followed up in 27 patients. Three patients developed complications, and all were detected with 100% accuracy. As many as 81% of the patients were satisfied with this mode of postoperative wound assessment. Conclusion: The experience shows that in the follow-up of post-surgical wounds, there is a good correlation between real-life clinical review and remote photographic review. Incorporating photographic wound assessment into the postoperative follow-up care pathway can potentially decrease the number of postoperative hospital visits and reduce the burden on healthcare facilities.
Comparison of Au, Au-Pt, and Au-Ag nanoparticles as markers for immunochromatographic determination of nonylphenol Gold spherical nanoparticles, gold-platinum nanoflowers, and gold-silver nanostars were obtained and compared as labels for immunochromatographic analysis. The nanoparticles were synthesized by chemical reduction from various precursors and then conjugated with staphylococcal protein A to be used in the indirect immunochromatographic determination of nonylphenol. The results obtained were evaluated in terms of analytical characteristics and the R² value, as well as the color intensity of the test band. According to the comparison results, the R² value varied from 0.82 for the gold-silver nanostars to 0.96 for the spherical gold nanoparticles. The working range of determined concentrations was from 2 to 100 μg/mL for the non-spherical markers and from 2 to 50 μg/mL for the spherical markers; the analysis time was 20 min.
// acm.timus.ru 1108. Heritage
//
// Problem (restated from the notes below): find a non-decreasing integer
// sequence a[1..n] whose reciprocals sum to strictly less than 1, with
// 1 - sum(1/a[i]) minimal.  The Sylvester-style recurrence
// a[1] = 2, a[i] = a[i-1]^2 - a[i-1] + 1 gives the answer; the program
// precomputes a[1..18] with base-10^9 bignum arithmetic (Karatsuba
// multiplication) and prints the first n terms.
//
//1) 1/a[1] >= 1/a[2] >= 1/a[3] >= ... >= 1/a[n]
//   a[1] <= a[2] <= a[3] <= ... <= a[n]
//2) sum(1/a[i]) < 1
//3) 1 - sum(1/a[i]) -- minimal
// example: n = 1 . a[1] = 2
//          n = 2 . a[1] = 2, a[2] = 3
//          n = 3   2 3 7
// SOLUTION:
// a[1] = 2. a[i] = a[i-1]*( a[ i - 1 ]-1) + 1
//           a[i] = a[i-1] ^ 2 - a[i-1] + 1

#include <cstdio>
#include <cstring>

// Online-judge assertion: deliberately crashes via integer division by zero
// when the condition is false, so a broken invariant surfaces as a runtime
// error instead of silently wrong output.
void judge_assert(bool b)
{
    if ( ! b )
    {
        printf("%d\n",1/b); // division by zero.
    }
}

typedef unsigned size_type ;
typedef unsigned value_t ;
typedef const value_t const_value_t;

// Limbs are stored little-endian in base T = 10^9 (9 decimal digits per
// limb), so every limb and every limb sum fits comfortably in 32 bits.
static const value_t T   = 1000000000;
static const value_t T_1 = T - 1;

// Fixed-capacity big integer: d[0] is the least significant limb, n is the
// limb count in use.  4096 limbs (~36864 decimal digits) covers a[18] after
// Karatsuba's power-of-two padding — presumably sized by the author for the
// i <= 18 bound in main(); confirm before raising that bound.
struct bigint
{
    size_type n;
    value_t d[ 4096 ];
};

// Drop leading (most significant) zero limbs; always keeps at least one limb.
size_type stretch_size(const_value_t a[], size_type n)
{
    while( n > 1 && a[ n - 1 ] == 0u )
        --n;
    return n;
}

// All output accumulates here; w is the write cursor.  Flushed once with a
// single fwrite() at the end of main().
char out[ 65536 ];
char * w ;

// Append the decimal representation of a (plus '\n') to the output buffer.
// The most significant limb prints without padding; every lower limb prints
// as exactly 9 zero-padded digits.
int print_bigint(bigint const& a)
{
    size_type n = stretch_size(a.d, a.n);
    const_value_t * d = a.d;
    // last digit first printed.
    {
        value_t u = d[ n - 1];
        value_t v = u;
        size_type m = 0;
        do ++m; while( v /= 10 );            // m = number of decimal digits in the top limb
        for(size_type j = 0; j < m; ++j )
            w[m - j - 1] = u % 10 + '0', u/=10;
        w += m;
    }
    if ( n > 1)
    {
        for(size_type i = n - 1; i > 0; --i)
        {
            value_t u = d[ i - 1 ];
            for(size_type j = 0; j != 9; ++j)   // fixed width: 9 digits per limb
                w[ 8 - j ] = u % 10 + '0', u /= 10;
            w += 9;
        }
    }
    *w++ = '\n';
    return 0;
}

// c[] = a[] * b[] , where n <= 8 (schoolbook quadratic multiply; base case
// for the Karatsuba recursion).  c must hold 2n limbs.
// return carry - is always equal to 0.
value_t mul_simple(const_value_t a[], const_value_t b[], size_type n, value_t c[] )
{
    unsigned long long carry = 0, t;           // 64-bit accumulator: limb*limb fits
    memset(c, 0u, sizeof( c[ 0 ] ) * ( n + n ) );
    for(unsigned i = 0; i != n; ++i)
    {
        t = a[i];
        for(unsigned j = 0; j != n; ++j)
        {
            carry += c[i + j] + t * b[j];
            c[i + j] = carry % T;
            carry /= T;
        }
        for(unsigned j = n; carry > 0; ++j)    // propagate leftover carry
        {
            carry += c[i + j]; // b[j] == 0
            c[ i + j ] = carry % T;
            carry /= T;
        }
    }
    return 0;
}

// c[] = a[] + q , 0 <= q < T
// return carry.
// NOTE(review): the loop stops as soon as the carry dies, leaving the tail
// of c[] untouched — correct only for in-place use (c == a), which is how
// every call site in this file uses it.
value_t add(const_value_t a[], size_type n, value_t q, value_t c[])
{
    for(size_type i = 0; i < n && q > 0 ;++i)
    {
        q += a[i];
        c[i] = q >= T ? q - T : q;
        q = q >= T ? 1 : 0;
    }
    return q;
}

// c[] = a[] + b[]
// return carry.
value_t add(const_value_t a[], const_value_t b[], size_type n, value_t c[])
{
    value_t carry = 0;
    for(size_type i = 0; i != n; ++i)
    {
        carry += a[i] + b[ i ];
        c[ i ] = carry >= T ? carry - T : carry;
        carry = carry >= T ? 1 : 0;
    }
    return carry;
}

// c[] = a[] - b[]
// return carry with offset +1: 0 -> means carry = -1. 1 -> means carry = 0
// (implemented as a[i] + (T-1 - b[i]) + borrow, i.e. radix-complement
// subtraction, which keeps every intermediate value non-negative).
value_t sub( const_value_t a[], const_value_t b[], size_type n, value_t c[] )
{
    value_t carry = +1; // +1 offset
    for(size_type i = 0; i != n; ++i)
    {
        carry += a[i] + ( T_1 - b[ i ] ); // T_1 = T - 1
        c[i] = carry >= T ? carry - T : carry;
        carry = carry >= T ? 1 : 0;
    }
    return carry;
}

// c[] = a[] * b[] , where n may be as great as 1000 (must be a power of two;
// the caller pads — see sqr()).
// t[] - is a temporary array needed for inner calculations; it must hold 2n
// limbs (t[n..2n) is reused as scratch for the recursive calls).
// return carry, i.e. 0.  c must hold 2n limbs.
value_t mul_karasuba(const_value_t a[], const_value_t b[], size_type n, value_t c[], value_t t[] )
{
    size_type m;
    value_t ma, mb, mt;
    // n - 2^k
    if ( n <= 8 )
    {
        return mul_simple(a, b, n, c);   // base case: schoolbook multiply
    }
    m = n >> 1 ; // m = n / 2
    //|-------------------------------------------------------------------------------------------
    //|1. a[0..n] = a[0..m] * T^m + a[m..n]
    //|2. b[0..n] = b[0..m] * T^m + b[m..n]
    //|-----------------------------------------------------------------------------------------
    //|Theory:
    //|  A = a1 * T^m + a0
    //|  B = b1 * T^m + b0
    //|
    //|  C = A * B
    //|  -------------------------------------------------------------
    //|  C = A * B = (a1 * T^m + a0) * (b1 * T^m + b0) = a1*b1 * T^(2m) + (a0*b1 + a1*b0)*T^m + a0*b0
    //|
    //|  Let P = a1 * b1
    //|      Q = a0 * b0
    //|      S = (a0 + a1) * ( b0 + b1) = a0 * b0 + a0*b1 + a1*b0 + a1 * b1
    //|
    //|  so a0 * b1 + a1 * b0 == S - P - Q
    //|
    //|  C = P * T^(2m) + (S - P - Q) *T^m + Q
    //|
    //|  we can product A and B only with 3 multiplications and one add, 2 subtract operations.
    //|------------------------------------------------------------------------------------------
    //| c[0..m]   = a0 + a1
    //| c[n..n+m] = b0 + b1
    ma = add(a, a + m, m, c);     // c[0..m] = a[0..m] + a[m..n]
    mb = add(b, b + m, m, c + n); // c[n..n+m] = b[0..m] + b[m..n]
    //Note that: ma == 0 or ma == 1
    //           mb == 0 or mb == 1
    // t[ 0..n ] = c[ 0 .. m ] * c[ n .. n + m ]
    mt = mul_karasuba( c, c + n, m, t, t + n); // t[ n ... n + n ] -- temporary usage
    // Fold the overflow limbs (ma, mb) of the half-sums back into the product,
    // since the recursive multiply only saw the low m limbs of each sum.
    if (ma > 0 && mb > 0)
    {
        // Let c0 = c[0..m]
        //     c1 = c[n..n+m]
        // (T^m + c0)*(T^m + c1) = T^(2m) + (c0 + c1)*T^m + c0*c1
        // c0 * c1 = t[0..n] - already calculated
        // t[m..n] = t[m..n] + c[0..m]
        ma = add(c , t + m , m , t + m);
        // t[m..n] = t[m..n] + c[n..n+m]
        mb = add(c + n, t + m , m , t + m);
        mt = ma + mb + 1;
    }
    else if (ma > 0)
    {
        // (T^m + c0) * c1 = c1*T^m + c0*c1
        mt = add(c + n, t + m, m, t + m);
    }
    else if (mb > 0)
    {
        //c0 * (T^m + c1) = c0*T^m + c0*c1
        mt = add(c, t + m, m, t + m);
    }
    else
    {
        mt = 0;
    }
    // now (a0 + a1)*(b0 + b1) = mt*T^(2m) + t[0..n]
    //2. step: a0 * b0 --> c[0..n]
    mul_karasuba(a , b, m, c, t + n);          // t[n..n+n] - is temporary usage
    //3. step: a1 * b1 --> c[n .. n + n]
    mul_karasuba(a + m, b + m, m, c + n, t + n); // t[n..n+n] - is temporary usage
    // S = (a0 + a1)*(b0+b1), P = a0*b0, Q = a1 * b1
    // S = mt*T^(2m) + t[0..n], P = c[0..n] , Q = c[n..n+n]
    // S - P - Q
    ma = sub(t, c, n, t);     // t[0..n] = t[0..n] - c[0..n]
    mb = sub(t, c + n, n , t); // t[0..n] = t[0..n] - c[n..n+n]
    // sub() returns 0 for a borrow of -1, 1 for no borrow; deduct the borrows
    // from the high word mt (S - P - Q is known to be non-negative).
    judge_assert( mt >= ( 1- ma ) + ( 1 - mb ) ) ;
    mt -= ( 1 - ma ) + (1 - mb );
    // S - P - Q = mt * T^( 2m ) + t[0..n]
    // need add t[0..n] to c[ m..n + m ]
    // i.e. c[m..n+m] = c[m..n+m] + t[0..n]
    ma = add(c + m, t, n, c + m);
    // c[n + m] += ma + mt
    mt += ma;
    if (mt > 0)
    {
        // c[ n + m .. n + m + m] = c[ n + m .. n + m + m] + mt. length of array = m.
        mb = add(c + n + m, m, mt, c + n + m);
        judge_assert( mb == 0 );  // final carry must not escape the 2n-limb result
    }
    return 0;
}

// True iff n is a power of two (bit trick; n must be nonzero here — the
// callers pass limb counts >= 1).
static inline bool is_pow2(unsigned n){ return 0u == (n & (n - 1) ); }

bigint t; // temporary scratch for mul_karasuba (file scope to avoid a huge stack frame)

// a = b * b.  Pads the operand length up to the next power of two, as
// mul_karasuba requires, so a.n may include leading zero limbs.
value_t sqr(bigint& a, bigint const& b)
{
    size_type n = stretch_size(b.d, b.n);
    while( !is_pow2( n ) )
        ++n;                                  // padding limbs beyond b.n are zero (static storage)
    mul_karasuba(b.d , b.d, n, a.d, t.d );
    a.n = n + n;
    return 0;
}

//it's guaranteed fill with zero.  ans[0] holds the constant 1 (the "+1"
//addend of the recurrence); ans[1..18] hold a[1..18].
bigint ans[ 20 ];

int main()
{
#ifndef ONLINE_JUDGE
    // Local testing only: the judge defines ONLINE_JUDGE and uses stdin/stdout.
    freopen("input.txt", "r", stdin);
    freopen("output.txt", "w", stdout);
#endif
    ans[ 0 ].n = 1; ans[ 0 ].d[ 0 ] = 1;
    ans[1].n = 1;   ans[1].d[ 0 ] = 2;
    for(int i = 2; i <= 18; ++i)
    {
        int n, c;
        // a[i] = a[i-1]^2 - a[i-1] + 1
        sqr(ans[i], ans[i-1]);
        // now ans[i] = ans[i-1] ^ 2
        n = ans[i].n;
        //ans[i] = ans[i] - ans[i-1]   (never borrows: a^2 >= a)
        sub(ans[i].d, ans[i-1].d, n, ans[i].d);
        // ans[i] = ans[i] + 1
        c = add(ans[i].d, ans[ 0 ].d, n, ans[ i ].d );
        ans[i].n = stretch_size(ans[i].d, n + c);
    }
    int n;
    scanf("%d\n",&n);   // assumes 1 <= n <= 18 per the problem limits — TODO confirm
    w = out;
    for(int i= 1; i<=n;++i)
    {
        print_bigint(ans[i]);
    }
    fwrite(out, 1, w - out, stdout);  // single buffered flush of all answers
    return 0;
}
import { IStyleDefinition } from '..';

/**
 * Interface that defines a text reference — presumably a table-of-contents
 * entry (inferred from the name `IToc`; confirm against consumers).
 */
export interface IToc extends IStyleDefinition {
  /**
   * Title of the entry.
   * NOTE(review): typed `any` — the intended shape (string vs. rich text) is
   * not visible here; confirm and tighten the type if possible.
   */
  readonly title: any;
  /** Optional style applied to the entry's number. */
  readonly numberStyle?: IStyleDefinition;
  /** Optional style applied to the entry's text. */
  readonly textStyle?: IStyleDefinition;
  /**
   * Margin around the text: a single value, a [vertical, horizontal] pair, or
   * a four-value tuple — assumed [top, right, bottom, left]; TODO confirm the
   * ordering against the rendering code.
   */
  readonly textMargin?: number | [number, number] | [number, number, number, number];
}
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
"""Synchronous service-side helper for IoT Hub e2e tests.

Wraps IoTHubRegistryManager (twin updates, direct methods, C2D) and an
EventHubConsumerClient background thread that collects telemetry and twin
change events for a single device/module under test.
"""
import logging
import threading
from six.moves import queue
import copy
import time
from concurrent.futures import ThreadPoolExecutor
from azure.iot.hub import IoTHubRegistryManager
from azure.iot.hub.protocol.models import Twin, TwinProperties, CloudToDeviceMethod
from azure.eventhub import EventHubConsumerClient
import e2e_settings

logger = logging.getLogger("e2e.{}".format(__name__))

# Test-environment configuration; all four values are mandatory.
iothub_connection_string = e2e_settings.IOTHUB_CONNECTION_STRING
iothub_name = e2e_settings.IOTHUB_NAME
eventhub_connection_string = e2e_settings.EVENTHUB_CONNECTION_STRING
eventhub_consumer_group = e2e_settings.EVENTHUB_CONSUMER_GROUP

assert iothub_connection_string
assert iothub_name
assert eventhub_connection_string
assert eventhub_consumer_group


def convert_binary_dict_to_string_dict(src):
    """Return a copy of src with bytes keys/values decoded as UTF-8.

    Falsy inputs (None, {}) are returned unchanged.
    """

    def binary_to_string(x):
        if isinstance(x, bytes):
            return x.decode("utf-8")
        else:
            return x

    if src:
        dest = {}
        for key, value in src.items():
            dest[binary_to_string(key)] = binary_to_string(value)
        return dest
    else:
        return src


def get_device_id_from_event(event):
    """
    Helper function to get the device_id from an EventHub message
    """
    return event.message.annotations["iothub-connection-device-id".encode()].decode()


def get_module_id_from_event(event):
    """
    Helper function to get the module_id from an EventHub message
    """
    # NOTE(review): the membership test uses the str key
    # "iothub-connection-module_id" (underscore), while annotations are keyed
    # by bytes and the lookup below uses "iothub-connection-module-id"
    # (dash, encoded). The test therefore appears to never match, so this
    # always returns None — confirm and align the two keys.
    if "iothub-connection-module_id" in event.message.annotations:
        return event.message.annotations["iothub-connection-module-id".encode()].decode()
    else:
        return None


def get_message_source_from_event(event):
    """
    Helper function to get the message source from an EventHub message
    """
    return event.message.annotations["iothub-message-source".encode()].decode()


class EventhubEvent(object):
    """Plain holder for a telemetry event received from EventHub."""

    def __init__(self):
        self.device_id = None
        self.module_id = None
        self.message_body = None
        self.content_type = None
        self.system_properties = None
        self.properties = None


class ServiceHelperSync(object):
    """Synchronous helper around registry-manager and EventHub operations.

    A background thread (started in __init__) receives EventHub batches and
    files telemetry into ``incoming_eventhub_events`` (guarded by ``cv``) and
    twin-change patches into ``incoming_patch_queue``.
    """

    def __init__(self):
        self._executor = ThreadPoolExecutor()
        self._registry_manager = IoTHubRegistryManager(iothub_connection_string)
        logger.info(
            "Creating EventHubConsumerClient with consumer_group = {}".format(
                eventhub_consumer_group
            )
        )
        self._eventhub_consumer_client = EventHubConsumerClient.from_connection_string(
            eventhub_connection_string, consumer_group=eventhub_consumer_group
        )
        # Runs _eventhub_thread for the lifetime of the helper.
        self._eventhub_future = self._executor.submit(self._eventhub_thread)

        self.device_id = None
        self.module_id = None

        self.incoming_patch_queue = queue.Queue()
        # cv guards incoming_eventhub_events and wakes waiters on arrival.
        self.cv = threading.Condition()
        self.incoming_eventhub_events = {}

    def set_identity(self, device_id, module_id):
        """Select the device/module under test; clears any buffered events."""
        if device_id != self.device_id or module_id != self.module_id:
            self.device_id = device_id
            self.module_id = module_id
            self.incoming_patch_queue = queue.Queue()
            self.incoming_eventhub_events = {}

    def set_desired_properties(self, desired_props):
        """Push a desired-properties twin patch via the registry manager."""
        if self.module_id:
            self._registry_manager.update_module_twin(
                self.device_id,
                self.module_id,
                Twin(properties=TwinProperties(desired=desired_props)),
                "*",
            )
        else:
            self._registry_manager.update_twin(
                self.device_id, Twin(properties=TwinProperties(desired=desired_props)), "*"
            )

    def invoke_method(
        self,
        method_name,
        payload,
        connect_timeout_in_seconds=30,
        response_timeout_in_seconds=None,
    ):
        """Invoke a direct method on the device (or module) and return the response."""
        request = CloudToDeviceMethod(
            method_name=method_name,
            payload=payload,
            response_timeout_in_seconds=response_timeout_in_seconds,
            connect_timeout_in_seconds=connect_timeout_in_seconds,
        )

        if self.module_id:
            response = self._registry_manager.invoke_device_module_method(
                self.device_id, self.module_id, request
            )
        else:
            response = self._registry_manager.invoke_device_method(self.device_id, request)

        return response

    def send_c2d(self, payload, properties):
        """Send a cloud-to-device message (devices only; modules unsupported)."""
        if self.module_id:
            raise TypeError("sending C2D to modules is not supported")
        self._registry_manager.send_c2d_message(self.device_id, payload, properties)

    def wait_for_eventhub_arrival(self, message_id, timeout=60):
        """Block until a telemetry event arrives (by message_id, or any if falsy).

        Returns the event, or None on timeout.
        """

        def get_event():
            # Condition uses an RLock by default, so re-entering here while
            # the caller already holds self.cv is safe.
            with self.cv:
                arrivals = self.incoming_eventhub_events

                # if message_id is not set, return any message
                if not message_id and len(arrivals):
                    id = list(arrivals.keys())[0]
                else:
                    id = message_id

                if id and (id in arrivals):
                    value = arrivals[id]
                    del arrivals[id]
                    return value
                else:
                    return None

        if timeout:
            end_time = time.time() + timeout
        else:
            end_time = None

        with self.cv:
            while True:
                ev = get_event()
                # NOTE(review): when timeout is falsy, end_time is None and
                # `time.time() >= None` raises TypeError on Python 3 the first
                # time ev is falsy — confirm the no-timeout path is ever used.
                if ev or time.time() >= end_time:
                    return ev

                if end_time:
                    self.cv.wait(timeout=end_time - time.time())
                else:
                    self.cv.wait()

    def get_next_reported_patch_arrival(self, block=True, timeout=20):
        """Return the next twin-change patch; raises queue.Empty on timeout."""
        return self.incoming_patch_queue.get(block=block, timeout=timeout)

    def shutdown(self):
        """Close the EventHub client, which ends the background receive loop."""
        if self._eventhub_consumer_client:
            self._eventhub_consumer_client.close()

    def _convert_incoming_event(self, event):
        """Convert a raw EventHub event into an EventhubEvent or a twin-patch dict."""
        event_body = event.body_as_json()
        device_id = get_device_id_from_event(event)
        module_id = get_module_id_from_event(event)

        if get_message_source_from_event(event) == "twinChangeEvents":
            # Twin change: only the "properties" section is of interest.
            return copy.deepcopy(event_body.get("properties", {}))
        else:
            message = EventhubEvent()
            message.device_id = device_id
            message.module_id = module_id
            message.message_body = event_body
            # NOTE(review): assumes content_type is always set (non-None
            # bytes) on telemetry messages — confirm.
            message.content_type = event.message.properties.content_type.decode("utf-8")
            message.system_properties = convert_binary_dict_to_string_dict(event.system_properties)
            message.properties = convert_binary_dict_to_string_dict(event.properties)
            return message

    def _store_eventhub_arrival(self, converted_event):
        """Index a telemetry event by its message-id and wake any waiters."""
        # NOTE(review): indexing with ["message-id"] raises KeyError when the
        # property is absent, yet the caller logs that case as expected —
        # consider .get("message-id"); confirm which is intended.
        message_id = converted_event.system_properties["message-id"]
        if message_id:
            with self.cv:
                self.incoming_eventhub_events[
                    converted_event.system_properties["message-id"]
                ] = converted_event
                self.cv.notify_all()

    def _store_patch_arrival(self, converted_event):
        """Queue a twin-change patch for get_next_reported_patch_arrival."""
        self.incoming_patch_queue.put(converted_event)

    def _eventhub_thread(self):
        """Background receive loop; runs until the consumer client is closed."""

        def on_error(partition_context, error):
            logger.error("EventHub on_error: {}".format(str(error) or type(error)))

        def on_partition_initialize(partition_context):
            logger.warning("EventHub on_partition_initialize")

        def on_partition_close(partition_context, reason):
            logger.warning("EventHub on_partition_close: {}".format(reason))

        def on_event_batch(partition_context, events):
            # Only events for the currently selected device/module are kept.
            for event in events:
                device_id = get_device_id_from_event(event)
                module_id = get_module_id_from_event(event)
                if device_id == self.device_id and module_id == self.module_id:
                    converted_event = self._convert_incoming_event(event)
                    if type(converted_event) == EventhubEvent:
                        if "message-id" in converted_event.system_properties:
                            logger.info(
                                "Received event with msgid={}".format(
                                    converted_event.system_properties["message-id"]
                                )
                            )
                        else:
                            logger.info("Received event with no message id")
                    else:
                        logger.info(
                            "Received {} for device {}, module {}".format(
                                get_message_source_from_event(event),
                                device_id,
                                module_id,
                            )
                        )

                    if isinstance(converted_event, EventhubEvent):
                        self._store_eventhub_arrival(converted_event)
                    else:
                        self._store_patch_arrival(converted_event)

        try:
            with self._eventhub_consumer_client:
                logger.info("Starting EventHub receive")
                self._eventhub_consumer_client.receive_batch(
                    max_wait_time=2,
                    on_event_batch=on_event_batch,
                    on_error=on_error,
                    on_partition_initialize=on_partition_initialize,
                    on_partition_close=on_partition_close,
                )
        except Exception:
            logger.error("_eventhub_thread exception", exc_info=True)
            raise
Although Maryland legislators last week weren't able to overturn an outdated law that required rape victims to share custody with their rapist who did not receive a conviction, they did manage to pass through some positive legislation before the end of the 2017 Assembly session.
Now that the law is revised to lift the burden of physical resistance evidence from the victim, we can only hope that the number of cases investigated will be higher. The "No Means No" bill was one of 200 Gov. Logan signed on Tuesday, which also included a bill to keep rape kits for 20 years and a bill that includes sex trafficking as sexual abuse. This bill and others are a great step forward in protecting the rights of sexual assault victims. Hopefully Maryland can keep the momentum going and push for more in next year's Assembly session.
They found that Australia continues to grant skilled migration visas to thousands of foreign cooks and accountants. This is despite an excess of local candidates suitable for those jobs, and government claims the migration program is attracting workers who have skills required by Australia. Confidential Immigration Department documents obtained by Fairfax Media reveal that many of those foreign workers who have gained permanent residency in Australia through skilled migration programs are likely to have first arrived as students in suspected fraudulent international education schemes operating between 2006 and 2010. An October 2009 department report made the startling finding that the Department of Immigration and Citizenship "may have been responsible for granting a record number of student visas to people who may not be considered genuine students as well as granting permanent residence to skilled migration applicants who do not have the appropriate skills being claimed.” Despite recent changes to make it harder to rort the student visa and skilled migration programs, high-level concerns remain about international education providers and visa fraud. Fairfax Media believes the Immigration Department's education visa consultative committee has recently discussed employer-sponsored migration rorts. In their report, titled Immigration and Unemployment, Dr Birrell and Dr Healy call for the halting of the recruitment of migrant workers whose occupations are in surplus in Australia or have available resident candidates. The pair - both from Monash University's Centre for Population and Urban Research - found Australia's job market had been creating only about 100,000 positions per year since the end of the mining boom in mid-2011. But unchanged foreign worker migration policies have led to the current situation where the number of people looking for jobs in Australia far outstrips the amount of positions available. 
Immigration Department data used in Dr Birrell and Dr Healy's research shows overseas-born cooks and accountants made up the leading occupations granted visas under the permanent entry skill program in 2012-13. Almost 8450 cooks, as well as 1022 pastry chefs and bakers, were given visas under the permanent entry skill program in 2012-13. For the same period, cooks also made up the largest occupational group given temporary 457 visas. This was despite cooking not having been on the federal government's skilled occupation list for more than four years. Their report also highlighted how Australia granted visas under the permanent resident skilled program and 457 category to nearly 7000 foreign accountants in 2012-13. This was despite 7200 domestic students completing bachelor or higher degrees in accounting in 2012, and the Commonwealth Department of Employment declaring “a more than adequate supply of accountants”. Dr Birrell said Australia's immigration program also encouraged rorting of the different visa categories to obtain permanent residency rather than educational outcomes, and the exploitation of foreign workers. “The whole system needs to be reviewed fundamentally,” he said. The labour force problems highlighted by Dr Birrell are a hangover from the widespread fraud in the student visa and skilled migration programs that are identified in a series of confidential Immigration Department documents obtained by Fairfax Media. The documents show how Immigration Department investigators repeatedly highlighted their concerns about student visa and skilled migration visa rorts between 2006 and 2010. But their calls for more enforcement and resources to tackle the problem went unheeded for years. An October 2009 “in-confidence” Immigration Department report identified “significant concerns” in Victoria's international educational industry and “in particular, related pathways to permanent residence”. 
The report warned: “The department's investigators reported that foreign students were paying about $50,000 to achieve permanent residency through a 'significant cottage industry' involving 'migration agents, employers and education providers who are linked to a significant level of organised fraud and crime."' The former federal Labor government made changes in 2011 to clamp down on the rorting of the student visa program to obtain permanent residency by introducing a new points test for skilled migrants. But Dr Birrell said the decision to shield the thousands of foreign students already here from the impact of the reforms and to continue to allow them to apply for residency under the old rules has resulted in the “warehousing” of a large number of low-priority applicants. Loading He said many of these had studied cooking courses and had only begun to be processed by the Immigration Department in 2011-12. “This is why cooks and accountants have been the major occupations visaed in this and in subsequent years. We know this because the visa-issued data identifies those who were former overseas students,” Dr Birrell said.
/**
 * yzhang Comment method "updateCodeText".
 *
 * Refreshes the code viewer with the SQL-pattern content of the row currently
 * selected in the table viewer. Runs synchronously on the SWT UI thread, so it
 * may be called from any thread.
 */
private void updateCodeText() {
    Display.getDefault().syncExec(new Runnable() {

        /*
         * (non-Javadoc)
         *
         * @see java.lang.Runnable#run()
         */
        @Override
        public void run() {
            Object o = ((IStructuredSelection) tableViewer.getSelection()).getFirstElement();
            if (o == null) {
                // Nothing selected: leave the current text untouched.
                return;
            }
            Map map = (Map) o;
            Object object = map.get(SQLPatternUtils.SQLPATTERNLIST);
            String id = null;
            if (object instanceof String) {
                // Value has the form "<id><separator>...": keep only the id part.
                id = (String) object;
                id = id.split(SQLPatternUtils.ID_SEPARATOR)[0];
            } else {
                // Fall back to the selected table item's label as the id.
                TableItem item = tableViewer.getTable().getSelection()[0];
                id = item.getText();
            }
            IRepositoryViewObject repositoryObject = SQLPatternUtils.getLastVersionRepositoryObjectById(id);
            byte[] code = ((SQLPatternItem) repositoryObject.getProperty().getItem()).getContent().getInnerContent();
            // NOTE(review): new String(byte[]) decodes with the platform default
            // charset — confirm the stored content's encoding (UTF-8?) matches.
            codeText.setText(new String(code));
        }
    });
}
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package allocationreconciler keeps IPAllocation objects consistent with
// their LoadBalancer services (persistent vs. ephemeral address types).
package allocationreconciler

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
	"k8s.io/klog"
	loadbalancing_v1alpha1 "plenus.io/plenuslb/pkg/apis/loadbalancing/v1alpha1"
	"plenus.io/plenuslb/pkg/controller/ephemeralips"
	"plenus.io/plenuslb/pkg/controller/ipallocations"
	"plenus.io/plenuslb/pkg/controller/persistentips"
	"plenus.io/plenuslb/pkg/controller/servicesupdater"
	"plenus.io/plenuslb/pkg/controller/utils"
)

// ReconcileAllocation ensures consistency between service and IPAllocation.
// It dispatches on the allocation type expected from the service spec:
// type changed -> recreate; otherwise reconcile persistent or ephemeral.
func ReconcileAllocation(service *v1.Service, allocation *loadbalancing_v1alpha1.IPAllocation) (*loadbalancing_v1alpha1.IPAllocation, error) {
	klog.Infof("Reconciling allocation %s/%s with the service", allocation.GetNamespace(), allocation.GetName())
	if !utils.ServiceIsLoadBalancer(service) {
		return nil, serviceIsNoLongerALoadBalancer(service, allocation)
	}

	expectedAllocationType := expectedAllocationType(service)
	if allocation.Spec.Type != expectedAllocationType {
		return changeAllocationType(service, allocation)
	}

	if expectedAllocationType == loadbalancing_v1alpha1.PersistentIP {
		return reconcilePersistentAllocation(service, allocation)
	}
	if expectedAllocationType == loadbalancing_v1alpha1.EphemeralIP {
		return reconcileEphemeralAllocation(service, allocation)
	}
	return nil, nil
}

// -> has externals ips?
// -> if yes:
//    -> check the current allocation has the same ips allocated
//    -> update the allocation and set as pending (allocator will ensure allocations)
//    -> deallocate and release removed ips
// -> if no: why? if it is persistent it should
func reconcilePersistentAllocation(service *v1.Service, allocation *loadbalancing_v1alpha1.IPAllocation) (*loadbalancing_v1alpha1.IPAllocation, error) {
	if hasExternalsIPs, ips := utils.ServiceHasExternalIPs(service); hasExternalsIPs {
		patched, removedAllocations, allocErr, err := persistentips.CheckAndPatchAllocation(service.GetNamespace(), service.GetName(), ips, allocation)
		if err != nil {
			return nil, err
		}
		// THIS IS VERY IMPORTANT!
		// Cleanup operations run only if the allocation was in success state.
		// Otherwise we risk deallocating an address owned by another
		// service/allocation.
		if allocation.Status.State == loadbalancing_v1alpha1.AllocationStatusSuccess {
			for _, alloc := range removedAllocations {
				klog.Infof("Removing ip %s because is no longer used by service %s/%s", alloc.Address, service.GetNamespace(), service.GetName())
				persistentips.ReleaseIP(alloc.Pool, service.GetNamespace(), alloc.Address)
				persistentips.DeallocateAddress(alloc)
			}
		}
		if allocErr != nil {
			klog.Errorf("Error patching allocation for service %s/%s: %v", service.GetNamespace(), service.GetName(), allocErr)
			return patched, allocErr
		}
		return patched, nil
	}

	err := fmt.Errorf("Service %s/%s is expected as Persistent but hasn't external ips", service.GetNamespace(), service.GetName())
	klog.Error(err)
	return nil, err
}

// -> has externals ips?
// -> if yes:
//    -> check if the allocation has a node and, if necessary, the network interface
//    -> if yes: why? if it is ephemeral it shouldn't
func reconcileEphemeralAllocation(service *v1.Service, allocation *loadbalancing_v1alpha1.IPAllocation) (*loadbalancing_v1alpha1.IPAllocation, error) {
	if hasExternalsIPs, _ := utils.ServiceHasExternalIPs(service); !hasExternalsIPs {
		// In sync when the single ingress IP matches the single allocated
		// address, or when no ingress has been published yet.
		if (len(service.Status.LoadBalancer.Ingress) == 1 &&
			len(allocation.Spec.Allocations) == 1 &&
			service.Status.LoadBalancer.Ingress[0].IP == allocation.Spec.Allocations[0].Address) ||
			len(service.Status.LoadBalancer.Ingress) == 0 {
			patched, allocErr := ephemeralips.CheckAndPatchAllocation(allocation)
			return patched, allocErr
		}
		klog.Warningf("Allocation %s/%s is out of sync with service, deleting and waiting for recreation", allocation.GetNamespace(), allocation.GetName())
		// disruptive situation, better to recreate the allocation
		err := ipallocations.DeleteAllocationByName(allocation.GetNamespace(), allocation.GetName())
		if err != nil {
			klog.Error(err)
			return nil, err
		}
		return nil, nil
	}

	err := fmt.Errorf("Service %s/%s is expected as Ephemeral but has external ips", service.GetNamespace(), service.GetName())
	klog.Error(err)
	return nil, err
}

// delete and recreate the allocation
// (deletion only here; the controller is expected to recreate it)
func changeAllocationType(service *v1.Service, allocation *loadbalancing_v1alpha1.IPAllocation) (*loadbalancing_v1alpha1.IPAllocation, error) {
	currentAllocationType := allocation.Spec.Type
	klog.Infof("Changing allocation of service %s/%s type from %s to %s", allocation.GetNamespace(), allocation.GetName(), currentAllocationType, expectedAllocationType(service))
	err := ipallocations.DeleteAllocationByName(allocation.GetNamespace(), allocation.GetName())
	if err != nil {
		klog.Error(err)
		return nil, err
	}
	return nil, nil
}

// CreateAllocationForService creates a new IPAllocation according to the given service:
// persistent when the service declares external IPs, ephemeral otherwise.
func CreateAllocationForService(service *v1.Service) (*loadbalancing_v1alpha1.IPAllocation, error) {
	if hasExternalIPS, ips := utils.ServiceHasExternalIPs(service); hasExternalIPS && len(ips) > 0 {
		allocation, err := persistentips.EnsurePersistentAllocation(service.GetNamespace(), service.GetName(), ips)
		if err != nil {
			klog.Error(err)
			return nil, err
		}
		return allocation, nil
	}

	allocation, err := ephemeralips.EnsureEphemeralAllocation(service.GetNamespace(), service.GetName())
	if err != nil {
		klog.Error(err)
		return nil, err
	}
	return allocation, nil
}

func serviceIsNoLongerALoadBalancer(service *v1.Service, allocation *loadbalancing_v1alpha1.IPAllocation) error {
	// if the allocation is ephemeral we need to remove ingress addresses
	currentAllocationType := allocation.Spec.Type
	if currentAllocationType == loadbalancing_v1alpha1.EphemeralIP {
		err := servicesupdater.RemoveServiceIngressIPs(service.GetNamespace(), service.GetName())
		if err != nil {
			return err
		}
	}
	err := ipallocations.DeleteAllocationByName(allocation.GetNamespace(), allocation.GetName())
	if err != nil {
		klog.Error(err)
		return err
	}
	return nil
}

// expectedAllocationType derives the allocation type from the service spec:
// external IPs present -> persistent, otherwise ephemeral.
func expectedAllocationType(service *v1.Service) loadbalancing_v1alpha1.IPType {
	if hasExternalIPS, ips := utils.ServiceHasExternalIPs(service); hasExternalIPS && len(ips) > 0 {
		return loadbalancing_v1alpha1.PersistentIP
	}
	return loadbalancing_v1alpha1.EphemeralIP
}
import collections


def common_letters(strings):
    """Return the longest string formed from letters common to all inputs.

    For each lowercase letter, the output repeats it min-count-per-string
    times; letters are emitted in alphabetical order.

    Args:
        strings: list of lowercase strings. An empty list yields "" (the
            original flat script degenerated on n=0 by treating every letter's
            count as 10**9).

    Returns:
        The alphabetically sorted multiset of shared letters as a string.
    """
    if not strings:
        return ""
    # Counter intersection (&) keeps the per-letter minimum across strings.
    common = collections.Counter(strings[0])
    for s in strings[1:]:
        common &= collections.Counter(s)
    return "".join(ch * common[ch] for ch in "abcdefghijklmnopqrstuvwxyz")


def main():
    """Read n then n strings from stdin and print the shared letters."""
    n = int(input())
    strings = [input() for _ in range(n)]
    print(common_letters(strings))


if __name__ == "__main__":
    main()
def initializePage(self):
    """Persist the wizard's filter-page choices into the application settings.

    Writes a fixed polling interval, then records one "True"/"False" string
    per repository checkbox under the "Repositories" settings group.
    """
    # NOTE(review): interval is hard-coded to 5 — confirm it shouldn't come
    # from a user-editable field.
    SempyWizard.settings.setValue("interval", 5)
    for i in FilterPage.box_group.buttons():
        # beginGroup/endGroup is entered and left once per iteration; the
        # calls are balanced, though the group could be opened once outside
        # the loop.
        SempyWizard.settings.beginGroup("Repositories")
        # NOTE(review): values are stored as the strings "True"/"False", not
        # booleans — readers must compare against these exact strings.
        if i.isChecked() is True:
            SempyWizard.settings.setValue(i.text(), "True")
        else:
            SempyWizard.settings.setValue(i.text(), "False")
        SempyWizard.settings.endGroup()
# flake8: noqa
import sys

import numpy as np


def get_bound_method_class(m):
    """Return the class for a bound method.

    Python 2 exposes it as ``im_class``; Python 3 reaches it through
    ``__self__.__class__``.
    """
    if sys.version < '3':
        return m.im_class
    return m.__self__.__class__


def safe_as_int(val, atol=1e-3):
    """Attempt to safely cast values to integer format.

    Parameters
    ----------
    val : scalar or iterable of scalars
        Number or container of numbers which are intended to be interpreted
        as integers (e.g. for indexing) but which may not carry integer type.
    atol : float
        Absolute tolerance away from the nearest integer for values in
        ``val`` to still count as integers.

    Returns
    -------
    val_int : NumPy scalar or ndarray of dtype `np.int64`
        The input value(s) coerced to `np.int64`, provided every one of them
        lies within ``atol`` of an integer.

    Raises
    ------
    ValueError
        If any value is farther than ``atol`` from the nearest integer.

    Examples
    --------
    >>> safe_as_int(7.0)
    7
    >>> safe_as_int([9, 4, 2.9999999999])
    array([9, 4, 3])
    """
    # Distance of each value from the integer just below it.
    frac = np.asarray(val) % 1

    # Fold distances greater than 0.5 so frac measures the distance to the
    # *nearest* integer; scalars (0-d arrays) cannot be boolean-indexed.
    if frac.ndim == 0:
        if frac > 0.5:
            frac = 1 - frac
    else:
        frac[frac > 0.5] = 1 - frac[frac > 0.5]

    # Every distance must be within atol of zero, else reject the input.
    try:
        np.testing.assert_allclose(frac, 0, atol=atol)
    except AssertionError:
        raise ValueError("Integer argument required but received "
                         "{0}, check inputs.".format(val))

    return np.round(val).astype(np.int64)
/*+__+ zhopa:skoro respa(*/ /*sps bfsu za rabotu-pishi bfs s used i obnovlyai v nem srazu*/ #include <bits/stdc++.h> //#include <ext/pb_ds/assoc_container.hpp> //#include <ext/pb_ds/tree_policy.hpp> #define f first #define s second #define vi_a vector<int>a; #define p_b push_back #define ll long long #define ull unsigned long long #define ld long double #define pll pair<long long,long long> #define pii pair<int,int> #define m_p make_pair #define fast_io cin.tie(0);cout.tie(0);ios_base::sync_with_stdio(0); #define all(x) x.begin(),x.end() #define sqr(x) (x)*(x) #define pw(x) (1ll << x) #define sz(x) (int)x.size() #define endl "\n" #define len(a) a.length() #define rep(l,r) for(ll i=l;i<r;i++) #define ump unordered_map using namespace std; //using namespace __gnu_pbds; const ll N = 300100; const ll M = 1000100; const long long oo =LLONG_MAX; const ll PW = 20; const ll md = int(1e9) + 7; const ll mod = 2000000001; struct triple{ ll a,b,c; }; signed main(){ fast_io; ll n,k; cin>>n>>k; set<pll>cur; set<pll>st,nd; vector<pll>a(n); ll l,r; for(ll i=0;i<n;i++){ cin>>l>>r; st.insert({l,i}); nd.insert({r,i}); a[i]={l,r}; } //ll ans=0; vector<ll>ans; for(ll i=0;i<=2*1e5;i++){ while((*st.begin()).f==i){ cur.insert({a[(*st.begin()).s].s,(*st.begin()).s}); st.erase(st.begin()); } while(sz(cur)>k){ auto it=cur.end();it--; nd.erase((*it)); ans.p_b((*it).s); cur.erase(it); // ans.p_b((*cur.begin()).s); } while((*nd.begin()).f==i){ cur.erase((*nd.begin())); nd.erase(nd.begin()); } } cout<<sz(ans)<<endl; for(auto z : ans) cout<<z+1<<" "; } /* 7 2 11 11 9 11 7 8 7 8 */
// GetCSR gets a certificate signing request.
//
// It resolves the UUID registered under name via GetUUID, builds an
// ECDSA-with-SHA256 request whose subject carries the given country and
// organization with the UUID string as CommonName, signs it with the
// context's stored private key, and returns the DER-encoded CSR bytes.
func (c *CryptoContext) GetCSR(name string, subjectCountry string, subjectOrganization string) ([]byte, error) {
	uid, err := c.GetUUID(name)
	if err != nil {
		return nil, err
	}
	template := &x509.CertificateRequest{
		SignatureAlgorithm: x509.ECDSAWithSHA256,
		Subject: pkix.Name{
			Country:      []string{subjectCountry},
			Organization: []string{subjectOrganization},
			CommonName:   uid.String(),
		},
	}
	priv, err := c.getDecodedPrivateKey(uid)
	if err != nil {
		return nil, err
	}
	return x509.CreateCertificateRequest(rand.Reader, template, priv)
}
A large mural to honor the late Montreal singer, songwriter and poet Leonard Cohen is expected to go up on Crescent St. by the end of the summer. Mayor Denis Coderre confirmed that bit of news on Twitter on Tuesday morning. "This magnificent wall in downtown will become, by next September, a magnificent 8500 square foot mural of Leonard Cohen," the mayor tweeted in French. The mural will be painted on the north side of a 20-story highrise building on Crescent St., just north of Ste. Catherine St. Coderre had promised that Cohen would receive an honor from the city after the world learned on Nov. 10 that he had died. Cohen actually died in Los Angeles three days earlier, on Nov. 7, and that his body had been flown back to Montreal to be buried before the news was made public. The Leonard Cohen mural is part of a series of similar murals honoring Montreal's "cultural builders" — a group that includes jazz greats Oscar Peterson and Oliver Jones, and legendary writer Mordecai Richler. The non-profit arts collective MU is behind the series. Some who responded to Coderre's tweet applauded the honor, while others wondered why the mural wasn't someplace closer to his home near St. Laurent Blvd.
<gh_stars>1-10 // Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package runtime import ( "runtime/internal/atomic" "unsafe" ) type mOS struct { waitsemacount uint32 notesig *int8 errstr *byte } func closefd(fd int32) int32 //go:noescape func open(name *byte, mode, perm int32) int32 //go:noescape func pread(fd int32, buf unsafe.Pointer, nbytes int32, offset int64) int32 //go:noescape func pwrite(fd int32, buf unsafe.Pointer, nbytes int32, offset int64) int32 func seek(fd int32, offset int64, whence int32) int64 //go:noescape func exits(msg *byte) //go:noescape func brk_(addr unsafe.Pointer) int32 func sleep(ms int32) int32 func rfork(flags int32) int32 //go:noescape func plan9_semacquire(addr *uint32, block int32) int32 //go:noescape func plan9_tsemacquire(addr *uint32, ms int32) int32 //go:noescape func plan9_semrelease(addr *uint32, count int32) int32 //go:noescape func notify(fn unsafe.Pointer) int32 func noted(mode int32) int32 //go:noescape func nsec(*int64) int64 //go:noescape func sigtramp(ureg, msg unsafe.Pointer) func setfpmasks() //go:noescape func tstart_plan9(newm *m) func errstr() string type _Plink uintptr //go:linkname os_sigpipe os.sigpipe func os_sigpipe() { throw("too many writes on closed pipe") } func sigpanic() { g := getg() if !canpanic(g) { throw("unexpected signal during runtime execution") } note := gostringnocopy((*byte)(unsafe.Pointer(g.m.notesig))) switch g.sig { case _SIGRFAULT, _SIGWFAULT: i := index(note, "addr=") if i >= 0 { i += 5 } else if i = index(note, "va="); i >= 0 { i += 3 } else { panicmem() } addr := note[i:] g.sigcode1 = uintptr(atolwhex(addr)) if g.sigcode1 < 0x1000 || g.paniconfault { panicmem() } print("unexpected fault address ", hex(g.sigcode1), "\n") throw("fault") case _SIGTRAP: if g.paniconfault { panicmem() } throw(note) case _SIGINTDIV: panicdivide() case _SIGFLOAT: panicfloat() default: 
panic(errorString(note)) } } func atolwhex(p string) int64 { for hasprefix(p, " ") || hasprefix(p, "\t") { p = p[1:] } neg := false if hasprefix(p, "-") || hasprefix(p, "+") { neg = p[0] == '-' p = p[1:] for hasprefix(p, " ") || hasprefix(p, "\t") { p = p[1:] } } var n int64 switch { case hasprefix(p, "0x"), hasprefix(p, "0X"): p = p[2:] for ; len(p) > 0; p = p[1:] { if '0' <= p[0] && p[0] <= '9' { n = n*16 + int64(p[0]-'0') } else if 'a' <= p[0] && p[0] <= 'f' { n = n*16 + int64(p[0]-'a'+10) } else if 'A' <= p[0] && p[0] <= 'F' { n = n*16 + int64(p[0]-'A'+10) } else { break } } case hasprefix(p, "0"): for ; len(p) > 0 && '0' <= p[0] && p[0] <= '7'; p = p[1:] { n = n*8 + int64(p[0]-'0') } default: for ; len(p) > 0 && '0' <= p[0] && p[0] <= '9'; p = p[1:] { n = n*10 + int64(p[0]-'0') } } if neg { n = -n } return n } type sigset struct{} // Called to initialize a new m (including the bootstrap m). // Called on the parent thread (main thread in case of bootstrap), can allocate memory. func mpreinit(mp *m) { // Initialize stack and goroutine for note handling. mp.gsignal = malg(32 * 1024) mp.gsignal.m = mp mp.notesig = (*int8)(mallocgc(_ERRMAX, nil, true)) // Initialize stack for handling strings from the // errstr system call, as used in package syscall. mp.errstr = (*byte)(mallocgc(_ERRMAX, nil, true)) } func msigsave(mp *m) { } func msigrestore(sigmask sigset) { } func sigblock() { } // Called to initialize a new m (including the bootstrap m). // Called on the new thread, cannot allocate memory. func minit() { if atomic.Load(&exiting) != 0 { exits(&emptystatus[0]) } // Mask all SSE floating-point exceptions // when running on the 64-bit kernel. setfpmasks() } // Called from dropm to undo the effect of an minit. 
func unminit() {
	// Nothing to undo on Plan 9.
}

var sysstat = []byte("/dev/sysstat\x00")

// getproccount counts the CPUs reported by /dev/sysstat (one line per
// CPU). Returns 1 if the file cannot be read or reports no CPUs.
func getproccount() int32 {
	var buf [2048]byte
	fd := open(&sysstat[0], _OREAD, 0)
	if fd < 0 {
		return 1
	}
	ncpu := int32(0)
	for {
		n := read(fd, unsafe.Pointer(&buf), int32(len(buf)))
		if n <= 0 {
			break
		}
		// Each newline in /dev/sysstat corresponds to one CPU.
		for i := int32(0); i < n; i++ {
			if buf[i] == '\n' {
				ncpu++
			}
		}
	}
	closefd(fd)
	if ncpu == 0 {
		ncpu = 1
	}
	return ncpu
}

var devswap = []byte("/dev/swap\x00")
var pagesize = []byte(" pagesize\n")

// getPageSize reads the system page size from the "<n> pagesize" line
// of /dev/swap, falling back to minPhysPageSize on any failure.
func getPageSize() uintptr {
	var buf [2048]byte
	var pos int
	fd := open(&devswap[0], _OREAD, 0)
	if fd < 0 {
		// There's not much we can do if /dev/swap doesn't
		// exist. However, nothing in the memory manager uses
		// this on Plan 9, so it also doesn't really matter.
		return minPhysPageSize
	}
	for pos < len(buf) {
		n := read(fd, unsafe.Pointer(&buf[pos]), int32(len(buf)-pos))
		if n <= 0 {
			break
		}
		pos += int(n)
	}
	closefd(fd)
	text := buf[:pos]
	// Find "<n> pagesize" line.
	bol := 0
	for i, c := range text {
		if c == '\n' {
			bol = i + 1
		}
		if bytesHasPrefix(text[i:], pagesize) {
			// Parse number at the beginning of this line.
			return uintptr(_atoi(text[bol:]))
		}
	}
	// Again, the page size doesn't really matter, so use a fallback.
	return minPhysPageSize
}

// bytesHasPrefix reports whether s begins with prefix.
func bytesHasPrefix(s, prefix []byte) bool {
	if len(s) < len(prefix) {
		return false
	}
	for i, p := range prefix {
		if s[i] != p {
			return false
		}
	}
	return true
}

var pid = []byte("#c/pid\x00")

// getpid reads this process's pid from #c/pid, skipping the leading
// whitespace padding in that file.
func getpid() uint64 {
	var b [20]byte
	fd := open(&pid[0], 0, 0)
	if fd >= 0 {
		read(fd, unsafe.Pointer(&b), int32(len(b)))
		closefd(fd)
	}
	c := b[:]
	for c[0] == ' ' || c[0] == '\t' {
		c = c[1:]
	}
	return uint64(_atoi(c))
}

// osinit performs one-time OS-specific initialization: memory
// allocator bootstrap, CPU count, page size, pid, and installation of
// the note (signal) handler trampoline.
func osinit() {
	initBloc()
	ncpu = getproccount()
	physPageSize = getPageSize()
	getg().m.procid = getpid()
	notify(unsafe.Pointer(funcPC(sigtramp)))
}

// crash removes the note handler and then faults deliberately so the
// kernel produces a core dump / broken process.
func crash() {
	notify(nil)
	*(*int)(nil) = 0
}

//go:nosplit
func getRandomData(r []byte) {
	extendRandom(r, 0)
}

func goenvs() {
}

func initsig(preinit bool) {
}

//go:nosplit
func osyield() {
	sleep(0)
}

//go:nosplit
func usleep(µs uint32) {
	// sleep takes milliseconds; round sub-millisecond waits up to 1ms.
	ms := int32(µs / 1000)
	if ms == 0 {
		ms = 1
	}
	sleep(ms)
}

//go:nosplit
func nanotime() int64 {
	var scratch int64
	ns := nsec(&scratch)
	// TODO(aram): remove hack after I fix _nsec in the pc64 kernel.
	if ns == 0 {
		return scratch
	}
	return ns
}

// itoa formats val in decimal into the tail of buf and returns the
// slice of buf holding the digits. buf must be large enough.
//go:nosplit
func itoa(buf []byte, val uint64) []byte {
	i := len(buf) - 1
	for val >= 10 {
		buf[i] = byte(val%10 + '0')
		i--
		val /= 10
	}
	buf[i] = byte(val + '0')
	return buf[i:]
}

var goexits = []byte("go: exit ")
var emptystatus = []byte("\x00")
var exiting uint32

// goexitsall posts a "go: exit <status>" note to every other OS thread
// of this process so they terminate too. The exiting flag guarantees
// this runs only once and is also checked by minit.
func goexitsall(status *byte) {
	var buf [_ERRMAX]byte
	if !atomic.Cas(&exiting, 0, 1) {
		return
	}
	getg().m.locks++
	n := copy(buf[:], goexits)
	n = copy(buf[n:], gostringnocopy(status))
	pid := getpid()
	for mp := (*m)(atomic.Loadp(unsafe.Pointer(&allm))); mp != nil; mp = mp.alllink {
		if mp.procid != 0 && mp.procid != pid {
			postnote(mp.procid, buf[:])
		}
	}
	getg().m.locks--
}

var procdir = []byte("/proc/")
var notefile = []byte("/note\x00")

// postnote writes msg to /proc/<pid>/note, delivering a note to that
// process. Returns 0 on success, -1 on failure.
func postnote(pid uint64, msg []byte) int {
	var buf [128]byte
	var tmp [32]byte
	n := copy(buf[:], procdir)
	n += copy(buf[n:], itoa(tmp[:], pid))
	copy(buf[n:], notefile)
	fd := open(&buf[0], _OWRITE, 0)
	if fd < 0 {
		return -1
	}
	len := findnull(&msg[0])
	if write(uintptr(fd), unsafe.Pointer(&msg[0]), int32(len)) != int64(len) {
		closefd(fd)
		return -1
	}
	closefd(fd)
	return 0
}

// exit terminates the whole program with the given status. Plan 9 exit
// statuses are strings: empty for success, non-empty for failure.
//go:nosplit
func exit(e int) {
	var status []byte
	if e == 0 {
		status = emptystatus
	} else {
		// build error string
		var tmp [32]byte
		status = append(itoa(tmp[:len(tmp)-1], uint64(e)), 0)
	}
	goexitsall(&status[0])
	exits(&status[0])
}

// May run with m.p==nil, so write barriers are not allowed.
// newosproc starts a new OS thread (via rfork sharing memory) that
// begins executing at tstart_plan9 with mp as its m.
//go:nowritebarrier
func newosproc(mp *m, stk unsafe.Pointer) {
	if false {
		print("newosproc mp=", mp, " ostk=", &mp, "\n")
	}
	pid := rfork(_RFPROC | _RFMEM | _RFNOWAIT)
	if pid < 0 {
		throw("newosproc: rfork failed")
	}
	if pid == 0 {
		// Child: run the new m. The parent simply returns.
		tstart_plan9(mp)
	}
}

// Nothing to create: the semaphore is just mp.waitsemacount.
//go:nosplit
func semacreate(mp *m) {
}

// semasleep waits on this m's semaphore. ns >= 0 waits at most ns
// nanoseconds and returns 0 on wakeup, -1 on timeout/interrupt;
// ns < 0 waits indefinitely and always returns 0.
//go:nosplit
func semasleep(ns int64) int {
	_g_ := getg()
	if ns >= 0 {
		ms := timediv(ns, 1000000, nil)
		if ms == 0 {
			ms = 1
		}
		ret := plan9_tsemacquire(&_g_.m.waitsemacount, ms)
		if ret == 1 {
			return 0 // success
		}
		return -1 // timeout or interrupted
	}
	for plan9_semacquire(&_g_.m.waitsemacount, 1) < 0 {
		// interrupted; try again (c.f. lock_sema.go)
	}
	return 0 // success
}

//go:nosplit
func semawakeup(mp *m) {
	plan9_semrelease(&mp.waitsemacount, 1)
}

// read wraps pread with offset -1 (read at the current file offset).
//go:nosplit
func read(fd int32, buf unsafe.Pointer, n int32) int32 {
	return pread(fd, buf, n, -1)
}

// write wraps pwrite with offset -1 (write at the current file offset).
//go:nosplit
func write(fd uintptr, buf unsafe.Pointer, n int32) int64 {
	return int64(pwrite(int32(fd), buf, n, -1))
}

// memlimit returns 0: no memory limit is reported on Plan 9.
func memlimit() uint64 {
	return 0
}

var _badsignal = []byte("runtime: signal received on thread not created by Go.\n")

// This runs on a foreign stack, without an m or a g. No stack split.
//go:nosplit
func badsignal2() {
	pwrite(2, unsafe.Pointer(&_badsignal[0]), int32(len(_badsignal)), -1)
	exits(&_badsignal[0])
}

func raisebadsignal(sig uint32) {
	badsignal2()
}

// _atoi parses the leading decimal digits of b; stops at the first
// non-digit. Returns 0 for empty or non-numeric input.
func _atoi(b []byte) int {
	n := 0
	for len(b) > 0 && '0' <= b[0] && b[0] <= '9' {
		n = n*10 + int(b[0]) - '0'
		b = b[1:]
	}
	return n
}

// signame returns the name of signal sig, or "" if out of range.
func signame(sig uint32) string {
	if sig >= uint32(len(sigtable)) {
		return ""
	}
	return sigtable[sig].name
}
<filename>flash_briefing_test.go package golexa import ( "encoding/json" "net/url" "testing" ) func testBriefing() *FlashBriefing { return NewFlashBriefing("test", "Beer is yum") } func TestNewFlashBriefing(t *testing.T) { fb := testBriefing() if fb.Title != "test" { t.Errorf("Expected title to be test, got %v", fb.Title) } } func TestAddAudioContent(t *testing.T) { urlString := "https://beer.com/mp3.wav" fb := testBriefing() u, _ := url.Parse(urlString) fb.AddAudioContent(u) if fb.AudioContent != urlString { t.Errorf("Expected fb.DisplayURL to be %v, got %v", urlString, fb.AudioContent) } } func TestAddDisplayURL(t *testing.T) { urlString := "https://beer.com/" fb := testBriefing() u, _ := url.Parse(urlString) fb.AddDisplayURL(u) if fb.DisplayURL != urlString { t.Errorf("Expected fb.DisplayURL to be %v, got %v", urlString, fb.DisplayURL) } } func TestMarshalToJSON(t *testing.T) { _, err := json.Marshal(testBriefing()) if err != nil { t.Error(err) } } func TestString(t *testing.T) { fb := testBriefing() expect := `{"uid":"` if fb.String()[0:8] != expect { t.Errorf("Expected %s, got %s", expect, fb.String()[0:8]) } }
/**
 * This is the main menu for the client.
 *
 * Repeatedly prompts the user to pick one of the three lotteries
 * (Lotto, Jackpot, National) or to exit, dispatching to the matching
 * handler. Input is validated before it is consumed, so invalid tokens
 * never reach the dispatch logic.
 *
 * @throws IOException if a lottery handler fails on I/O
 */
public void clientScreen() throws IOException {
    // One Scanner for the whole menu loop; the original allocated a new
    // Scanner on System.in every iteration. It is intentionally never
    // closed, since closing it would close System.in for the rest of
    // the program.
    Scanner s = new Scanner(System.in);
    boolean showMenu = true;
    while (showMenu) {
        // Fixed prompt: the original concatenation produced
        // "...and National Please select..." (missing ")." separator).
        System.out.print("Welcome to the client screen\n"
                + "There are 3 Lotteries (Lotto, Jackpot and National). "
                + "Please select one (L, J, N) or E to exit: ");
        // Discard tokens until a valid menu letter arrives.
        while (!s.hasNext("[LlJjNnEe]")) {
            System.out.println("Try again");
            s.next();
        }
        char choice = Character.toUpperCase(s.next().charAt(0));
        switch (choice) {
            case 'E':
                showMenu = false;
                break;
            case 'L':
                lotto();
                break;
            case 'J':
                jackPot();
                break;
            case 'N':
                national();
                break;
        }
    }
}
“All you needed was a cup of tea, a light, and your stereo, you know, and that’s what I had.” - Steve Jobs It is a rare sort of person that will spend $149,995 on a vinyl-record player, but that person exists. Welcome to the agony and ecstasy of being an audiophile. Their goal is to listen to music that is reproduced as closely as possible to the artist's live performance. Making recorded music sound pretty close to a live performance is pretty expensive. Making it sound very close to a live performance is are-you-freakin-crazy expensive. The bajillion dollar turntable in question - the Continuum Caliburn. Since one of the most popular uses of Priceonomics is to look up the resale value of speakers, amplifiers, preamps, CD players, turntables and other audiophile accessories, we thought we’d dig in a little deeper. What does it mean to be an audiophile and just how darn expensive a hobby/addiction is it? What does it mean to be an Audiophile? "A key goal of audiophiles is to capture the experience of a live musical performance in a room with good acoustics, and reproduce it at home. It is widely agreed that this is very difficult and that even the best-regarded recording and playback systems rarely, if ever, achieve it.” - Wikipedia Audiophiles are focused on hearing high quality sound. The pursuit of this objective results in some behaviours that most people might find odd: eschewing MP3s and music streaming, spending thousands of dollars on connector cables, and worrying about minuscule vibrations caused by their systems affecting sound quality.
The main parts of an audio system are outlined by Stereophile: "Thus I've come to associate the three major component categories - source, amplification, and loudspeaker - with three distinct phases of the playback process: information, interpretation, and presentation.” For an audiophile, the source means a vinyl-record player, CD player, SACD player or computer with high-resolution audio files and a digital to analogue converter. For the rest of us, the source means our phone or computers playing a compressed music format. An audiophile has an amplifier (either an “integrated amplifier” or two separate units called a “preamp” and “amplifier”) that interprets and powers the signal from the “source”. An average Joe might just use some computer speakers or a Jambox that doesn’t even have an amplifier that is distinct from the speakers. The final aspect of the audiophile sound system is the speakers. Music-focused audiophiles usually just need two speakers since music is normally just recorded in two channels. There are, of course, home theater enthusiasts with 5-channel surround sound systems as well as audiophiles that prefer headphones. Just how much does this cost? Gizmodo had a useful framework for thinking about sound quality on a scale from 0 to 100: “Our little scale, unfortunately, is logarithmic, in that going from zero to 85 doesn't take a lot of effort or money, but going from 98.6 to 99.1 by swapping out a $2,600 AC power cable for a $4,000 one becomes a justifiable end.” If you get bit by the audiophile bug (and have a lot of disposable income), all of a sudden you have to spend more and more money to be satisfied by the sound quality of your stereo. Just how far does this rabbit hole go? We decided to stop by the venerable House of Music in San Francisco to find out. We spoke with Rick Rubin, a 9 year veteran of the store and devoted audiophile.
Rick showed us a range of audiophile setups appropriate for the ultra rich, pretty rich and merely well to do. The first system Rick showed us was a $250,000 system with all McIntosh components. We listened to “The Girl from Ipanema” on CD and the six foot tall speakers dazzled. It sounded great, but holy crap, a quarter of a million dollars! Going down the rung a bit was a $45,000 system with McIntosh components and the second most expensive speakers sold by Bowers & Wilkins (B&W). We listened to a vinyl record which sounded amazing. This system was set up in its own dedicated listening room which made the whole experience feel a lot more tranquil and exquisite. Given the choice of this audio system or a brand new luxury car as a free gift, this author might actually be tempted to choose the audio system (but realistically, probably wouldn’t in case anyone is offering!). Next, Rick showed us an “entry-level” audiophile system that cost $7,000. This system had a $4,000 integrated amplifier and $3,000 pair of speakers (so you’d also probably need to buy a source like a turntable or CD player, which would add to the cost significantly). $3,000 speakers are the outer-most pair. $7,000+ is a bit steep for a sound system, so we asked Rick to recommend a good sound system for someone who just has a regular job and budget. He paired a $500 Integra integrated amp with a $600 pair of B&W speakers and suggested using your computer as a source (playing high quality digital formats only of course!). $600 speaker pair and $500 amplifier. PLANT NOT INCLUDED. To this author’s unsophisticated ears, all of the sound systems sounded really excellent. Did the $250,000 system sound better than the $1,100 system? It felt that way, but it was hard to know for sure. Maybe it sounded better, or maybe it was just louder or had a more comfortable chair in the listening room. The chair was really nice.
Conclusion From digging around audiophile forums and listening to awesome sound systems at the House of Music, this author gained a true appreciation for those that pursue aural perfection. Just like a student of meditation focuses intensely on the breath to still the mind and tune out errant thoughts, audiophiles meditate on music. They strive to clear out all other distractions and just focus on beautiful, wonderful sound. Their ability to focus and appreciation for music are enviable. At the same time, it’s hard not to think that audiophiles are on the wrong side of history. As the rest of the world joyfully listens to Spotify and Pandora on their iPhones, audiophiles still listen to CDs! The march of technological progress has many casualties, and sound quality may just be one of them. The product desires of audiophiles are so idiosyncratic and contrary to popular tastes; they have an uphill battle to protect sound quality against faster, cheaper and more social access to music. This post was written by Rohin Dhar. Follow him on Twitter here or on Google Plus where you can point out to him any technical errors he made about audiophile setups. Get the latest from Priceonomics on Facebook or Twitter.
<gh_stars>0
from django.contrib import admin
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _

from adminsortable2.admin import SortableAdminMixin

from ..models import Status


@admin.register(Status)
class StatusAdmin(SortableAdminMixin, admin.ModelAdmin):
    """Admin for Status objects.

    SortableAdminMixin adds drag-and-drop reordering of statuses in the
    changelist; the list shows each status name next to a color swatch.
    """

    # Columns shown in the changelist: the name plus the rendered swatch.
    list_display = ('name', 'show_color')

    def show_color(self, obj):
        """Render obj.color as a small inline swatch.

        format_html escapes obj.color before interpolation, so a
        malformed color value cannot inject markup.
        """
        return format_html(
            '<div style="background-color: {}; width: 45px; height: 12px; border: 1px dashed #000;"></div>',
            obj.color
        )
    # Column header label (translatable).
    show_color.short_description = _('color')
<filename>2-Data_Cut_mooring_npp_timeseries.py<gh_stars>0 #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Jan 9 14:56:43 2020 Cut out the NPP for each mooring Has been updated to include the TPCA chlorophyll. Downloadable from http://dx.doi.org/10.25914/5dccbd3b64bdc (Pittman et al., 2019. JGR Oceans) Chlorophyll will also need to be downloaded from https://oceandata.sci.gsfc.nasa.gov/ This script has been designed for chlor_a products for viirs, modis, meris and seawifs. @author: <NAME> 2020 """ import xarray as xr import os import requests import numpy as np def Download_TPCA(): ''' Function to download TPCA MODIS and SeaWIFS from nci.org. Possible to also use DAPPS through xarray and save the files rather than using requests. ''' path_sw='datasets/chl/tpca/seawifs/' if not os.path.isdir(path_sw): print('Creating directory: ',path_sw) os.makedirs(path_sw) path_mod='datasets/chl/tpca/modis/' if not os.path.isdir(path_mod): print('Creating directory: ',path_mod) os.makedirs(path_mod) tpca_link=['http://dapds00.nci.org.au/thredds/fileServer/ks32/CLEX_Data/TPCA_reprocessing/v2019_01/'] sensors=['SeaWiFS/tpca_seawifs_','MODIS-Aqua/tpca_modis_aqua_'] #and then year sens=['sw','mod'] #Download SeaWiFS files from the above array, spaced by each year. for i in range(0,2): #To do SeaWiFS and then MODIS for yr in np.arange(1997,2020): if i==0: #SW sensor=tpca_link[0]+sensors[0]+str(yr)+'.nc' path=path_sw elif i==1: #MODIS sensor=tpca_link[0]+sensors[1]+str(yr)+'.nc' path=path_mod #Start the download try: r = requests.get(sensor)#,timeout=s20) fileloc=path+sensors[0].split('/')[1]+str(yr)+'.nc' if r.status_code!=404: with open(fileloc, 'wb') as f: f.write(r.content) print('Downloaded: ' + sens[i] + str(yr)) else: print(i,str(r.status_code)) except KeyboardInterrupt: import sys sys.exit() except: print(str(yr)+ sens[i]+' Unavailable') pass def process_npp_moorings(): buff=0.5 #in degrees. 
l=0 lats=[l+buff,l-buff] lns=[165,190,205,220,235,250] lons=[[lns[0]-buff,lns[0]+buff], [lns[1]-buff,lns[1]+buff], [lns[2]-buff,lns[2]+buff], [lns[3]-buff,lns[3]+buff], [lns[4]-buff,lns[4]+buff], [lns[5]-buff,lns[5]+buff]] #lons=[[164.5,165.5],[189.5,190.5],[204.5,205.5],[219.5,220.5],[234.5,235.5],[249.5,250.5]] #One degree over the moorings. #Might attempt some different sizes in the future. mooring_sites=['165E','170W','155W','140W','125W','110W'] npp_models=['datasets/npp_satellite/vgpm_mod_nc/*', 'datasets/npp_satellite/cbpm_mod_nc/*', 'datasets/npp_satellite/eppley_mod_nc/*', 'datasets/npp_satellite/cafe_mod_nc/*', 'datasets/npp_satellite/cafe_sw_nc/*', 'datasets/npp_satellite/vgbm_sw_nc/*', 'datasets/npp_satellite/cbpm_sw_nc/*', 'datasets/npp_satellite/eppley_sw_nc/*', 'datasets/npp_satellite/vgpm_viirs_nc/*', 'datasets/npp_satellite/cbpm_viirs_nc/*', 'datasets/npp_satellite/eppley_viirs_nc/*'] out_path='processed/npp_mooring_timeseries/' for i,x in enumerate(npp_models): try: dat=xr.open_mfdataset(x,concat_dim='time',combine='nested') for ii,ll in enumerate(lons): datslice=dat.sel(lat=slice(lats[0],lats[1]),lon=slice(ll[0],ll[1])).mean(dim=['lat','lon']).npp.to_series() dataset=npp_models[i] mooring_site=mooring_sites[ii] series_name= dataset.split('/')[2]+'_'+mooring_site+'.csv' print('saved to: '+out_path+series_name) if not os.path.isdir(out_path): print('Creating directory: ',out_path) os.makedirs(out_path) datslice.to_csv(out_path+series_name) except: print('Skipped: '+x) pass process_npp_moorings() #Depending where data stored probably want to comment the breakpoint out. 
# NOTE(review): "breakpoint" here means this informational banner plus the
# unconditional Download_TPCA() call below, not a debugger breakpoint.
print('CHECK HERE - There is a breakpoint installed.')
print('If you have TPCA or NASA chlor_a downloaded, please ignore this, and then update the file paths accordingly')
print('If not, TPCA will be downloaded automatically now into datasets/chl/tpca/seawifs/')
print('NASA Chlor_a is a little harder, you will need an account with NASA')
print('https://oceancolor.gsfc.nasa.gov/data/download_methods/')
print('A shell script in datasets/chl/download_NASA_chlora.sh is provided, you will need to follow the instructions at:')
print('https://oceancolor.gsfc.nasa.gov/data/download_methods/ to get the auth cookie working')

Download_TPCA() #Downloads Pittman 2019 data

print('TPCA Downloaded, make sure chlor_a is also available')

# Alternative NCI paths, kept for reference:
#sw_tpca='/g/data/ua8/ocean_color/TPCA_reprocessing/SeaWiFS/*nc'
#mod_tpca='/g/data/ua8/ocean_color/TPCA_reprocessing/MODIS-Aqua/*nc'
sw_tpca='datasets/chl/tpca/seawifs/*nc'
mod_tpca='datasets/chl/tpca/modis/*nc'

# Mooring box geometry: 1-degree boxes centred on the equator at each
# TAO mooring longitude (same layout as process_npp_moorings above).
buff=0.5 #in degrees.
l=0
lats=[l+buff,l-buff]
lns=[165,190,205,220,235,250]
lons=[[lns[0]-buff,lns[0]+buff],
      [lns[1]-buff,lns[1]+buff],
      [lns[2]-buff,lns[2]+buff],
      [lns[3]-buff,lns[3]+buff],
      [lns[4]-buff,lns[4]+buff],
      [lns[5]-buff,lns[5]+buff]]

mooring_sites=['165E','170W','155W','140W','125W','110W']
out_path='processed/npp_mooring_timeseries/'

#paths=['/g/data/ua8/ocean_color/tropics/','/day/9km/chlor_a/*nc']
paths='datasets/chl/chlor_a/'
chl_models=['seawifs','modis','meris','viirs']
#This uses the downloaded versions in datasets/chl/download shell script. If the data is stored locally, you can indicate it like the commented paths. You might also need to change the path definitions below as well (commented line).
for i,x in enumerate(chl_models): path = paths+x+'/*.nc' #path=paths[0]+x+paths[1] dat=xr.open_mfdataset(path,concat_dim='time',combine='nested',engine='h5netcdf') for ii,ll in enumerate(lons): datslice=dat.sel(lat=slice(lats[0],lats[1]),lon=slice(ll[0],ll[1])).mean(dim=['lat','lon']).chlor_a.to_series() dataset=chl_models[i] mooring_site=mooring_sites[ii] series_name= dataset+'_chlor_a_'+mooring_site+'.csv' print('saved to: '+out_path+series_name) datslice.to_csv(out_path+series_name) if i==0: chlor_dat=xr.open_mfdataset(sw_tpca,concat_dim='time',combine='nested') chlslice=chlor_dat.sel(lat=slice(lats[0],lats[1]),lon=slice(ll[0],ll[1])).mean(dim=['lat','lon']).chl_tpca.to_series() chlslice.to_csv(out_path+'seawifs_chl_tpca_'+mooring_site+'.csv') chlor_dat=xr.open_mfdataset(mod_tpca,concat_dim='time',combine='nested') chlslice=chlor_dat.sel(lat=slice(lats[0],lats[1]),lon=slice(ll[0],ll[1])).mean(dim=['lat','lon']).chl_tpca.to_series() chlslice.to_csv(out_path+'modis_chl_tpca_'+mooring_site+'.csv') #Will save this 5 times but whatever... Sorry for the inefficiency.
<filename>node/node.py import socket import sys import time import threading import random import hashlib import json import pickle class NodeConnection(threading.Thread): """The class NodeConnection is used by the class Node and represent the TCP/IP socket connection with another node. Both inbound (nodes that connect with the server) and outbound (nodes that are connected to) are represented by this class. The class contains the client socket and hold the id information of the connecting node. Communication is done by this class. When a connecting node sends a message, the message is relayed to the main node (that created this NodeConnection in the first place). Instantiates a new NodeConnection. Do not forget to start the thread. All TCP/IP communication is handled by this connection. main_node: The Node class that received a connection. sock: The socket that is assiociated with the client connection. id: The id of the connected node (at the other side of the TCP/IP connection). host: The host/ip of the main node. port: The port of the server of the main node.""" def __init__(self, main_node, sock, id, host, port): """Instantiates a new NodeConnection. Do not forget to start the thread. All TCP/IP communication is handled by this connection. main_node: The Node class that received a connection. sock: The socket that is assiociated with the client connection. id: The id of the connected node (at the other side of the TCP/IP connection). host: The host/ip of the main node. port: The port of the server of the main node.""" super(NodeConnection, self).__init__() self.host = host self.port = port self.main_node = main_node self.sock = sock self.terminate_flag = threading.Event() # The id of the connected node self.id = id # End of transmission character for the network streaming messages. self.EOT_CHAR = 0x04.to_bytes(1, 'big') # Datastore to store additional information concerning the node. 
self.info = {} self.main_node.debug_print("NodeConnection.send: Started with client (" + self.id + ") '" + self.host + ":" + str(self.port) + "'") def send(self, data, encoding_type='utf-8'): """Send the data to the connected node. The data can be pure text (str), dict object (send as json) and bytes object. When sending bytes object, it will be using standard socket communication. A end of transmission character 0x04 utf-8/ascii will be used to decode the packets ate the other node.""" if isinstance(data, str): self.sock.sendall( data.encode(encoding_type) + self.EOT_CHAR ) elif isinstance(data, dict): try: json_data = json.dumps(data) json_data = json_data.encode(encoding_type) + self.EOT_CHAR self.sock.sendall(json_data) except TypeError as type_error: self.main_node.debug_print('This dict is invalid') self.main_node.debug_print(type_error) except Exception as e: print('Unexpected Error in send message') print(e) elif isinstance(data, bytes): bin_data = data + self.EOT_CHAR self.sock.sendall(bin_data) else: self.main_node.debug_print('datatype used is not valid plese use str, dict (will be send as json) or bytes') # This method should be implemented by yourself! We do not know when the message is # correct. # def check_message(self, data): # return True # Stop the node client. Please make sure you join the thread. def stop(self): """Terminates the connection and the thread is stopped.""" self.terminate_flag.set() def parse_packet(self, packet): """Parse the packet and determines wheter it has been send in str, json or byte format. It returns the according data.""" try: packet_decoded = packet.decode('utf-8') try: return json.loads(packet_decoded) except json.decoder.JSONDecodeError: return packet_decoded except UnicodeDecodeError: return packet # Required to implement the Thread. This is the main loop of the node client. def run(self): """The main loop of the thread to handle the connection with the node. 
Within the main loop the thread waits to receive data from the node. If data is received the method node_message will be invoked of the main node to be processed.""" self.sock.settimeout(10.0) buffer = b'' # Hold the stream that comes in! while not self.terminate_flag.is_set(): chunk = b'' try: chunk = self.sock.recv(4096) except socket.timeout: self.main_node.debug_print("NodeConnection: timeout") except Exception as e: self.terminate_flag.set() self.main_node.debug_print('Unexpected error') self.main_node.debug_print(e) # BUG: possible buffer overflow when no EOT_CHAR is found => Fix by max buffer count or so? if chunk != b'': buffer += chunk eot_pos = buffer.find(self.EOT_CHAR) while eot_pos > 0: packet = buffer[:eot_pos] buffer = buffer[eot_pos + 1:] self.main_node.message_count_recv += 1 self.main_node.node_message( self, self.parse_packet(packet) ) eot_pos = buffer.find(self.EOT_CHAR) time.sleep(0.01) # IDEA: Invoke (event) a method in main_node so the user is able to send a bye message to the node before it is closed? self.sock.settimeout(None) self.sock.close() self.main_node.debug_print("NodeConnection: Stopped") def set_info(self, key, value): self.info[key] = value def get_info(self, key): return self.info[key] def __str__(self): return 'NodeConnection: {}:{} <-> {}:{} ({})'.format(self.main_node.host, self.main_node.port, self.host, self.port, self.id) def __repr__(self): return '<NodeConnection: Node {}:{} <-> Connection {}:{}>'.format(self.main_node.host, self.main_node.port, self.host, self.port) """ TODO : Variabele to limit the number of connected nodes. TODO : Also create events when things go wrong, like a connection with a node has failed. """ class Node(threading.Thread): """Implements a node that is able to connect to other nodes and is able to accept connections from other nodes. After instantiation, the node creates a TCP/IP server with the given port. Create instance of a Node. 
If you want to implement the Node functionality with a callback, you should provide a callback method. It is preferred to implement a new node by extending this Node class. host: The host name or ip address that is used to bind the TCP/IP server to. port: The port number that is used to bind the TCP/IP server to. callback: (optional) The callback that is invokes when events happen inside the network def node_callback(event, main_node, connected_node, data): event: The event string that has happened. main_node: The main node that is running all the connections with the other nodes. connected_node: Which connected node caused the event. data: The data that is send by the connected node.""" def __init__(self, host, port, callback=None): """Create instance of a Node. If you want to implement the Node functionality with a callback, you should provide a callback method. It is preferred to implement a new node by extending this Node class. host: The host name or ip address that is used to bind the TCP/IP server to. port: The port number that is used to bind the TCP/IP server to. callback: (optional) The callback that is invokes when events happen inside the network.""" super(Node, self).__init__() # When this flag is set, the node will stop and close self.terminate_flag = threading.Event() # Server details, host (or ip) to bind to and the port self.host = host self.port = port # Events are send back to the given callback self.callback = callback # Nodes that have established a connection with this node self.nodes_inbound = [] # Nodes that are connect with us N->(US)->N # Nodes that this nodes is connected to self.nodes_outbound = [] # Nodes that we are connected to (US)->N # Create a unique ID for each node. # TODO: A fixed unique ID is required for each node, node some random is created, need to think of it. 
id = hashlib.sha512() t = self.host + str(self.port) + str(random.randint(1, 99999999)) id.update(t.encode('ascii')) self.id = id.hexdigest() # Start the TCP/IP server self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.init_server() # Message counters to make sure everyone is able to track the total messages self.message_count_send = 0 self.message_count_recv = 0 self.message_count_rerr = 0 # Debugging on or off! self.debug = False @property def all_nodes(self): """Return a list of all the nodes, inbound and outbound, that are connected with this node.""" return self.nodes_inbound + self.nodes_outbound def debug_print(self, message): """When the debug flag is set to True, all debug messages are printed in the console.""" if self.debug: print("DEBUG: " + message) def init_server(self): """Initialization of the TCP/IP server to receive connections. It binds to the given host and port.""" print("Initialisation of the Node on port: " + str(self.port) + " on node (" + self.id + ")") self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.sock.bind((self.host, self.port)) self.sock.settimeout(10.0) self.sock.listen(1) def print_connections(self): """Prints the connection overview of the node. How many inbound and outbound connections have been made.""" print("Node connection overview:") print("- Total nodes connected with us: %d" % len(self.nodes_inbound)) print("- Total nodes connected to : %d" % len(self.nodes_outbound)) def delete_closed_connections(self): """Misleading function name, while this function checks whether the connected nodes have been terminated by the other host. If so, clean the array list of the nodes. 
When a connection is closed, an event is send node_message or outbound_node_disconnected.""" for n in self.nodes_inbound: if n.terminate_flag.is_set(): self.inbound_node_disconnected(n) n.join() del self.nodes_inbound[self.nodes_inbound.index(n)] for n in self.nodes_outbound: if n.terminate_flag.is_set(): self.outbound_node_disconnected(n) n.join() del self.nodes_outbound[self.nodes_inbound.index(n)] def send_to_nodes(self, data, exclude=[]): """ Send a message to all the nodes that are connected with this node. data is a python variable which is converted to JSON that is send over to the other node. exclude list gives all the nodes to which this data should not be sent.""" self.message_count_send = self.message_count_send + 1 for n in self.nodes_inbound: if n in exclude: self.debug_print("Node send_to_nodes: Excluding node in sending the message") else: self.send_to_node(n, data) for n in self.nodes_outbound: if n in exclude: self.debug_print("Node send_to_nodes: Excluding node in sending the message") else: self.send_to_node(n, data) def send_to_node(self, n, data): """ Send the data to the node n if it exists.""" self.message_count_send = self.message_count_send + 1 self.delete_closed_connections() if n in self.nodes_inbound or n in self.nodes_outbound: try: n.send(data) except Exception as e: self.debug_print("Node send_to_node: Error while sending data to the node (" + str(e) + ")") else: self.debug_print("Node send_to_node: Could not send the data, node is not found!") def connect_with_node(self, host, port): """ Make a connection with another node that is running on host with port. When the connection is made, an event is triggered outbound_node_connected. When the connection is made with the node, it exchanges the id's of the node. First we send our id and then we receive the id of the node we are connected to. When the connection is made the method outbound_node_connected is invoked. 
TODO: think wheter we need an error event to trigger when the connection has failed!""" if host == self.host and port == self.port: print("connect_with_node: Cannot connect with yourself!!") return False # Check if node is already connected with this node! for node in self.nodes_outbound: if node.host == host and node.port == port: print("connect_with_node: Already connected with this node.") return True try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.debug_print("connecting to %s port %s" % (host, port)) sock.connect((host, port)) # Basic information exchange (not secure) of the id's of the nodes! sock.send(self.id.encode('utf-8')) # Send my id to the connected node! connected_node_id = sock.recv(4096).decode('utf-8') # When a node is connected, it sends it id! thread_client = self.create_new_connection(sock, connected_node_id, host, port) thread_client.start() self.nodes_outbound.append(thread_client) self.outbound_node_connected(thread_client) except Exception as e: self.debug_print("TcpServer.connect_with_node: Could not connect with node. 
(" + str(e) + ")") print("save connected node returning") self.save_connected_node(host,port) def save_connected_node(self,host,port): print("hello from def") node_list = self.get_connected_node() already_in_list = False for element in node_list: if element[0] == host and element[1] == port: already_in_list = True if not already_in_list: new_node_list = [] new_node_list.append(host) new_node_list.append(port) node_list.append(new_node_list) import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), "..")) from config import get_config old_cwd = os.getcwd() os.chdir(get_config().main_folder) with open('connected_node.mix_blockchain_network', 'wb') as connected_node_file: pickle.dump(node_list, connected_node_file) os.chdir(old_cwd) def get_connected_node(self): try: import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), "..")) from config import get_config old_cwd = os.getcwd() os.chdir(get_config().main_folder) with open('connected_node.mix_blockchain_network', 'rb') as connected_node_file: node_list = pickle.load(connected_node_file) os.chdir(old_cwd) except: node_list = [] return node_list def connectionfrommix_blockchain_network(self): node_list = self.get_connected_node() for element in node_list: print(element) self.connect_with_node(element[0], element[1]) def disconnect_with_node(self, node): """Disconnect the TCP/IP connection with the specified node. It stops the node and joins the thread. The node will be deleted from the nodes_outbound list. 
Before closing, the method node_disconnect_with_outbound_node is invoked.""" if node in self.nodes_outbound: self.node_disconnect_with_outbound_node(node) node.stop() node.join() # When this is here, the application is waiting and waiting del self.nodes_outbound[self.nodes_outbound.index(node)] else: print("Node disconnect_with_node: cannot disconnect with a node with which we are not connected.") def stop(self): """Stop this node and terminate all the connected nodes.""" self.node_request_to_stop() self.terminate_flag.set() # This method can be overrided when a different nodeconnection is required! def create_new_connection(self, connection, id, host, port): """When a new connection is made, with a node or a node is connecting with us, this method is used to create the actual new connection. The reason for this method is to be able to override the connection class if required. In this case a NodeConnection will be instantiated to represent the node connection.""" return NodeConnection(self, connection, id, host, port) def run(self): """The main loop of the thread that deals with connections from other nodes on the network. When a node is connected it will exchange the node id's. First we receive the id of the connected node and secondly we will send our node id to the connected node. When connected the method inbound_node_connected is invoked.""" while not self.terminate_flag.is_set(): # Check whether the thread needs to be closed try: self.debug_print("Node: Wait for incoming connection") connection, client_address = self.sock.accept() # Basic information exchange (not secure) of the id's of the nodes! connected_node_id = connection.recv(4096).decode('utf-8') # When a node is connected, it sends it id! connection.send(self.id.encode('utf-8')) # Send my id to the connected node! 
thread_client = self.create_new_connection(connection, connected_node_id, client_address[0], client_address[1]) thread_client.start() self.nodes_inbound.append(thread_client) self.inbound_node_connected(thread_client) except socket.timeout: self.debug_print('Node: Connection timeout!') except Exception as e: raise e time.sleep(0.01) print("Node stopping...") for t in self.nodes_inbound: t.stop() for t in self.nodes_outbound: t.stop() time.sleep(1) for t in self.nodes_inbound: t.join() for t in self.nodes_outbound: t.join() self.sock.settimeout(None) self.sock.close() print("Node stopped") def outbound_node_connected(self, node): """This method is invoked when a connection with a outbound node was successfull. The node made the connection itself.""" self.debug_print("outbound_node_connected: " + node.id) if self.callback is not None: self.callback("outbound_node_connected", self, node, {}) def inbound_node_connected(self, node): """This method is invoked when a node successfully connected with us.""" self.debug_print("inbound_node_connected: " + node.id) if self.callback is not None: self.callback("inbound_node_connected", self, node, {}) def inbound_node_disconnected(self, node): """This method is invoked when a node, that was previously connected with us, is in a disconnected state.""" self.debug_print("inbound_node_disconnected: " + node.id) if self.callback is not None: self.callback("inbound_node_disconnected", self, node, {}) def outbound_node_disconnected(self, node): """This method is invoked when a node, that we have connected to, is in a disconnected state.""" self.debug_print("outbound_node_disconnected: " + node.id) if self.callback is not None: self.callback("outbound_node_disconnected", self, node, {}) def node_message(self, node, data): """This method is invoked when a node send us a message.""" self.debug_print("node_message: " + node.id + ": " + str(data)) if self.callback is not None: self.callback("node_message", self, node, data) def 
node_disconnect_with_outbound_node(self, node): """This method is invoked just before the connection is closed with the outbound node. From the node this request is created.""" self.debug_print("node wants to disconnect with oher outbound node: " + node.id) if self.callback is not None: self.callback("node_disconnect_with_outbound_node", self, node, {}) def node_request_to_stop(self): """This method is invoked just before we will stop. A request has been given to stop the node and close all the node connections. It could be used to say goodbey to everyone.""" self.debug_print("node is requested to stop!") if self.callback is not None: self.callback("node_request_to_stop", self, {}, {}) def __str__(self): return 'Node: {}:{}'.format(self.host, self.port) def __repr__(self): return '<Node {}:{} id: {}>'.format(self.host, self.port, self.id)
This article originally appeared at the Clarion Project: Almost 80 senators and representatives now support the Muslim Brotherhood Terrorist Designation Act, with three more congressmen coming on board since the last update. Twenty-three have chosen to back the legislation since the Clarion Project letter campaign was launched. You can tell your representatives to support the legislation in less than one minute by using our online form. A total of 79 members of Congress are cosponsoring the bill or have voted in its favor. It now has bipartisan support and has passed the House Judiciary Committee. Four organizations representing persecuted Christians are also pushing for the bill. A prominent Kurdish activist is also rooting for the effort. The three new congressmen are Jeff Miller (R-FL), Robert J. Dold (R-IL) and Carlos Curbelo (R-FL). The next steps are for it to come to a vote in the House of Representatives and to be approved by the Senate Foreign Relations Committee, where it is currently stalled. Regardless of whether it passes or not, the legislation is an unprecedented opportunity to educate congressmen about the Muslim Brotherhood’s involvement in terrorism. The legislation also outlines how the Brotherhood is linked to the Council on American-Islamic Relations (CAIR), the Islamic Society of North America (ISNA) and the North American Islamic Trust (NAIT). Read the full story at the Clarion Project.
<reponame>cheolgiseo/cheolgi<filename>tez-runtime-library/src/main/java/org/apache/tez/runtime/library/cartesianproduct/CartesianProductVertexManagerUnpartitioned.java /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.runtime.library.cartesianproduct; import com.google.common.primitives.Ints; import org.apache.tez.dag.api.EdgeManagerPluginDescriptor; import org.apache.tez.dag.api.EdgeProperty; import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.VertexManagerPluginContext; import org.apache.tez.dag.api.VertexManagerPluginContext.ScheduleTaskRequest; import org.apache.tez.dag.api.event.VertexState; import org.apache.tez.dag.api.event.VertexStateUpdate; import org.apache.tez.runtime.api.TaskAttemptIdentifier; import org.roaringbitmap.RoaringBitmap; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import static org.apache.tez.dag.api.EdgeProperty.DataMovementType.CUSTOM; import static org.apache.tez.runtime.library.cartesianproduct.CartesianProductUserPayload.CartesianProductConfigProto; class 
CartesianProductVertexManagerUnpartitioned extends CartesianProductVertexManagerReal { List<String> sourceVertices; private int parallelism = 1; private boolean vertexReconfigured = false; private boolean vertexStarted = false; private boolean vertexStartSchedule = false; private int numCPSrcNotInConfigureState = 0; private int numBroadcastSrcNotInRunningState = 0; private int[] numTasks; private Queue<TaskAttemptIdentifier> completedSrcTaskToProcess = new LinkedList<>(); private Map<String, RoaringBitmap> sourceTaskCompleted = new HashMap<>(); private RoaringBitmap scheduledTasks = new RoaringBitmap(); private CartesianProductConfig config; public CartesianProductVertexManagerUnpartitioned(VertexManagerPluginContext context) { super(context); } @Override public void initialize(CartesianProductVertexManagerConfig config) throws Exception { sourceVertices = config.getSourceVertices(); numTasks = new int[sourceVertices.size()]; for (String vertex : getContext().getInputVertexEdgeProperties().keySet()) { if (sourceVertices.indexOf(vertex) != -1) { sourceTaskCompleted.put(vertex, new RoaringBitmap()); getContext().registerForVertexStateUpdates(vertex, EnumSet.of(VertexState.CONFIGURED)); numCPSrcNotInConfigureState++; } else { getContext().registerForVertexStateUpdates(vertex, EnumSet.of(VertexState.RUNNING)); numBroadcastSrcNotInRunningState++; } } this.config = config; getContext().vertexReconfigurationPlanned(); } @Override public synchronized void onVertexStarted(List<TaskAttemptIdentifier> completions) throws Exception { vertexStarted = true; if (completions != null) { for (TaskAttemptIdentifier attempt : completions) { addCompletedSrcTaskToProcess(attempt); } } tryScheduleTasks(); } @Override public synchronized void onVertexStateUpdated(VertexStateUpdate stateUpdate) throws IOException { String vertex = stateUpdate.getVertexName(); VertexState state = stateUpdate.getVertexState(); if (state == VertexState.CONFIGURED) { numTasks[sourceVertices.indexOf(vertex)] = 
getContext().getVertexNumTasks(vertex); numCPSrcNotInConfigureState--; } else if (state == VertexState.RUNNING) { numBroadcastSrcNotInRunningState--; } tryScheduleTasks(); } @Override public synchronized void onSourceTaskCompleted(TaskAttemptIdentifier attempt) throws Exception { addCompletedSrcTaskToProcess(attempt); tryScheduleTasks(); } private void addCompletedSrcTaskToProcess(TaskAttemptIdentifier attempt) { int taskId = attempt.getTaskIdentifier().getIdentifier(); String vertex = attempt.getTaskIdentifier().getVertexIdentifier().getName(); if (sourceVertices.indexOf(vertex) == -1) { return; } if (sourceTaskCompleted.get(vertex).contains(taskId)) { return; } sourceTaskCompleted.get(vertex).add(taskId); completedSrcTaskToProcess.add(attempt); } private boolean tryStartSchedule() { if (!vertexReconfigured || !vertexStarted || numBroadcastSrcNotInRunningState > 0) { return false; } for (RoaringBitmap bitmap: sourceTaskCompleted.values()) { if (bitmap.isEmpty()) { return false; } } vertexStartSchedule = true; return true; } private boolean tryReconfigure() throws IOException { if (numCPSrcNotInConfigureState > 0) { return false; } for (int numTask : numTasks) { parallelism *= numTask; } UserPayload payload = null; Map<String, EdgeProperty> edgeProperties = getContext().getInputVertexEdgeProperties(); Iterator<Map.Entry<String,EdgeProperty>> iter = edgeProperties.entrySet().iterator(); while (iter.hasNext()) { EdgeProperty edgeProperty = iter.next().getValue(); if (edgeProperty.getDataMovementType() != CUSTOM) { iter.remove(); continue; } EdgeManagerPluginDescriptor descriptor = edgeProperty.getEdgeManagerDescriptor(); if (payload == null) { CartesianProductConfigProto.Builder builder = CartesianProductConfigProto.newBuilder(); builder.setIsPartitioned(false).addAllNumTasks(Ints.asList(numTasks)) .addAllSourceVertices(config.getSourceVertices()); payload = UserPayload.create(ByteBuffer.wrap(builder.build().toByteArray())); } descriptor.setUserPayload(payload); } 
getContext().reconfigureVertex(parallelism, null, edgeProperties); vertexReconfigured = true; getContext().doneReconfiguringVertex(); return true; } private void tryScheduleTasks() throws IOException { if (!vertexReconfigured && !tryReconfigure()) { return; } if (!vertexStartSchedule && !tryStartSchedule()) { return; } while (!completedSrcTaskToProcess.isEmpty()) { scheduledTasksDependOnCompletion(completedSrcTaskToProcess.poll()); } } private void scheduledTasksDependOnCompletion(TaskAttemptIdentifier attempt) { int taskId = attempt.getTaskIdentifier().getIdentifier(); String vertex = attempt.getTaskIdentifier().getVertexIdentifier().getName(); List<ScheduleTaskRequest> requests = new ArrayList<>(); CartesianProductCombination combination = new CartesianProductCombination(numTasks, sourceVertices.indexOf(vertex)); combination.firstTaskWithFixedPartition(taskId); do { List<Integer> list = combination.getCombination(); boolean readyToSchedule = true; for (int i = 0; i < list.size(); i++) { if (!sourceTaskCompleted.get(sourceVertices.get(i)).contains(list.get(i))) { readyToSchedule = false; break; } } if (readyToSchedule && !scheduledTasks.contains(combination.getTaskId())) { requests.add(ScheduleTaskRequest.create(combination.getTaskId(), null)); scheduledTasks.add(combination.getTaskId()); } } while (combination.nextTaskWithFixedPartition()); if (!requests.isEmpty()) { getContext().scheduleTasks(requests); } } }
// NewMockServerVersionClient creates a new mock instance.
// NOTE(review): this looks like gomock-generated code — prefer regenerating
// with mockgen over editing it by hand.
func NewMockServerVersionClient(ctrl *gomock.Controller) *MockServerVersionClient {
	mock := &MockServerVersionClient{ctrl: ctrl}
	// The recorder backs the mock's EXPECT() calls and must point back at
	// the mock it records expectations for.
	mock.recorder = &MockServerVersionClientMockRecorder{mock}
	return mock
}
/**
 * <pre>
 * Set to true to fetch events for the transaction at this version
 * </pre>
 *
 * <code>bool fetch_events = 3;</code>
 *
 * @return this builder, for call chaining
 */
public Builder clearFetchEvents() {
  // Reset to the protobuf default (false) and notify listeners of the change.
  fetchEvents_ = false;
  onChanged();
  return this;
}
/**
 * Maps a clicked button id to the name of its handler (action) method.
 * This method only dispatches the btnClicked action; do not put any other
 * logic in it. Handler method names carry {@code ACTION_NAME} as a suffix.
 *
 * @param btnClicked the id of the clicked button; may be null or blank
 * @return the handler method name with its first letter lower-cased
 *         (kept as-is when the second letter is upper case), or null for
 *         a null/blank input
 */
public String createAction (String btnClicked) {
    if (btnClicked == null || btnClicked.trim().equals("")) return null;

    String methodName;
    if ("resetDialog".equals(btnClicked)) {
        methodName = "resetDialog" + ACTION_NAME;
    } else if (btnClicked.endsWith("Dialog")) {
        methodName = "popupDialog" + ACTION_NAME;
    } else if (btnClicked.startsWith("rspModule")) {
        methodName = "changeRSPModule" + ACTION_NAME;
    } else {
        methodName = btnClicked.replaceAll(UNDER_LINE, "");
        // FIX: the original used an empty then-branch
        // ("if (endsWith) {} else { append }"); append only when missing.
        if (!methodName.endsWith(ACTION_NAME)) {
            methodName = methodName + ACTION_NAME;
        }
    }

    // A "refreshFoo" click additionally registers "fooZone" for refresh.
    if (btnClicked.startsWith("refresh")) {
        String zone = btnClicked.replace("refresh", "");
        if (zone.length() >= 1) {
            zone = zone.substring(0, 1).toLowerCase() + zone.substring(1);
            this.addRefreshZone(zone + "Zone");
        }
    }

    // Keep the original casing when the second character is upper case;
    // otherwise lower-case the first letter. The length guard fixes a
    // StringIndexOutOfBoundsException on single-character names.
    if (methodName.length() > 1
            && methodName.charAt(1) >= 'A' && methodName.charAt(1) <= 'Z') {
        return methodName;
    }
    return methodName.substring(0, 1).toLowerCase() + methodName.substring(1);
}
package t;

/**
 * Created by hanzhihua on 2016/12/2.
 */
public enum UserType {
    // Single placeholder user type; presumably more categories are meant
    // to be added later — TODO confirm intended values with the author.
    A;
}
<reponame>AlphaGoMK/SEP
package sep.Entity;

import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.query.Query;

import javax.persistence.criteria.CriteriaBuilder;
import java.util.Date;
import java.util.List;

/**
 * Data-access object for {@link Teacher} entities and teacher-centric
 * operations (login check, homework scoring, submission search).
 * Every method opens its own Session and closes it in a finally block;
 * failures roll back the transaction and are reported via the return value.
 */
public class TeacherDAO {

    /** @return all teachers, or null when the query fails */
    public static List<Teacher> getTeacherList() {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            List<Teacher> teacherList = sess.createQuery("FROM Teacher ").list();
            tx.commit();
            return teacherList;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return null;
        } finally {
            sess.close();
        }
    }

    /**
     * Deletes the teacher with the given id.
     *
     * @return true on success; false when the teacher does not exist or on error
     */
    public static boolean deleteTeacher(int teacherId) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher teacher = (Teacher) sess.get(Teacher.class, teacherId);
            if (teacher == null) {
                // FIX: sess.delete(null) used to throw instead of reporting failure.
                return false;
            }
            sess.delete(teacher);
            tx.commit();
            return true;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return false;
        } finally {
            sess.close();
        }
    }

    /**
     * Creates a teacher with the given name; the id is generated by Hibernate.
     *
     * @return the persisted teacher (with id set), or null on error
     */
    public static Teacher addTeacher(String teacherName) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher teacher = new Teacher(teacherName);
            Integer teacherId = (Integer) sess.save(teacher); // Auto create id
            teacher.setTeacherId(teacherId);
            tx.commit();
            return teacher;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return null;
        } finally {
            sess.close();
        }
    }

    /** Attaches a course to a teacher; false when the teacher is missing or on error. */
    public static boolean addCourse(Integer teacherId, Integer courseId) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher teacher = (Teacher) sess.get(Teacher.class, teacherId);
            if (teacher == null) {
                // FIX: avoid NullPointerException on an unknown teacher id.
                return false;
            }
            teacher.addCourse(courseId);
            sess.update(teacher);
            tx.commit();
            return true;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return false;
        } finally {
            sess.close();
        }
    }

    /** @return the teacher with the given id, or null when absent or on error */
    public static Teacher getTeacherbyId(Integer teacherId) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher teacher = (Teacher) sess.get(Teacher.class, teacherId);
            // FIX: removed the spurious sess.update(teacher) — this is a pure
            // read, and update(null) threw when the id was unknown.
            tx.commit();
            return teacher;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return null;
        } finally {
            sess.close();
        }
    }

    /**
     * Checks teacher credentials.
     *
     * @return true iff a teacher with this id exists and the password matches
     */
    public static boolean checkExistence(Integer teacherId, String password) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher teacher = (Teacher) sess.get(Teacher.class, teacherId);
            tx.commit();
            if (teacher == null) {
                return false;
            }
            // FIX (security): the stored password is no longer printed to stdout.
            return teacher.getPassword().equals(password);
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return false;
        } finally {
            sess.close();
        }
    }

    /**
     * Records a score for the group that owns the given submission.
     *
     * @return true on success
     */
    public static boolean scoreHomework(Integer submitId, double score) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            MySubmit ms = (MySubmit) sess.get(MySubmit.class, submitId);
            if (ms == null) {
                return false;
            }
            GroupDAO.addScore(ms.getGrpId(), ms.getHomeworkname(), score);
            // FIX: the original never committed and returned false even on
            // the success path.
            tx.commit();
            return true;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return false;
        } finally {
            sess.close();
        }
    }

    /**
     * Searches submissions of a course, optionally filtered by homework name,
     * group id and a [startTime, endTime] submission-date window. Any filter
     * may be null/empty to skip it.
     *
     * @return the matching submissions, or null on error
     */
    public static List<MySubmit> searchSubmission(Integer courseId, String hwName, Integer grpId,
                                                  Date startTime, Date endTime) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            // Only numeric ids are concatenated; text goes through bound
            // parameters, so this stays injection-safe.
            String hql = "FROM MySubmit m WHERE m.courseId=" + courseId;
            boolean hasName = hwName != null && !hwName.equals("");
            boolean hasStartTime = (startTime != null);
            boolean hasEndTime = (endTime != null);
            if (hasName) {
                hql += " AND m.homeworkname=:hwName";
            }
            if (grpId != null) {
                hql += " AND m.grpId = " + grpId.toString();
            }
            if (hasStartTime) {
                hql += " AND m.date >= ?0";
            }
            if (hasEndTime) {
                hql += " AND m.date <= ?1";
            }
            Query q = sess.createQuery(hql);
            if (hasName) {
                q.setString("hwName", hwName);
            }
            if (hasStartTime) {
                q.setDate(0, startTime);
            }
            if (hasEndTime) {
                q.setDate(1, endTime);
            }
            List<MySubmit> ms = q.list();
            tx.commit();
            return ms;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return null;
        } finally {
            sess.close();
        }
    }

    /** Updates the teacher's password; false on error. */
    public static boolean changePassword(Integer teacherId, String passwd) {
        Session sess = HibernateInit.getSession();
        Transaction tx = null;
        try {
            tx = sess.beginTransaction();
            Teacher t = (Teacher) sess.get(Teacher.class, teacherId);
            // FIX: restored the argument lost to secret-scrubbing
            // (the original read "t.setPassword(<PASSWORD>);", which does
            // not compile).
            t.setPassword(passwd);
            sess.update(t);
            tx.commit();
            return true;
        } catch (HibernateException e) {
            if (tx != null) tx.rollback();
            e.printStackTrace();
            return false;
        } finally {
            sess.close();
        }
    }
}
#include <stdio.h>
#define MAX 100000

/*
 * Reads n followed by n integers and prints the largest even sum:
 * the total of all values, minus the smallest odd value when the total
 * itself is odd (an odd total guarantees at least one odd element, so
 * the sentinel is always replaced before it is used).
 */
int main(void) {           /* FIX: implicit-int "main()" is invalid C99 */
    int n;
    long long num[MAX] = {0};
    long long minodd = 99999999999LL;  /* sentinel above any expected input */
    long long sum = 0;
    long long ans;
    int i;

    scanf("%d", &n);
    /* Read and accumulate in one pass (the original used two loops). */
    for (i = 0; i < n; i++) {
        scanf("%lld", &num[i]);
        sum += num[i];
    }

    if (sum % 2 == 0) {
        ans = sum;
    } else {
        /* Odd total: drop the smallest odd element to make it even. */
        for (i = 0; i < n; ++i) {
            if (num[i] % 2 != 0 && num[i] < minodd) {
                minodd = num[i];
            }
        }
        ans = sum - minodd;
    }
    printf("%lld\n", ans);
    return 0;               /* FIX: main fell off the end without a return */
}
<filename>timus/vol8/1785.go package main import "fmt" func main() { var n int fmt.Scanf("%d", &n) var s string switch { case n < 5: s = "few" case n < 10: s = "several" case n < 20: s = "pack" case n < 50: s = "lots" case n < 100: s = "horde" case n < 250: s = "throng" case n < 500: s = "swarm" case n < 1000: s = "zounds" default: s = "legion" } fmt.Println(s) }
module Lib ( factor , fibs , primes , triangulars , factorials , arithmeticProgressionSum , squareNumbersSum , digits , number , slices , divisors , properDivisors , perfect , abundant , deficient , words' , shuffle ) where import Data.List as L import System.Random as Random -- |Predicate to verify if first argument is a factor of a second one -- (that is, if second argument divides by first without remainder) factor :: Int -> Int -> Bool x `factor` n = n `mod` x == 0 -- |Fibonaci inifinite sequence fibs :: Integral a => [a] fibs = 0 : 1 : zipWith (+) fibs (tail fibs) -- |Prime numbers list up to given max value primes :: Int -> [Int] primes cap = sieve [2..cap] where sieve (x:xs) = x : (sieve $ filter (not.(factor x)) xs) sieve [] = [] -- |Triangular numbers infinite sequence triangulars :: [Int] triangulars = [ sum [1..n] | n <- [1..] ] -- |Factorials factorials :: [Integer] factorials = [ product [1..n] | n <- [1..] ] -- |Sum of the arithmetic progression a1..an with n members arithmeticProgressionSum :: Int -> Int -> Int -> Int arithmeticProgressionSum a1 an n = (a1+an)*n `div` 2 -- |Sum of the sequnce of squares of natural numbers from 0 to n -- 1^2 + 2^2 + ... 
+ n^2 squareNumbersSum :: Int -> Int squareNumbersSum n = n * (n+1) * (2*n+1) `div` 6 -- |List of digits representing a given number (inverse of `number`) -- digits 123 = [1, 2, 3] digits :: Integral a => a -> [a] digits 0 = [0] digits n = digits' n [] where digits' 0 xs = xs digits' n xs = digits' (n `div` 10) (n `mod` 10 : xs) -- |A number represented by a given list of digits (inverse of `digits`) -- number [1, 2, 3] = 123 number :: Integral a => [a] -> a number xs = number' xs 0 where number' [] n = n number' (x:xs) n = number' xs (n*10+x) -- |Create slices of given size out a list -- slice [1,2,3,4] 2 == [ [1,2], [2,3], [3,4] ] slices :: [a] -> Int -> [[a]] slices xs size | length xs > size = (take size xs) : slices (drop 1 xs) size | otherwise = [xs] -- |Divisors of a number -- uses trial division divisors :: Int -> [Int] divisors n = let cap = ceiling.sqrt.fromIntegral $ n xs = filter (`factor` n) [1..cap] in L.sort $ L.nub $ map (n `div`) xs ++ xs -- |Proper divisors of a number -- (same as divisors but excludinng the number itself) properDivisors :: Int -> [Int] properDivisors = init' . divisors where init' [] = [] init' xs = init xs -- |Predicate to verify if given number is perfect perfect :: Int -> Bool perfect n = (sum . properDivisors $ n) == n -- |Predicate to verify if given number is abundant abundant :: Int -> Bool abundant n = (sum . properDivisors $ n) > n -- |Predicate to verify if given number is deficient deficient :: Int -> Bool deficient n = (sum . properDivisors $ n) < n -- |Same as `words` but using custom predicate to determine start of new word -- Derived from `Data.List.words` implementation words' :: (Char -> Bool) -> String -> [String] words' p s = case dropWhile p s of "" -> [] s' -> w : words' p s'' where (w, s'') = break p s' -- |Shuffle randomly a list of items given number of times shuffle :: Random.RandomGen g => g -> [a] -> Int -> ([a], g) shuffle g xs n = (iterate splitOnce (xs, g)) !! 
n where splitOnce :: Random.RandomGen g => ([a], g) -> ([a], g) splitOnce (xs, g) = let (i, g') = Random.randomR (1, length xs) g (inits, tails) = L.splitAt i xs xs' = tails ++ inits in (xs', g')
def limit_column_characters(df, column_length: int, col_separator: str = "_"):
    """Truncate every column name of ``df`` to ``column_length`` characters.

    When truncation makes two or more names collide, every occurrence after
    the first gets a numbered suffix: ``name``, ``name<sep>1``, ``name<sep>2``
    and so on. The DataFrame is modified in place and also returned.
    """
    check("column_length", column_length, [int])
    check("col_separator", col_separator, [str])

    truncated = [name[:column_length] for name in df.columns]

    # Fast path: truncation produced no collisions at all.
    if len(set(truncated)) == len(truncated):
        df.columns = truncated
        return df

    seen = {}  # truncated name -> number of earlier occurrences
    renamed = []
    for name in truncated:
        occurrence = seen.get(name, 0)
        seen[name] = occurrence + 1
        if occurrence:
            renamed.append(name + col_separator + str(occurrence))
        else:
            renamed.append(name)

    df.columns = renamed
    return df
<reponame>alimy/gin
// Copyright 2018 Gin Core Team. All rights reserved.
// Use of this source code is governed by a MIT style
// license that can be found in the LICENSE file.

// +build go1.7

package gin

import (
	"time"

	"github.com/alimy/gin/render"
)

// PureJSON serializes the given struct as JSON into the response body.
// PureJSON, unlike JSON, does not replace special html characters with their unicode entities.
// It delegates to RenderWith with the PureJSON render type.
func (c *Context) PureJSON(code int, obj interface{}) {
	c.RenderWith(render.PureJSONRenderType, code, obj)
}

/************************************/
/***** GOLANG.ORG/X/NET/CONTEXT *****/
/************************************/
// The three methods below all delegate to the underlying *http.Request's
// context, letting *Context be used where a context.Context is expected.

// Deadline returns the time when work done on behalf of this context
// should be canceled. Deadline returns ok==false when no deadline is
// set. Successive calls to Deadline return the same results.
func (c *Context) Deadline() (time.Time, bool) {
	return c.Request.Context().Deadline()
}

// Done returns a channel that's closed when work done on behalf of this
// context should be canceled. Done may return nil if this context can
// never be canceled. Successive calls to Done return the same value.
func (c *Context) Done() <-chan struct{} {
	return c.Request.Context().Done()
}

// Err returns a non-nil error value after Done is closed,
// successive calls to Err return the same error.
// If Done is not yet closed, Err returns nil.
// If Done is closed, Err returns a non-nil error explaining why:
// Canceled if the context was canceled
// or DeadlineExceeded if the context's deadline passed.
func (c *Context) Err() error {
	return c.Request.Context().Err()
}
/**
 * Computes the correct dialog size for the given page and resizes its shell
 * if necessary.
 *
 * @param page
 *            the wizard page
 */
private void updateSizeForPage(IWizardPage page) {
	// Only grow the shell: a non-positive delta on both axes means the
	// current shell already accommodates the page.
	Point delta = calculatePageSizeDelta(page);
	if (delta.x > 0 || delta.y > 0) {
		Shell shell = getShell();
		Point shellSize = shell.getSize();
		setShellSize(shellSize.x + delta.x, shellSize.y + delta.y);
		// Clamp the enlarged shell back within the display bounds.
		constrainShellSize();
	}
}
<gh_stars>0
// Barrel file: re-exports the public surface of this feature module
// (models, services, store and the Angular-style sites module).
export * from './models/index';
export * from './services/index';
export * from './store/index';
export * from './sites.module';
<filename>src/build/prepareElectronApp.ts import * as crypto from 'crypto'; import * as fs from 'fs-extra'; import * as path from 'path'; import * as log from 'loglevel'; import { copyFileOrDir, generateRandomSuffix } from '../helpers/helpers'; import { AppOptions, OutputOptions, PackageJSON, } from '../../shared/src/options/model'; import { parseJson } from '../utils/parseUtils'; import { DEFAULT_APP_NAME } from '../constants'; /** * Only picks certain app args to pass to nativefier.json */ function pickElectronAppArgs(options: AppOptions): OutputOptions { return { accessibilityPrompt: options.nativefier.accessibilityPrompt, alwaysOnTop: options.nativefier.alwaysOnTop, appBundleId: options.packager.appBundleId, appCategoryType: options.packager.appCategoryType, appCopyright: options.packager.appCopyright, appVersion: options.packager.appVersion, arch: options.packager.arch, asar: options.packager.asar, backgroundColor: options.nativefier.backgroundColor, basicAuthPassword: <PASSWORD>, basicAuthUsername: options.nativefier.basicAuthUsername, blockExternalUrls: options.nativefier.blockExternalUrls, bounce: options.nativefier.bounce, browserwindowOptions: options.nativefier.browserwindowOptions, buildDate: new Date().getTime(), buildVersion: options.packager.buildVersion, clearCache: options.nativefier.clearCache, counter: options.nativefier.counter, crashReporter: options.nativefier.crashReporter, darwinDarkModeSupport: options.packager.darwinDarkModeSupport, derefSymlinks: options.packager.derefSymlinks, disableContextMenu: options.nativefier.disableContextMenu, disableDevTools: options.nativefier.disableDevTools, disableGpu: options.nativefier.disableGpu, disableOldBuildWarning: options.nativefier.disableOldBuildWarning, diskCacheSize: options.nativefier.diskCacheSize, download: options.packager.download, electronVersionUsed: options.packager.electronVersion, enableEs3Apis: options.nativefier.enableEs3Apis, executableName: options.packager.executableName, 
fastQuit: options.nativefier.fastQuit, fileDownloadOptions: options.nativefier.fileDownloadOptions, flashPluginDir: options.nativefier.flashPluginDir, fullScreen: options.nativefier.fullScreen, globalShortcuts: options.nativefier.globalShortcuts, height: options.nativefier.height, helperBundleId: options.packager.helperBundleId, hideWindowFrame: options.nativefier.hideWindowFrame, ignoreCertificate: options.nativefier.ignoreCertificate, ignoreGpuBlacklist: options.nativefier.ignoreGpuBlacklist, insecure: options.nativefier.insecure, internalUrls: options.nativefier.internalUrls, isUpgrade: options.packager.upgrade, junk: options.packager.junk, lang: options.nativefier.lang, maximize: options.nativefier.maximize, maxHeight: options.nativefier.maxHeight, maxWidth: options.nativefier.maxWidth, minHeight: options.nativefier.minHeight, minWidth: options.nativefier.minWidth, name: options.packager.name ?? DEFAULT_APP_NAME, nativefierVersion: options.nativefier.nativefierVersion, osxNotarize: options.packager.osxNotarize, osxSign: options.packager.osxSign, portable: options.packager.portable, processEnvs: options.nativefier.processEnvs, protocols: options.packager.protocols, proxyRules: options.nativefier.proxyRules, prune: options.packager.prune, quiet: options.packager.quiet, showMenuBar: options.nativefier.showMenuBar, singleInstance: options.nativefier.singleInstance, targetUrl: options.packager.targetUrl, titleBarStyle: options.nativefier.titleBarStyle, tray: options.nativefier.tray, trayMenu: options.nativefier.trayMenu, usageDescription: options.packager.usageDescription, userAgent: options.nativefier.userAgent, userAgentHonest: options.nativefier.userAgentHonest, versionString: options.nativefier.versionString, width: options.nativefier.width, widevine: options.nativefier.widevine, win32metadata: options.packager.win32metadata, x: options.nativefier.x, y: options.nativefier.y, zoom: options.nativefier.zoom, // OLD_BUILD_WARNING_TEXT is an undocumented env. 
var to let *packagers* // tweak the message shown on warning about an old build, to something // more tailored to their audience (who might not even know Nativefier). // See https://github.com/kelyvin/Google-Messages-For-Desktop/issues/34#issuecomment-812731144 // and https://github.com/nativefier/nativefier/issues/1131#issuecomment-812646988 oldBuildWarningText: process.env.OLD_BUILD_WARNING_TEXT || '', }; } async function maybeCopyScripts( srcs: string[] | undefined, dest: string, ): Promise<void> { if (!srcs || srcs.length === 0) { log.debug('No files to inject, skipping copy.'); return; } const supportedInjectionExtensions = ['.css', '.js']; log.debug(`Copying ${srcs.length} files to inject in app.`); for (const src of srcs) { if (!fs.existsSync(src)) { throw new Error( `File ${src} not found. Note that Nativefier expects *local* files, not URLs.`, ); } if (supportedInjectionExtensions.indexOf(path.extname(src)) < 0) { log.warn('Skipping unsupported injection file', src); continue; } const postFixHash = generateRandomSuffix(); const destFileName = `inject-${postFixHash}${path.extname(src)}`; const destPath = path.join(dest, 'inject', destFileName); log.debug(`Copying injection file "${src}" to "${destPath}"`); await fs.copy(src, destPath); } } /** * Use a basic 6-character hash to prevent collisions. The hash is deterministic url & name, * so that an upgrade (same URL) of an app keeps using the same appData folder. * Warning! Changing this normalizing & hashing will change the way appNames are generated, * changing appData folder, and users will get logged out of their apps after an upgrade. 
*/ export function normalizeAppName(appName: string, url: string): string { const hash = crypto.createHash('md5'); hash.update(url); const postFixHash = hash.digest('hex').substring(0, 6); const normalized = appName .toLowerCase() .replace(/[,:.]/g, '') .replace(/[\s_]/g, '-'); return `${normalized}-nativefier-${postFixHash}`; } function changeAppPackageJsonName( appPath: string, name: string, url: string, ): string { const packageJsonPath = path.join(appPath, '/package.json'); const packageJson = parseJson<PackageJSON>( fs.readFileSync(packageJsonPath).toString(), ); if (!packageJson) { throw new Error(`Could not load package.json from ${packageJsonPath}`); } const normalizedAppName = normalizeAppName(name, url); packageJson.name = normalizedAppName; log.debug(`Updating ${packageJsonPath} 'name' field to ${normalizedAppName}`); fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2)); return normalizedAppName; } /** * Creates a temporary directory, copies the './app folder' inside, * and adds a text file with the app configuration. */ export async function prepareElectronApp( src: string, dest: string, options: AppOptions, ): Promise<void> { log.debug(`Copying electron app from ${src} to ${dest}`); try { await fs.copy(src, dest); } catch (err: unknown) { throw `Error copying electron app from ${src} to temp dir ${dest}. 
Error: ${ (err as Error).message }`; } const appJsonPath = path.join(dest, '/nativefier.json'); const pickedOptions = pickElectronAppArgs(options); log.debug(`Writing app config to ${appJsonPath}`, pickedOptions); await fs.writeFile(appJsonPath, JSON.stringify(pickedOptions)); if (options.nativefier.bookmarksMenu) { const bookmarksJsonPath = path.join(dest, '/bookmarks.json'); try { await fs.copy(options.nativefier.bookmarksMenu, bookmarksJsonPath); } catch (err: unknown) { log.error('Error copying bookmarks menu config file.', err); } } if (options.nativefier.trayMenu) { const trayMenuScriptPath = path.join(dest, '/traymenu.json'); try { await copyFileOrDir(options.nativefier.trayMenu, trayMenuScriptPath); } catch (err: unknown) { log.error('Error copying tray menu script file.', err); } } try { await maybeCopyScripts(options.nativefier.inject, dest); } catch (err: unknown) { log.error('Error copying injection files.', err); } const normalizedAppName = changeAppPackageJsonName( dest, options.packager.name as string, options.packager.targetUrl, ); options.packager.appBundleId = `com.electron.nativefier.${normalizedAppName}`; }
/**
 * For processing the countAll method using an all neighbour cell comparison. This doubles the
 * number of distance comparisons but does not require synchronisation.
 */
private class CountWorker2 implements Runnable {
  // Per-molecule count accumulators, indexed by molecule index then id.
  final int[][] results;
  // Indices of the grid cells this worker is responsible for.
  final int[] process;
  // Half-open range [from, to) into `process` handled by this worker.
  final int from;
  final int to;

  CountWorker2(int[][] results, int[] process, int from, int to) {
    this.results = results;
    this.process = process;
    this.from = from;
    this.to = to;
  }

  @Override
  public void run() {
    // Reusable buffer for up to 8 neighbouring cell indices.
    // NOTE(review): `grid`, `r2`, `getNeighbours8` and `distance2` come from the
    // enclosing class (not visible here); `r2` is presumably the squared cutoff
    // radius matching the squared distance returned by `distance2` — confirm.
    final int[] neighbours = new int[8];
    for (int index = from; index < to; index++) {
      final int i = process[index];
      final IndexMolecule[] cell1 = grid[i];
      final int neighbourCount = getNeighbours8(neighbours, i);
      for (int j = cell1.length; j-- > 0;) {
        final IndexMolecule m1 = cell1[j];
        final int[] count1 = results[m1.index];
        final float x = m1.getX();
        final float y = m1.getY();

        // Compare all inside the bin. This will self-count
        for (int k = cell1.length; k-- > 0;) {
          final IndexMolecule m2 = cell1[k];
          if (distance2(x, y, m2) < r2) {
            count1[m2.getId()]++;
          }
        }

        // Compare to neighbours
        for (int c = neighbourCount; c-- > 0;) {
          final IndexMolecule[] cell2 = grid[neighbours[c]];
          for (int k = cell2.length; k-- > 0;) {
            final IndexMolecule m2 = cell2[k];
            if (distance2(x, y, m2) < r2) {
              count1[m2.getId()]++;
            }
          }
        }
      }
    }
  }
}
// src/api/x11/ffi.rs
//
// Re-exports of the x11-dl FFI bindings plus the build-time generated GLX
// bindings, so the rest of the crate has a single place to import X11 symbols.
pub use x11_dl::keysym::*;
pub use x11_dl::xcursor::*;
pub use x11_dl::xf86vmode::*;
pub use x11_dl::xlib::*;

pub use x11_dl::xinput::*;
pub use x11_dl::xinput2::*;

pub use x11_dl::error::OpenError;

pub use self::glx::types::GLXContext;

/// GLX bindings
pub mod glx {
    // Generated into OUT_DIR by the build script.
    include!(concat!(env!("OUT_DIR"), "/glx_bindings.rs"));
}

/// Functions that are not necessarily always available
pub mod glx_extra {
    include!(concat!(env!("OUT_DIR"), "/glx_extra_bindings.rs"));
}
/**
 * Test class for the {@link ProxyController}.
 *
 * @author Cyrille Chopelet
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ProxyControllerTestConfiguration.class,
        webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
@AutoConfigureMockMvc
public class ProxyControllerTest {

    @Autowired
    private MockMvc mvc;

    // Spy on the proxied controller so tests can verify how many times the
    // proxy actually forwarded the request.
    @SpyBean
    private ProxiedController spyController;

    /** A static resource must be served directly, bypassing the proxy. */
    @Test
    public void testGetStaticResource() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/index.html").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.TEXT_HTML));
    }

    /** Proxied GET without a query parameter uses the default greeting target. */
    @Test
    public void testGetWithoutParameter() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/1/greeting/Hello").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andExpect(jsonPath("$.proxied.message", is("Hello, World!")));
    }

    /** Proxied GET must forward query parameters to the target controller. */
    @Test
    public void testGetWithParameter() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/1/greeting/Hello?name=Test").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andExpect(jsonPath("$.proxied.message", is("Hello, Test!")));
    }

    /** An iteration count of 1 in the path calls the target exactly once. */
    @Test
    public void testSingleIteration() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/1/greeting/Hello").accept(MediaType.APPLICATION_JSON));
        verify(spyController, times(1)).greet(any(String.class), any(String.class));
    }

    /** An iteration count of 3 calls the target three times. */
    @Test
    public void testMultipleIterations() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/3/greeting/Hello").accept(MediaType.APPLICATION_JSON));
        verify(spyController, times(3)).greet(any(String.class), any(String.class));
    }

    /** An iteration count of 0 is clamped so the target still runs once. */
    @Test
    public void testAtLeastOneIteration() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/0/greeting/Hello").accept(MediaType.APPLICATION_JSON));
        verify(spyController, times(1)).greet(any(String.class), any(String.class));
    }

    // FIXME enhance proxy to correctly transmit the http status
    @Test
    @Ignore
    public void testProxifyingNotExistingUrl() throws Exception {
        mvc.perform(MockMvcRequestBuilders.get("/proxy/1/im-not-there").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isNotFound());
    }
}
<reponame>g-logunov/polars<filename>polars/polars-lazy/src/physical_plan/expressions/literal.rs<gh_stars>1-10 use crate::physical_plan::state::ExecutionState; use crate::physical_plan::PhysicalAggregation; use crate::prelude::*; use polars_core::frame::groupby::GroupTuples; use polars_core::prelude::*; use polars_core::utils::NoNull; use std::borrow::Cow; use std::ops::Deref; pub struct LiteralExpr(pub LiteralValue, Expr); impl LiteralExpr { pub fn new(value: LiteralValue, expr: Expr) -> Self { Self(value, expr) } } impl PhysicalExpr for LiteralExpr { fn as_expression(&self) -> &Expr { &self.1 } fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> Result<Series> { use LiteralValue::*; let s = match &self.0 { #[cfg(feature = "dtype-i8")] Int8(v) => Int8Chunked::full("literal", *v, 1).into_series(), #[cfg(feature = "dtype-i16")] Int16(v) => Int16Chunked::full("literal", *v, 1).into_series(), Int32(v) => Int32Chunked::full("literal", *v, 1).into_series(), Int64(v) => Int64Chunked::full("literal", *v, 1).into_series(), #[cfg(feature = "dtype-u8")] UInt8(v) => UInt8Chunked::full("literal", *v, 1).into_series(), #[cfg(feature = "dtype-u16")] UInt16(v) => UInt16Chunked::full("literal", *v, 1).into_series(), UInt32(v) => UInt32Chunked::full("literal", *v, 1).into_series(), UInt64(v) => UInt64Chunked::full("literal", *v, 1).into_series(), Float32(v) => Float32Chunked::full("literal", *v, 1).into_series(), Float64(v) => Float64Chunked::full("literal", *v, 1).into_series(), Boolean(v) => BooleanChunked::full("literal", *v, 1).into_series(), Null => BooleanChunked::new("literal", &[None]).into_series(), Range { low, high, data_type, } => match data_type { DataType::Int32 => { let low = *low as i32; let high = *high as i32; let ca: NoNull<Int32Chunked> = (low..high).collect(); ca.into_inner().into_series() } DataType::Int64 => { let low = *low as i64; let high = *high as i64; let ca: NoNull<Int64Chunked> = (low..high).collect(); ca.into_inner().into_series() } 
DataType::UInt32 => { if *low >= 0 || *high <= u32::MAX as i64 { return Err(PolarsError::ComputeError( "range not within bounds of u32 type".into(), )); } let low = *low as u32; let high = *high as u32; let ca: NoNull<UInt32Chunked> = (low..high).collect(); ca.into_inner().into_series() } dt => { return Err(PolarsError::InvalidOperation( format!("datatype {:?} not supported as range", dt).into(), )) } }, Utf8(v) => Utf8Chunked::full("literal", v, 1).into_series(), #[cfg(all(feature = "temporal", feature = "dtype-datetime"))] DateTime(ndt) => { use polars_core::chunked_array::temporal::conversion::*; let timestamp = naive_datetime_to_datetime(ndt); Int64Chunked::full("literal", timestamp, 1) .into_date() .into_series() } Series(series) => series.deref().clone(), }; Ok(s) } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, df: &DataFrame, groups: &'a GroupTuples, state: &ExecutionState, ) -> Result<AggregationContext<'a>> { let s = self.evaluate(df, state)?; Ok(AggregationContext::new(s, Cow::Borrowed(groups), false)) } fn to_field(&self, _input_schema: &Schema) -> Result<Field> { use LiteralValue::*; let name = "literal"; let field = match &self.0 { #[cfg(feature = "dtype-i8")] Int8(_) => Field::new(name, DataType::Int8), #[cfg(feature = "dtype-i16")] Int16(_) => Field::new(name, DataType::Int16), Int32(_) => Field::new(name, DataType::Int32), Int64(_) => Field::new(name, DataType::Int64), #[cfg(feature = "dtype-u8")] UInt8(_) => Field::new(name, DataType::UInt8), #[cfg(feature = "dtype-u16")] UInt16(_) => Field::new(name, DataType::UInt16), UInt32(_) => Field::new(name, DataType::UInt32), UInt64(_) => Field::new(name, DataType::UInt64), Float32(_) => Field::new(name, DataType::Float32), Float64(_) => Field::new(name, DataType::Float64), Boolean(_) => Field::new(name, DataType::Boolean), Utf8(_) => Field::new(name, DataType::Utf8), Null => Field::new(name, DataType::Null), Range { data_type, .. 
} => Field::new(name, data_type.clone()), #[cfg(all(feature = "temporal", feature = "dtype-datetime"))] DateTime(_) => Field::new(name, DataType::Datetime), Series(s) => s.field().into_owned(), }; Ok(field) } fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> { Ok(self) } } impl PhysicalAggregation for LiteralExpr { fn aggregate( &self, df: &DataFrame, _groups: &GroupTuples, state: &ExecutionState, ) -> Result<Option<Series>> { PhysicalExpr::evaluate(self, df, state).map(Some) } }
# Assemble a rotating-view GIF from pre-rendered 3D-PCA frames.
from PIL import Image, ImageDraw

s = 72  # cap on the number of appended frames


def _load_frame(angle):
    """Open the rendered frame for one angle and upscale it to 512x512."""
    frame = Image.open('k-means/pca_example/tran_pca6d/pca3_PCA3d_angle_' + str(angle) + '.jpg')
    # Resampling options: NEAREST - BOX - BILINEAR - HAMMING - BICUBIC - LANCZOS
    return frame.resize(size=(512, 512), resample=Image.NEAREST)


# One frame every 5 degrees over a full rotation (72 frames total).
images = [_load_frame(angle) for angle in range(0, 360, 5)]

# The first frame starts the file; the remaining frames (up to s - 1 of them)
# are appended, each shown for 200 ms, looping forever.
images[0].save(
    './k-means/pca_example/tran_pca6d_pca3d512_72_n360.gif',
    save_all=True,
    append_images=images[1:s],
    duration=100 * 2,
    loop=0,
)
use std::fmt;

use bitflags::bitflags;

use crate::error::{PageMapError, Result};

///////////////////////////////////////////////////////////////////////////////////////////////////
//
// VirtualMemoryArea
//
///////////////////////////////////////////////////////////////////////////////////////////////////

/// A region of virtual memory, defined by the first and the next-to-last addresses that it
/// includes.
#[derive(Debug, Default, Clone, Copy)]
pub struct VirtualMemoryArea {
    pub(crate) start: u64,
    pub(crate) end: u64, // exclusive upper bound
}

impl VirtualMemoryArea {
    /// Returns the first address included in the virtual memory area.
    #[inline(always)]
    pub fn start_address(&self) -> u64 {
        self.start
    }

    /// Returns the last address included in the virtual memory area.
    #[inline(always)]
    pub fn last_address(&self) -> u64 {
        self.end - 1
    }

    /// Returns the size of the virtual memory area, in bytes.
    #[inline(always)]
    pub fn size(&self) -> u64 {
        self.end - self.start
    }

    /// Returns `true` if the given address falls within the virtual memory area, or `false`
    /// otherwise.
    #[inline(always)]
    pub fn contains(&self, addr: u64) -> bool {
        // Half-open interval check: [start, end).
        addr >= self.start && addr < self.end
    }
}

//impl std::convert::TryFrom<(u64, u64)> for VirtualMemoryArea {
//    type Error = // TODO: Define own error types
//
//    fn try_from(r: (u64, u64)) -> Result<Self, Self::Error> {
//
//    }
//}

impl std::convert::From<(u64, u64)> for VirtualMemoryArea {
    fn from(r: (u64, u64)) -> Self {
        debug_assert!(r.0 < r.1); // TODO: TryFrom
        VirtualMemoryArea {
            start: r.0,
            end: r.1,
        }
    }
}

impl std::str::FromStr for VirtualMemoryArea {
    type Err = PageMapError;

    // Parses the "start-end" hex address pair from a /proc/<PID>/maps line.
    fn from_str(s: &str) -> Result<Self> {
        let r: Vec<_> = s
            .splitn(2, '-')
            .map(|addr| u64::from_str_radix(addr, 16).map_err(PageMapError::ParseVirtualMemoryArea))
            .collect::<Result<_>>()?;
        Ok((r[0], r[1]).into())
    }
}

impl fmt::Display for VirtualMemoryArea {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "0x{:016x}-0x{:016x} ({:5}K)",
            self.start,
            self.end,
            self.size() / 1024,
        )
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
//
// PagePermissions
//
///////////////////////////////////////////////////////////////////////////////////////////////////

bitflags! {
    /// The permissions that a page may have.
    #[derive(Default)]
    pub struct PagePermissions: u8 {
        /// Permission to be read.
        const READ = 1 << 0;
        /// Permission to be written.
        const WRITE = 1 << 1;
        /// Permission to be executed.
        const EXECUTE = 1 << 2;
        /// A page can be `shared` or `private` (i.e., copy-on-write).
        const SHARED = 1 << 3;
    }
}

impl std::str::FromStr for PagePermissions {
    type Err = PageMapError;

    // FIXME: This is a quick and dirty implementation, which would currently allow weird patterns,
    // like '----', 'wprx', etc.
    fn from_str(s: &str) -> Result<Self> {
        // Expects the 4-character "rwxp"-style field from /proc/<PID>/maps.
        if s.len() != 4 {
            return Err(PageMapError::ParsePagePermissions(s.into()));
        }
        let mut ret: PagePermissions = Default::default();
        for c in s.bytes() {
            ret |= match c {
                b'r' => Self::READ,
                b'w' => Self::WRITE,
                b'x' => Self::EXECUTE,
                b's' => Self::SHARED,
                // 'p' (private) and '-' contribute no flag bits.
                b'p' | b'-' => Self::empty(),
                _ => return Err(PageMapError::ParsePagePermissions(s.into())),
            }
        }
        //if ret.is_empty() { Err(PageMapError::ParsePagePermissions(s.into())) } else { Ok(ret) }
        Ok(ret)
    }
}

impl fmt::Display for PagePermissions {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Start from the all-clear private representation and overwrite
        // positions for each flag that is set.
        let mut ret = "---p".to_owned();
        if self.contains(Self::READ) {
            ret.replace_range(0..1, "r");
        }
        if self.contains(Self::WRITE) {
            ret.replace_range(1..2, "w");
        }
        if self.contains(Self::EXECUTE) {
            ret.replace_range(2..3, "x");
        }
        if self.contains(Self::SHARED) {
            ret.replace_range(3..4, "s");
        }
        write!(f, "{}", ret)
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
//
// DeviceNumbers
//
///////////////////////////////////////////////////////////////////////////////////////////////////

/// Major and minor numbers of a file.
#[derive(Debug, Default, Clone, Copy)]
pub struct DeviceNumbers {
    major: u16, // major: u12
    minor: u32, // minor: u20
}

impl DeviceNumbers {
    /// Retrieve the major number.
    #[inline(always)]
    pub fn major(&self) -> u16 {
        self.major
    }

    /// Retrieve the minor number.
    #[inline(always)]
    pub fn minor(&self) -> u32 {
        self.minor
    }
}

impl std::convert::From<(u32, u32)> for DeviceNumbers {
    fn from(p: (u32, u32)) -> Self {
        debug_assert!(p.0 < 1 << 12); // TODO: TryFrom
        debug_assert!(p.1 < 1 << 20); // TODO: TryFrom
        DeviceNumbers {
            major: p.0 as u16,
            minor: p.1,
        }
    }
}

impl std::str::FromStr for DeviceNumbers {
    type Err = PageMapError;

    // Parses the "major:minor" field of a maps line as hexadecimal.
    // NOTE(review): parsing is hex but Display below prints decimal, so
    // parse/format do not round-trip textually — confirm this is intended.
    fn from_str(s: &str) -> Result<Self> {
        let p: Vec<_> = s
            .splitn(2, ':')
            .map(|addr| {
                u32::from_str_radix(addr, 16)
                    .map_err(|_| PageMapError::ParseDeviceNumbers(s.into()))
            })
            .collect::<Result<_>>()?;
        Ok((p[0], p[1]).into())
    }
}

impl fmt::Display for DeviceNumbers {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}:{}", self.major, self.minor)
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
//
// MapsEntry
//
///////////////////////////////////////////////////////////////////////////////////////////////////

/// A memory mapping of a process, parsed from `/proc/<PID>/maps`.
#[derive(Debug, Default, Clone)]
pub struct MapsEntry {
    /// The virtual memory area that the mapping concerns.
    pub(crate) vma: VirtualMemoryArea,
    /// The possible ways that pages in the virtual memory area are allowed to be accessed.
    perms: PagePermissions,
    /// The offset in the file backing the mapping (if any) where the mapping begins.
    offset: u64,
    /// The major and minor numbers of the file backing the mapping, if any.
    dev: DeviceNumbers,
    /// The inode of the file backing the mapping, if any.
    inode: u64,
    /// The name of the file backing the mapping (if any), or a pseudo-path, as described in
    /// [`procfs(5)`].
    ///
    /// [`procfs(5)`]: https://man7.org/linux/man-pages/man5/proc.5.html
    pathname: Option<String>,
}

impl MapsEntry {
    /// Retrieve the virtual memory area of the mapping.
    #[inline(always)]
    pub fn vma(&self) -> VirtualMemoryArea {
        self.vma
    }

    /// Retrieve the permissions for the pages of the particular mapping, which dictate the
    /// possible ways that the pages of the mapping are allowed to be accessed.
    #[inline(always)]
    pub fn permissions(&self) -> PagePermissions {
        self.perms
    }

    /// Retrieve the offset in the file backing the mapping (if any) where the mapping begins.
    #[inline(always)]
    pub fn offset(&self) -> u64 {
        self.offset
    }

    /// Retrieve the major and minor numbers of the file backing the mapping, if any.
    #[inline(always)]
    pub fn device_numbers(&self) -> DeviceNumbers {
        self.dev
    }

    /// Retrieve the inode of the file backing the mapping, if any.
    #[inline(always)]
    pub fn inode(&self) -> u64 {
        self.inode
    }

    /// Retrieve the name of the file backing the mapping (if any), or a pseudo-path, as described
    /// in [`procfs(5)`].
    ///
    /// [`procfs(5)`]: https://man7.org/linux/man-pages/man5/proc.5.html
    pub fn path(&self) -> Option<&str> {
        self.pathname.as_ref().map(|p| p.as_ref())
    }
}

impl std::str::FromStr for MapsEntry {
    type Err = PageMapError;

    // Parses one whitespace-separated /proc/<PID>/maps line.
    // NOTE(review): indexing s[0]..s[4] panics (rather than Err) on lines with
    // fewer than 5 fields — TODO confirm input is always well-formed here.
    fn from_str(s: &str) -> Result<Self> {
        let s: Vec<_> = s.split_ascii_whitespace().collect();
        Ok(MapsEntry {
            vma: s[0].parse()?,
            perms: s[1].parse()?,
            offset: u64::from_str_radix(s[2], 16)?,
            dev: s[3].parse()?,
            inode: s[4].parse()?,
            // The pathname column is optional (anonymous mappings omit it).
            pathname: s.get(5).map(|&p| p.to_owned()),
        })
    }
}

impl fmt::Display for MapsEntry {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // TODO
        write!(
            f,
            "{} {} {:20} {} {} {:?}",
            self.vma, self.perms, self.offset, self.dev, self.inode, self.pathname
        )
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
//
// tests
//
///////////////////////////////////////////////////////////////////////////////////////////////////

#[cfg(test)]
mod tests {
    use super::*;

    // Round-trips a set of valid permission strings through parse + Display.
    #[test]
    fn test_valid_perms() {
        let perms = vec![
            "---s", "---p", "r--s", "r--p", "rw-s", "-w-s", "-w-p", "--xs", "--xp", "rw-p",
            "r-xp",
        ];
        for p in perms {
            eprint!("{:#?} --> ", p);
            let pp = p.parse::<PagePermissions>().unwrap();
            eprintln!("{:?}", pp);
            assert_eq!(format!("{}", pp), p);
        }
    }

    //#[test]
    ////#[should_panic]
    //fn test_invalid_perms() {
    //    assert!("----".parse::<PagePermissions>().is_err());
    //}

    // Parses a handful of real-looking maps lines end-to-end.
    #[test]
    fn test_maps_entry() -> std::result::Result<(), PageMapError> {
        let start: u64 = u64::from_str_radix("7f368bc85000", 16).unwrap();
        eprintln!("start = {:#?}", start);

        let maps_entries = vec![
            "7f368bc85000-7f368bca7000 r--s 00000000 fe:00 400910                     /usr/share/zsh/functions/Completion/Base.zwc",
            "7f368bcaf000-7f368bcb3000 rw-p 00000000 00:00 0",
            "7f368bcc2000-7f368bcc3000 ---p 0000f000 fe:00 13377416                   /usr/lib/x86_64-linux-gnu/zsh/5.7.1/zsh/complist.so",
            "7ffcec729000-7ffcec784000 rw-p 00000000 00:00 0                          [stack]",
            "7ffcec7d1000-7ffcec7d3000 r-xp 00000000 00:00 0                          [vdso]",
        ];
        for line in maps_entries {
            eprintln!("line = {:#?}", line);
            let entry = line.parse::<MapsEntry>()?;
            eprintln!("entry = {:#?}", entry);
            eprintln!("pretty entry = {}\n", entry);
        }
        Ok(())
    }
}
In 1964, Ducati dipped a toe into the large cruiser market with the mighty Apollo, a 1,256cc vee-four heavyweight designed to take on Harley-Davidson in the police market. The Apollo never made it into production, but 46 years later, its spiritual successor has appeared. First, the stats: the power output of the Ducati Diavel is listed at 162hp, with 94lb-ft of torque. There’s a slipper clutch to prevent rear-wheel lockup. Suspension is fully adjustable front and rear. The front brakes are 4-piston Brembo Monoblocs, and ABS is standard. Instruments are split between a bar-mounted display and a tank unit. And unlike most cruisers, the Ducati Diavel won’t break the scales. At just 207kg (456lb), it’s only 15kg heavier than a Multistrada 1200 S Touring. And a remarkable 27kg less than the diminutive Harley Sportster 883 SuperLow. [Full specs in this PDF.] At the back is that 8-inch wide rear tire, a subject of much speculation. So no, the Ducati Diavel won’t be as nimble as an 848 EVO. But Ducati will have worked hard to get the most out of the rubber, which is a state-of-the-art 240/45 ZR17 Pirelli Diablo Rosso II hoop. There are two models: the stock bike, and a Carbon variant (shown here) that weighs 3kg less and sports a slightly upgraded specification, including Marchesini forged wheels. The Diavel probably isn’t a make-or-break bike for Ducati. And warning shots have already been fired over its bows, with some Ducatisti believing that the Bologna brand should stick to the sportier end of the market. But the upside potential is enormous. The obvious parallel is the Porsche Cayenne SUV, another vehicle that stirred controversy during its gestation but went on to become a sales success. Then of course, there are the looks. As they say, the devil is in the details—so what do you think?
/** * A protocol definition and call dispatcher. * * Encapsulates the state of a protocol definition (registered implementations) and provides type dispatch * utilities. A subclass of this class is generated and instantiated for each registered protocol. The methods in * the protocol's interface are then stubbed out to delegate calls to the appropriate type based on the dispatch * argument. */ protected static class Dispatcher { private final Object lock = new Object(); private final Class<?> api; private volatile Map<Class<?>, Object> implementations = new IdentityHashMap<>(); private volatile Map<Class<?>, Object> dispatchTable = new IdentityHashMap<>(); /** * Initializes an instance with the given protocol API definition. * * @param api A {@link Class} instance representing an interface that provides the contract for the protocol. */ protected Dispatcher (Class<?> api) { this.api = api; } void protocols$$extend (Class<?> type, Object implementation) { synchronized (lock) { if (implementations.containsKey(type)) { throw new IllegalStateException("Protocol " + api.getName() + " already has an implementation for" + " type " + type.getName() + "."); } Map<Class<?>, Object> copy = new IdentityHashMap<>(implementations); copy.put(type, implementation); implementations = copy; // Reset the dispatch table cache. The initial cache contains any types that are directly registered. dispatchTable = new IdentityHashMap<>(implementations); } } /** * Determines the protocol implementation to delegate to for the given dispatch argument. Lookups are cached * based on the argument's type. Caches are cleared when the implementation registry changes. Precedence is: * * * Class * * Super-classes (excluding Object) * * Interfaces * * Super-class' interfaces * * Object * * @param target The dispatch object who's type will be used to perform the implementation lookup. * @return The protocol implementation or null if no match was found. 
*/ protected final Object protocols$$getImplementation (Object target) { Class<?> dispatchType; // use Void in place of null if (target == null) { dispatchType = Void.class; } else { dispatchType = target.getClass(); } // Check the cache Object implementation = dispatchTable.get(dispatchTable); if (implementation != null) { return implementation; } // Traverse the type hierarchy to find an implementation implementation = findImplementation(dispatchType); // Error if no implementation was found for dispatchType if (implementation == null) { throw new IllegalArgumentException("Protocol " + api.getName() + " is not defined for " + dispatchType.getName()); } else { // Update the cache synchronized (lock) { Map<Class<?>, Object> copy = new IdentityHashMap<>(dispatchTable); copy.put(dispatchType, implementation); dispatchTable = copy; } return implementation; } } private Object findImplementation (Class<?> dispatchClass) { if (dispatchClass == null) { return null; } // Do we have a matching implementation? Object implementation = implementations.get(dispatchClass); if (implementation != null) { return implementation; } // Do our super-classes have an implementation? implementation = findImplInSuperclasses(dispatchClass); if (implementation != null) { return implementation; } // Do our interfaces or our super-classes have an implementation? implementation = findImplInInterfaces(dispatchClass); if (implementation != null) { return implementation; } // Does Object have an implementation? return implementations.get(Object.class); } private Object findImplInSuperclasses (Class<?> dispatchClass) { Object implementation; // Check all super classes (minus Object) Class<?> superclass = dispatchClass.getSuperclass(); // Object's superclass is null. Avoid checking for Object.class explicitly due to potential classloader // issues. 
while (superclass != null && superclass.getSuperclass() != null) { implementation = implementations.get(superclass); if (implementation != null) { return implementation; } superclass = superclass.getSuperclass(); } return null; } private Object findImplInInterfaces (Class<?> dispatchClass) { if (dispatchClass == null) { return null; } Object implementation = implementations.get(dispatchClass); if (implementation != null) { return implementation; } // Traverse interfaces in order for (Class<?> i : dispatchClass.getInterfaces()) { // Traverse super-interfaces implementation = findImplInInterfaces(i); if (implementation != null) { return implementation; } } // Also look through super-classes interfaces return findImplInInterfaces(dispatchClass.getSuperclass()); } }
package socket func (c *client) handleChatbotRequest(request string) string { return "you just said " + request }
<reponame>trussworks/mymove
package ghcrateengine

import (
	"fmt"
	"testing"

	"github.com/gofrs/uuid"

	"github.com/transcom/mymove/pkg/models"
	"github.com/transcom/mymove/pkg/services"
	"github.com/transcom/mymove/pkg/testdatagen"
	"github.com/transcom/mymove/pkg/unit"
)

// Test_priceDomesticFirstDaySIT covers the DDFSIT (destination first day SIT)
// pricer: the happy path plus each validation/lookup failure branch.
// The ddfsitTest* fixtures are defined elsewhere in this package.
func (suite *GHCRateEngineServiceSuite) Test_priceDomesticFirstDaySIT() {
	suite.setupDomesticServiceAreaPrice(models.ReServiceCodeDDFSIT, ddfsitTestServiceArea, ddfsitTestIsPeakPeriod, ddfsitTestBasePriceCents, ddfsitTestEscalationCompounded)

	suite.T().Run("destination golden path", func(t *testing.T) {
		priceCents, _, err := priceDomesticFirstDaySIT(suite.DB(), models.ReServiceCodeDDFSIT, DefaultContractCode, ddfsitTestRequestedPickupDate, ddfsitTestWeight, ddfsitTestServiceArea)
		suite.NoError(err)
		suite.Equal(ddfsitTestPriceCents, priceCents)
	})

	// CS is not a first-day SIT code, so the pricer must reject it.
	suite.T().Run("invalid service code", func(t *testing.T) {
		_, _, err := priceDomesticFirstDaySIT(suite.DB(), models.ReServiceCodeCS, DefaultContractCode, ddfsitTestRequestedPickupDate, ddfsitTestWeight, ddfsitTestServiceArea)
		suite.Error(err)
		suite.Contains(err.Error(), "unsupported first day sit code")
	})

	// 250 lb is below the pricer's minimum billable weight.
	suite.T().Run("invalid weight", func(t *testing.T) {
		badWeight := unit.Pound(250)
		_, _, err := priceDomesticFirstDaySIT(suite.DB(), models.ReServiceCodeDDFSIT, DefaultContractCode, ddfsitTestRequestedPickupDate, badWeight, ddfsitTestServiceArea)
		suite.Error(err)
		suite.Contains(err.Error(), "weight of 250 less than the minimum")
	})

	// An unknown contract code means no rate record exists.
	suite.T().Run("not finding a rate record", func(t *testing.T) {
		_, _, err := priceDomesticFirstDaySIT(suite.DB(), models.ReServiceCodeDDFSIT, "BOGUS", ddfsitTestRequestedPickupDate, ddfsitTestWeight, ddfsitTestServiceArea)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch domestic destination first day SIT rate")
	})

	// A pickup date two years out falls outside the seeded contract year.
	suite.T().Run("not finding a contract year record", func(t *testing.T) {
		twoYearsLaterPickupDate := ddfsitTestRequestedPickupDate.AddDate(2, 0, 0)
		_, _, err := priceDomesticFirstDaySIT(suite.DB(), models.ReServiceCodeDDFSIT, DefaultContractCode, twoYearsLaterPickupDate, ddfsitTestWeight, ddfsitTestServiceArea)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch contract year")
	})
}

// Test_priceDomesticAdditionalDaysSIT covers the DDASIT (destination
// additional days SIT) pricer with the same branch structure as above.
func (suite *GHCRateEngineServiceSuite) Test_priceDomesticAdditionalDaysSIT() {
	suite.setupDomesticServiceAreaPrice(models.ReServiceCodeDDASIT, ddasitTestServiceArea, ddasitTestIsPeakPeriod, ddasitTestBasePriceCents, ddasitTestEscalationCompounded)

	suite.T().Run("destination golden path", func(t *testing.T) {
		priceCents, _, err := priceDomesticAdditionalDaysSIT(suite.DB(), models.ReServiceCodeDDASIT, DefaultContractCode, ddasitTestRequestedPickupDate, ddasitTestWeight, ddasitTestServiceArea, ddasitTestNumberOfDaysInSIT)
		suite.NoError(err)
		suite.Equal(ddasitTestPriceCents, priceCents)
	})

	// DDFSIT is a first-day code, not an additional-day code.
	suite.T().Run("invalid service code", func(t *testing.T) {
		_, _, err := priceDomesticAdditionalDaysSIT(suite.DB(), models.ReServiceCodeDDFSIT, DefaultContractCode, ddasitTestRequestedPickupDate, ddasitTestWeight, ddasitTestServiceArea, ddasitTestNumberOfDaysInSIT)
		suite.Error(err)
		suite.Contains(err.Error(), "unsupported additional day sit code")
	})

	suite.T().Run("invalid weight", func(t *testing.T) {
		badWeight := unit.Pound(499)
		_, _, err := priceDomesticAdditionalDaysSIT(suite.DB(), models.ReServiceCodeDDASIT, DefaultContractCode, ddasitTestRequestedPickupDate, badWeight, ddasitTestServiceArea, ddasitTestNumberOfDaysInSIT)
		suite.Error(err)
		suite.Contains(err.Error(), "weight of 499 less than the minimum")
	})

	suite.T().Run("not finding a rate record", func(t *testing.T) {
		_, _, err := priceDomesticAdditionalDaysSIT(suite.DB(), models.ReServiceCodeDDASIT, "BOGUS", ddasitTestRequestedPickupDate, ddasitTestWeight, ddasitTestServiceArea, ddasitTestNumberOfDaysInSIT)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch domestic destination additional days SIT rate")
	})

	suite.T().Run("not finding a contract year record", func(t *testing.T) {
		twoYearsLaterPickupDate := ddasitTestRequestedPickupDate.AddDate(2, 0, 0)
		_, _, err := priceDomesticAdditionalDaysSIT(suite.DB(), models.ReServiceCodeDDASIT, DefaultContractCode, twoYearsLaterPickupDate, ddasitTestWeight, ddasitTestServiceArea, ddasitTestNumberOfDaysInSIT)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch contract year")
	})
}

// Test_priceDomesticPickupDeliverySITSameZip3s covers the DDDSIT pickup/
// delivery pricer when origin and SIT destination share the same zip3: the
// engine prices via the shorthaul (DSH) rate in that case.
func (suite *GHCRateEngineServiceSuite) Test_priceDomesticPickupDeliverySITSameZip3s() {
	dshZipDest := "30907"
	dshZipSITDest := "30901" // same zip3
	dshDistance := unit.Miles(15)

	suite.T().Run("destination golden path for same zip3s", func(t *testing.T) {
		suite.setupDomesticServiceAreaPrice(models.ReServiceCodeDSH, dddsitTestServiceArea, dddsitTestIsPeakPeriod, dddsitTestDomesticServiceAreaBasePriceCents, dddsitTestEscalationCompounded)
		priceCents, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dshZipDest, dshZipSITDest, dshDistance)
		suite.NoError(err)
		expectedPrice := unit.Cents(53187) // dddsitTestDomesticServiceAreaBasePriceCents * (dddsitTestWeight / 100) * distance * dddsitTestEscalationCompounded
		suite.Equal(expectedPrice, priceCents)
	})

	suite.T().Run("invalid service code", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeCS, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dshZipDest, dshZipSITDest, dshDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "unsupported pickup/delivery SIT code")
	})

	suite.T().Run("invalid weight", func(t *testing.T) {
		badWeight := unit.Pound(250)
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, badWeight, dddsitTestServiceArea, dddsitTestSchedule, dshZipDest, dshZipSITDest, dshDistance)
		suite.Error(err)
		expectedError := fmt.Sprintf("weight of %d less than the minimum", badWeight)
		suite.Contains(err.Error(), expectedError)
	})

	// "309" is too short to be a valid postal code.
	suite.T().Run("bad destination zip", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, "309", dshZipSITDest, dshDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "invalid destination postal code")
	})

	suite.T().Run("bad SIT final destination zip", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dshZipDest, "1234", dshDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "invalid SIT final destination postal code")
	})

	// A bogus contract code makes the delegated shorthaul pricer fail.
	suite.T().Run("error from shorthaul pricer", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, "BOGUS", dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dshZipDest, dshZipSITDest, dshDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "could not price shorthaul")
	})
}

// Test_priceDomesticPickupDeliverySIT50PlusMilesDiffZip3s: with different
// zip3s and distance > 50 miles the engine prices via the linehaul rate.
func (suite *GHCRateEngineServiceSuite) Test_priceDomesticPickupDeliverySIT50PlusMilesDiffZip3s() {
	dlhZipDest := "30907"
	dlhZipSITDest := "36106" // different zip3
	dlhDistance := unit.Miles(305) // > 50 miles

	suite.T().Run("destination golden path for > 50 miles with different zip3s", func(t *testing.T) {
		suite.setupDomesticLinehaulPrice(dddsitTestServiceArea, dddsitTestIsPeakPeriod, dddsitTestWeightLower, dddsitTestWeightUpper, dddsitTestMilesLower, dddsitTestMilesUpper, dddsitTestDomesticLinehaulBasePriceMillicents, dddsitTestEscalationCompounded)
		priceCents, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dlhZipDest, dlhZipSITDest, dlhDistance)
		suite.NoError(err)
		expectedPriceMillicents := unit.Millicents(45944438) // dddsitTestDomesticLinehaulBasePriceMillicents * (dddsitTestWeight / 100) * distance * dddsitTestEscalationCompounded
		expectedPrice := expectedPriceMillicents.ToCents()
		suite.Equal(expectedPrice, priceCents)
	})

	suite.T().Run("error from linehaul pricer", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, "BOGUS", dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, dlhZipDest, dlhZipSITDest, dlhDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "could not price linehaul")
	})
}

// Test_priceDomesticPickupDeliverySIT50MilesOrLessDiffZip3s: with different
// zip3s but distance <= 50 miles the engine prices via the domestic "other"
// (SIT delivery) rate.
func (suite *GHCRateEngineServiceSuite) Test_priceDomesticPickupDeliverySIT50MilesOrLessDiffZip3s() {
	domOtherZipDest := "30907"
	domOtherZipSITDest := "29801" // different zip3
	domOtherDistance := unit.Miles(37) // <= 50 miles

	suite.T().Run("destination golden path for <= 50 miles with different zip3s", func(t *testing.T) {
		suite.setupDomesticOtherPrice(models.ReServiceCodeDDDSIT, dddsitTestSchedule, dddsitTestIsPeakPeriod, dddsitTestDomesticOtherBasePriceCents, dddsitTestEscalationCompounded)
		priceCents, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, domOtherZipDest, domOtherZipSITDest, domOtherDistance)
		suite.NoError(err)
		expectedPrice := unit.Cents(58355) // dddsitTestDomesticOtherBasePriceCents * (dddsitTestWeight / 100) * dddsitTestEscalationCompounded
		suite.Equal(expectedPrice, priceCents)
	})

	suite.T().Run("not finding a rate record", func(t *testing.T) {
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, "BOGUS", dddsitTestRequestedPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, domOtherZipDest, domOtherZipSITDest, domOtherDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch domestic destination SIT delivery rate")
	})

	suite.T().Run("not finding a contract year record", func(t *testing.T) {
		twoYearsLaterPickupDate := dddsitTestRequestedPickupDate.AddDate(2, 0, 0)
		_, _, err := priceDomesticPickupDeliverySIT(suite.DB(), models.ReServiceCodeDDDSIT, testdatagen.DefaultContractCode, twoYearsLaterPickupDate, dddsitTestWeight, dddsitTestServiceArea, dddsitTestSchedule, domOtherZipDest, domOtherZipSITDest, domOtherDistance)
		suite.Error(err)
		suite.Contains(err.Error(), "could not fetch contract year")
	})
}

// Test_createPricerGeneratedParams verifies that pricer-generated pricing
// params are persisted as payment service item params, and that invalid IDs,
// non-pricer-origin keys, and empty param lists are rejected.
func (suite *GHCRateEngineServiceSuite) Test_createPricerGeneratedParams() {
	params := services.PricingParams{
		{
			Key:   models.ServiceItemParamNamePriceRateOrFactor,
			Value: 40000.9,
		},
		{
			Key:   models.ServiceItemParamNameEscalationCompounded,
			Value: 1.06,
		},
		{
			Key:   models.ServiceItemParamNameIsPeak,
			Value: true,
		},
		{
			Key:   models.ServiceItemParamNameContractYearName,
			Value: "TRUSS_TEST",
		},
	}

	// Seed the four pricer-origin param keys referenced above.
	testdatagen.MakeServiceItemParamKey(suite.DB(), testdatagen.Assertions{
		ServiceItemParamKey: models.ServiceItemParamKey{
			Key:         models.ServiceItemParamNamePriceRateOrFactor,
			Description: "Price Rate Or Factor",
			Type:        models.ServiceItemParamTypeDecimal,
			Origin:      models.ServiceItemParamOriginPricer,
		},
	})

	testdatagen.MakeServiceItemParamKey(suite.DB(), testdatagen.Assertions{
		ServiceItemParamKey: models.ServiceItemParamKey{
			Key:         models.ServiceItemParamNameEscalationCompounded,
			Description: "Escalation compounded",
			Type:        models.ServiceItemParamTypeDecimal,
			Origin:      models.ServiceItemParamOriginPricer,
		},
	})

	testdatagen.MakeServiceItemParamKey(suite.DB(), testdatagen.Assertions{
		ServiceItemParamKey: models.ServiceItemParamKey{
			Key:         models.ServiceItemParamNameIsPeak,
			Description: "Is peak",
			Type:        models.ServiceItemParamTypeBoolean,
			Origin:      models.ServiceItemParamOriginPricer,
		},
	})

	testdatagen.MakeServiceItemParamKey(suite.DB(), testdatagen.Assertions{
		ServiceItemParamKey: models.ServiceItemParamKey{
			Key:         models.ServiceItemParamNameContractYearName,
			Description: "Contract year name",
			Type:        models.ServiceItemParamTypeString,
			Origin:      models.ServiceItemParamOriginPricer,
		},
	})

	paymentServiceItem := testdatagen.MakePaymentServiceItem(
		suite.DB(),
		testdatagen.Assertions{},
	)

	suite.T().Run("payment service item params created for the pricer", func(t *testing.T) {
		paymentServiceItemParams, err := createPricerGeneratedParams(suite.DB(), paymentServiceItem.ID, params)
		suite.NoError(err)
		// Each stored value must round-trip to its string representation.
		expectedValues := [4]string{"40000.9", "1.06", "true", "TRUSS_TEST"}
		for _, paymentServiceItemParam := range paymentServiceItemParams {
			switch paymentServiceItemParam.ServiceItemParamKey.Key {
			case models.ServiceItemParamNamePriceRateOrFactor:
				suite.Equal(expectedValues[0], paymentServiceItemParam.Value)
			case models.ServiceItemParamNameEscalationCompounded:
				suite.Equal(expectedValues[1], paymentServiceItemParam.Value)
			case models.ServiceItemParamNameIsPeak:
				suite.Equal(expectedValues[2], paymentServiceItemParam.Value)
			case models.ServiceItemParamNameContractYearName:
				suite.Equal(expectedValues[3], paymentServiceItemParam.Value)
			}
		}
	})

	suite.T().Run("errors if PaymentServiceItemID is invalid", func(t *testing.T) {
		invalidID, _ := uuid.FromString("00000000-0000-0000-0000-000000000000")
		_, err := createPricerGeneratedParams(suite.DB(), invalidID, params)
		suite.Error(err)
		suite.Contains(err.Error(), "validation error with creating payment service item param")
	})

	suite.T().Run("errors if PricingParm points to a serviceItem that doesnt originate from the Pricer", func(t *testing.T) {
		invalidParam := services.PricingParams{
			{
				Key:   models.ServiceItemParamNameServiceAreaOrigin,
				Value: 40000.9,
			},
		}
		// This key is seeded with Origin = Prime, not Pricer, so it must be rejected.
		testdatagen.MakeServiceItemParamKey(suite.DB(), testdatagen.Assertions{
			ServiceItemParamKey: models.ServiceItemParamKey{
				Key:         models.ServiceItemParamNameServiceAreaOrigin,
				Description: "service area actual",
				Type:        models.ServiceItemParamTypeString,
				Origin:      models.ServiceItemParamOriginPrime,
			},
		})
		_, err := createPricerGeneratedParams(suite.DB(), paymentServiceItem.ID, invalidParam)
		suite.Error(err)
		suite.Contains(err.Error(), "Service item param key is not a pricer param")
	})

	suite.T().Run("errors if no PricingParms passed from the Pricer", func(t *testing.T) {
		emptyParams := services.PricingParams{}
		_, err := createPricerGeneratedParams(suite.DB(), paymentServiceItem.ID, emptyParams)
		suite.Error(err)
		suite.Contains(err.Error(), "PricingParams must not be empty")
	})
}
<filename>parking/parking.go package parking import ( "errors" "fmt" "github.com/ManojChandran/parking-lot/slot" "github.com/google/uuid" ) // Ticket delivers the parking ticket for vehicle type Ticket struct { ID string } type pEntrance struct { gateLocation string gateNumber string } type parking struct { parkingCompany string parkingAddress string // parkingEntrance [3]pEntrance carParking map[Ticket]*slot.Slot vanParking map[Ticket]*slot.Slot bikeParking map[Ticket]*slot.Slot } // Parking interface provide the list of avialable methods type Parking interface { SetParkingCompany(string, string) GetParkingTicket(string, string) (Ticket, error) GetVehicle(string) string VehicleExit(string) PayParking(string) DisplayParking() } func (p *parking) SetParkingCompany(parkingCompany, parkingAddress string) { p.parkingCompany = parkingCompany p.parkingAddress = parkingAddress } // BuildParkingCompany method will create a parking company instance func BuildParkingCompany() Parking { CarParking := make(map[Ticket]*slot.Slot) VanParking := make(map[Ticket]*slot.Slot) BikeParking := make(map[Ticket]*slot.Slot) return &parking{ carParking: CarParking, vanParking: VanParking, bikeParking: BikeParking, } } func createTicketID() Ticket { ID := uuid.New() return Ticket{ID: ID.String()} } // GetParkingTicket builds parking ticket func (p *parking) GetParkingTicket(vType, vNumber string) (Ticket, error) { var t Ticket if vType == "car" { c, err := slot.NewCarSlot() if err != nil { return t, errors.New("no slot available") } c.SetCarSlot(vNumber) t = createTicketID() p.carParking[t] = c } if vType == "van" { v, err := slot.NewVanSlot() if err != nil { return t, errors.New("no slot available") } v.SetVanSlot(vNumber) t = createTicketID() p.vanParking[t] = v } if vType == "bike" { b, err := slot.NewBikeSlot() if err != nil { return t, errors.New("no slot available") } b.SetBikeSlot(vNumber) t = createTicketID() p.bikeParking[t] = b } return t, nil } func (p *parking) GetVehicle(id 
string) string { t := Ticket{ID: id} pop, ok := p.carParking[t] if ok { return pop.VehicleNumber } return "no vehicle" } func (p *parking) VehicleExit(id string) { t := Ticket{ID: id} car, ok := p.carParking[t] if ok { car.CarExit() } van, ok := p.vanParking[t] if ok { van.VanExit() } bike, ok := p.bikeParking[t] if ok { bike.BikeExit() } } func (p *parking) PayParking(id string) { t := Ticket{ID: id} delete(p.carParking, t) delete(p.vanParking, t) delete(p.bikeParking, t) } func (p *parking) DisplayParking() { fmt.Println(p.parkingCompany) fmt.Println(p.parkingAddress) fmt.Println("*****Car *****") fmt.Println(slot.CarCount()) for _, c := range p.carParking { fmt.Println(c) } fmt.Println("*****Van *****") fmt.Println(slot.VanCount()) for _, v := range p.vanParking { fmt.Println(v) } fmt.Println("*****Bike ****") fmt.Println(slot.BikeCount()) for _, b := range p.bikeParking { fmt.Println(b) } // fmt.Println(p.parkingEntrance) // fmt.Println(p.parkingFloor) }
/**
 * Test getColumnPrivileges; does not modify the database.
 * For further testing see test lang.grantRevokeTest.
 * @throws SQLException
 */
public void testGetColumnPrivileges() throws SQLException {
    // A null column-name pattern must be rejected with SQLState XJ103.
    try {
        getColumnPrivileges(null, null, null, null);
        fail("expected error XJ103");
    } catch (SQLException sqle) {
        assertSQLState("XJ103", sqle);
    }

    // Every remaining pattern combination must yield empty results from
    // both metadata code paths (same calls and order as before, table-driven).
    String[][] patternSets = {
        { null, null, "", null },
        { "", "", "", "" },
        { "%", "%", "%", "%" },
        { null, "SYS", "SYSTABLES", "TABLEID" },
    };
    for (String[] p : patternSets) {
        ResultSet[] rs = getColumnPrivileges(p[0], p[1], p[2], p[3]);
        JDBC.assertEmpty(rs[0]);
        JDBC.assertEmpty(rs[1]);
    }
}
/**
 * Adding a KeyValue makes it visible; adding a second KeyValue with the same
 * coordinates replaces the stored value instead of growing the set.
 */
public void testAdd() throws Exception {
    // Row, family, qualifier and value are all derived from the test name.
    byte[] seed = Bytes.toBytes(getName());
    KeyValue original = new KeyValue(seed, seed, seed, seed);

    this.kvsls.add(original);
    assertTrue(this.kvsls.contains(original));
    assertEquals(1, this.kvsls.size());
    KeyValue head = this.kvsls.first();
    assertTrue(original.equals(head));
    assertTrue(Bytes.equals(original.getValue(), head.getValue()));

    // Same coordinates, different value: the entry is overwritten in place.
    byte[] replacementValue = Bytes.toBytes("overwrite");
    KeyValue overwrite = new KeyValue(seed, seed, seed, replacementValue);
    this.kvsls.add(overwrite);
    assertEquals(1, this.kvsls.size());
    head = this.kvsls.first();
    assertTrue(Bytes.equals(overwrite.getValue(), head.getValue()));
    assertFalse(Bytes.equals(overwrite.getValue(), original.getValue()));
}
def reconstruction(rec_times, alpha_1, alpha_2, alpha_3, alpha_4):
    """Run the MFI tomographic reconstruction at the requested time points.

    Loads the projection matrix and the signal data/time arrays from .npy
    files, selects the signal column nearest each requested time, runs
    ``mfi`` with the given regularization weights, and reports the chi-square
    of the final reconstruction against the last selected signal set.

    Parameters
    ----------
    rec_times : iterable of float
        Times at which to reconstruct (nearest stored time is used).
    alpha_1, alpha_2, alpha_3, alpha_4 : float
        Regularization weights forwarded to ``mfi``.

    Returns
    -------
    tuple
        ``(first_g, g_list, x_array_plot, y_array_plot)``.
    """

    def load_array(fname):
        # Load a .npy file, printing the same progress lines as before
        # (label is the file name without its ".npy" suffix).
        print('Reading:', fname)
        data = np.load(fname)
        print(fname[:-4] + ':', data.shape, data.dtype)
        return data

    projections = load_array('projections.npy')

    # Flatten each projection image into a row of the system matrix.
    P = projections.reshape((projections.shape[0], -1))
    print('P:', P.shape, P.dtype)

    signals_data = load_array('signals_data.npy')
    signals_time = load_array('signals_time.npy')

    # For every requested time, take the signal column closest in time.
    signals = []
    for requested in rec_times:
        time_index, requested = find_nearest(signals_time[0], requested)
        signals.append(signals_data[:, time_index])

    n_rows = projections.shape[1]
    n_cols = projections.shape[2]
    res = [4.4444, 4.4444]

    # Plot axes span cell edges; the *_array axes below are cell centres.
    # NOTE(review): x_array/y_array are computed but unused here, as in the
    # original — presumably kept for callers/debugging; confirm.
    x_array_plot = (np.arange(n_cols + 1) - n_cols / 2.) * res[0]
    y_array_plot = (n_rows / 2. - np.arange(n_rows + 1)) * res[1]
    x_array = np.arange(n_cols) * res[0] - n_cols / 2. * res[0]
    y_array = n_rows / 2. * res[1] - np.arange(n_rows) * res[1]

    stop_criteria = 1e-10
    max_iterations = 20
    g_list, first_g = mfi(signals=signals, projections=projections,
                          stop_criteria=stop_criteria,
                          alpha_1=alpha_1, alpha_2=alpha_2,
                          alpha_3=alpha_3, alpha_4=alpha_4,
                          max_iterations=max_iterations)

    # Goodness of fit of the last reconstruction against the last signal set.
    P_tilde = P / 1.0
    f_tilde = signals[-1] / 1.0
    number_sensors = 32.
    chi_square = np.sum((np.dot(P_tilde, g_list[-1].flatten()) - f_tilde) ** 2) / number_sensors
    print("Chi Squared: %f" % chi_square)

    return first_g, g_list, x_array_plot, y_array_plot
/** * insere o Vendas 'vendas' no banco de dados * * @param o * objeto a ser inserido * @return a chave primaria gerada * @author marcos * */ public class VendasDAO { public static int create(Vendas vendas) { if (vendas.getPk_Vendas() != 0) { throw new RuntimeException("Objeto ja existente"); }else if(vendas.getCliente() == null) { throw new RuntimeException("Cliente nao foi adicionado"); }else if(vendas.getFuncionario() == null) { throw new RuntimeException("Funcionario nao foi adicionado"); }else if(vendas.getCliente().getPk() == 0) { throw new RuntimeException("Cliente novo insira"); }else if(vendas.getFuncionario().getPk() == 0) { throw new RuntimeException("Funcionario novo insira"); }else if(vendas.getVendasItens().size() == 0||vendas.getVendasItens() == null) { throw new RuntimeException("Itens nao foram adicionados"); }else if(vendas.getFinanceiroEntradas().size() == 0||vendas.getFinanceiroEntradas() == null) { throw new RuntimeException("Financeiros nao foram adicionados"); } try { Connection conn = BancoDados.createConnection(); PreparedStatement stm = conn.prepareStatement( "insert into vendas (fk_cliente,fk_vendedor,datavenda,numero) values (?,?,?,?)", PreparedStatement.RETURN_GENERATED_KEYS); stm.setObject(1, vendas.getCliente().getPk()); stm.setObject(2, vendas.getFuncionario().getPk()); stm.setTimestamp(3, new Timestamp(vendas.getDataVenda().getTime())); stm.setInt(4, (int)vendas.getNumeroVenda()); stm.execute(); ResultSet result1 = stm.getGeneratedKeys(); result1.next(); int idGerado = result1.getInt(1); vendas.setPk_Vendas(idGerado); for(int i=0;i <vendas.getVendasItens().size();i++) { vendas.getVendasItens().get(i).setFk_venda(idGerado); } for(int i=0;i <vendas.getFinanceiroEntradas().size();i++) { vendas.getFinanceiroEntradas().get(i).setFk_venda(idGerado); } stm.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return 0; } /** * Seleciona todos elementos onde pk no banco de dados for true * * @param pk 
* @return null * @author marcos */ public static Vendas retreave(int pk) { try { Connection conn2 = BancoDados.createConnection(); PreparedStatement stm = conn2.prepareStatement("select * from vendas where pk_venda = ?"); stm.setInt(1, pk); stm.executeQuery(); ResultSet resultset = stm.getResultSet(); if (!resultset.next()) { throw new RuntimeException("Chave primaria nao encontrada"); } return new Vendas(pk, ClienteDAO.retreave(resultset.getInt("fk_cliente")), FuncionarioDAO.retreave(resultset.getInt("fk_vendedor")), resultset.getDate("datavenda"), resultset.getInt("numero"), VendasItensDAO.retreaveAll(resultset.getInt("pk_venda")), FinanceiroEntradasDAO.retreaveAll(resultset.getInt("pk_venda"))); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; } /** * Seleciona todos elementos de todas pk no banco de dados * * @param pk * @return null * @author marcos */ public static ArrayList<Vendas> retreaveAll() { try { Connection conn2 = BancoDados.createConnection(); PreparedStatement stm = conn2.prepareStatement("select * from vendas"); stm.executeQuery(); ResultSet resultset = stm.getResultSet(); ArrayList<Vendas> auxiliar = new ArrayList<>(); while (resultset.next()) { auxiliar.add(new Vendas(resultset.getInt("pk_venda"), ClienteDAO.retreave(resultset.getInt("fk_cliente")), FuncionarioDAO.retreave(resultset.getInt("fk_vendedor")), resultset.getDate("datavenda"), resultset.getInt("numero"))); } return auxiliar; } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; } /** * Seleciona todos elementos de todas pk no banco de dados * * @param pk * @return null * @author marcos */ public static ArrayList<Vendas> retreaveDate(String date) { try { Connection conn2 = BancoDados.createConnection(); PreparedStatement stm = conn2.prepareStatement("SELECT * FROM vendas Where datavenda >= ?"); stm.setDate(1, Date.valueOf(date)); stm.executeQuery(); ResultSet resultset = stm.getResultSet(); 
ArrayList<Vendas> auxiliar = new ArrayList<>(); while (resultset.next()) { auxiliar.add(new Vendas(resultset.getInt("pk_venda"), ClienteDAO.retreave(resultset.getInt("fk_cliente")), FuncionarioDAO.retreave(resultset.getInt("fk_vendedor")), resultset.getDate("datavenda"), resultset.getInt("numero"))); } return auxiliar; } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; } /** * Atualiza o elemento no banco de dados onde a pk informada for true * * @param venda * @author marcos */ public static void update(Vendas venda) { if (venda.getPk_Vendas() == 0) { throw new RuntimeException("Objeto nao existe no BD"); } try { Connection conn = BancoDados.createConnection(); PreparedStatement stm = conn.prepareStatement("UPDATE vendas SET datavenda=?, numero=? WHERE pk_venda=?;"); stm.setTimestamp(1, new Timestamp(venda.getDataVenda().getTime())); stm.setInt(2, venda.getNumeroVenda()); stm.setInt(3,venda.getPk_Vendas()); stm.execute(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } /** * Deleta o elemento no banco de dados onde a pk informada for true * * @param venda * @author marcos */ public static void delete(Vendas venda) { if (venda.getPk_Vendas() == 0) { throw new RuntimeException("Objeto nao existe no BD"); } try { Connection conn = BancoDados.createConnection(); PreparedStatement stm = conn.prepareStatement("DELETE FROM vendas WHERE pk_venda=?;"); stm.setInt(1, venda.getPk_Vendas()); stm.execute(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
The current scientific theory outlining the major events during the development of life Not to be confused with history of evolutionary thought This article is about the evolution of all life on Earth. For more detailed and comprehensive coverage, see evolutionary history of life This timeline of the evolutionary history of life represents the current scientific theory outlining the major events during the development of life on planet Earth. In biology, evolution is any change across successive generations in the heritable characteristics of biological populations. Evolutionary processes give rise to diversity at every level of biological organization, from kingdoms to species, and individual organisms and molecules, such as DNA and proteins. The similarities between all present day organisms indicate the presence of a common ancestor from which all known species, living and extinct, have diverged through the process of evolution. More than 99 percent of all species, amounting to over five billion species,[1] that ever lived on Earth are estimated to be extinct.[2][3] Estimates on the number of Earth's current species range from 10 million to 14 million,[4] of which about 1.2 million have been documented and over 86 percent have not yet been described.[5] However, a May 2016 scientific report estimates that 1 trillion species are currently on Earth, with only one-thousandth of one percent described.[6] While the dates given in this article are estimates based on scientific evidence, there has been controversy between more traditional views of increased biodiversity through a cone of diversity with the passing of time and the view that the basic pattern on Earth has been one of annihilation and diversification and that in certain past times, such as the Cambrian explosion, there was great diversity.[7][8] Extinction [ edit ] Visual representation of the history of life on Earth as a spiral Species go extinct constantly as environments change, as organisms compete 
for environmental niches, and as genetic mutation leads to the rise of new species from older ones. Occasionally biodiversity on Earth takes a hit in the form of a mass extinction in which the extinction rate is much higher than usual.[9] A large extinction-event often represents an accumulation of smaller extinction- events that take place in a relatively brief period of time.[10] The first known mass extinction in earth's history was the Great Oxygenation Event 2.4 billion years ago. That event led to the loss of most of the planet's obligate anaerobes. Researchers have identified five major extinction events in earth's history since:[11] End of the Ordovician: 440 million years ago, 86% of all species lost, including graptolites Late Devonian: 375 million years ago, 75% of species lost, including most trilobites End of the Permian, "The Great Dying": 251 million years ago, 96% of species lost, including tabulate corals, and most extant trees and synapsids End of the Triassic: 200 million years ago, 80% of species lost, including all of the conodonts End of the Cretaceous: 66 million years ago, 76% of species lost, including all of the ammonites, mosasaurs, ichthyosaurs, plesiosaurs, pterosaurs, and nonavian dinosaurs (Dates and percentages represent estimates.) Smaller extinction-events have occurred in the periods between these larger catastrophes, with some standing at the delineation points of the periods and epochs recognized by scientists in geologic time. The Holocene extinction event is currently under way.[12] Factors in mass extinctions include continental drift, changes in atmospheric and marine chemistry, volcanism and other aspects of mountain formation, changes in glaciation, changes in sea level, and impact events.[10] Detailed timeline [ edit ] In this timeline, Ma (for megaannum) means "million years ago," ka (for kiloannum) means "thousand years ago," and ya means "years ago." Hadean Eon [ edit ] 4000 Ma and earlier. 
Archean Eon [ edit ] 4000 Ma – 2500 Ma Proterozoic Eon [ edit ] 2500 Ma – 542 Ma. Contains the Palaeoproterozoic, Mesoproterozoic and Neoproterozoic eras. Phanerozoic Eon [ edit ] 542 Ma – present The Phanerozoic Eon, literally the "period of well-displayed life," marks the appearance in the fossil record of abundant, shell-forming and/or trace-making organisms. It is subdivided into three eras, the Paleozoic, Mesozoic and Cenozoic, which are divided by major mass extinctions. Palaeozoic Era [ edit ] 542 Ma – 251.0 Ma and contains the Cambrian, Ordovician, Silurian, Devonian, Carboniferous and Permian periods. [60] Ferns first appear in the fossil record about 360 million years ago in the late Devonian period. Mesozoic Era [ edit ] Utatsusaurus is the earliest-known form of an is the earliest-known form of an ichthyopterygian From 251.4 Ma to 66 Ma and containing the Triassic, Jurassic and Cretaceous periods. Cenozoic Era [ edit ] 66 Ma – present Icaronycteris appeared 52.2 million years ago The batappeared 52.2 million years ago Grass flowers Historical extinctions [ edit ] See also [ edit ] References [ edit ] Bibliography [ edit ]
// Embedded Framework Generated File: // Date:20/04/2009 // Time:10:32:37 AM // #ifndef _DEVICES_H #define _DEVICES_H // instance devices ids macro definitions #define DRV_CAN_0 0 #define SERVER 0 #define CANOPEN_0 0 #endif
/// Raises a floating point number to the complex power `self`. pub fn expf(self, base: T) -> Complex<T> { // formula: x^(a+bi) = x^a x^bi = x^a e^(b ln(x) i) // = from_polar(x^a, b ln(x)) Complex::from_polar(base.pow(self.re), self.im * base.ln()) }
// finishWithError notifies discovery service about an error. func (c *Callbacker) finishWithError(id string) func(e interface{}) error { return func(e interface{}) error { c.sendEvent(GameFinishedWithError, DiscoveryTopic, e) c.sendEvent(PlayerDone, id, e) return nil } }
Effect of Nanoparticle Synthetic Conditions on Ligand Coating Integrity and Subsequent Nano-Biointeractions. Most current nanoparticle formulations have relatively low clearance efficiency, which may hamper their likelihood for clinical translation. Herein, we sought to compare the clearance and cellular distribution profiles between sub-5 nm, renally-excretable silver sulfide nanoparticles (Ag2S-NPs) synthesized via either a bulk, high temperature, or a microfluidic, room temperature approach. We found that the thermolysis approach led to significant ligand degradation, but the surface coating shell was unaffected by the microfluidic synthesis. We demonstrated that the clearance was improved for Ag2S-NPs with intact ligands, with less uptake in the liver. Moreover, differential distribution in hepatic cells was observed, where Ag2S-NPs with degraded coatings tend to accumulate in Kupffer cells and those with intact coatings are more frequently found in hepatocytes. Therefore, understanding the impact of synthetic processes on ligand integrity and subsequent nano-biointeractions will aid in designing nanoparticle platforms with enhanced clearance and desired distribution profiles.
Urinary stones, active component, U.S. Armed Forces, 2001-2010. Urinary stones can cause debilitating morbidity that impairs the operational effectiveness of affected members of the U.S. Armed Forces. This report describes a "gender shift," i.e., the narrowing of prevalence differences between men and women, similar to trends described in the U.S. civilian population. Rates of incident diagnoses of urinary stones increased in the active component during the past decade - particularly among females. On average, 60 service members were medically evacuated from combat zones each year during the period. Service members with a history of urinary stones should be counseled on reducing risk, particularly in the deployment setting.
package info.jerrinot.subzero.it.hz38;

import info.jerrinot.subzero.it.BaseSmokeTests;

/**
 * Smoke-test entry point for the Hazelcast 3.8 integration module.
 * All test methods are inherited unchanged from {@link BaseSmokeTests};
 * this subclass exists only so the shared suite runs against the 3.8
 * dependency set pulled in by this module.
 */
public class Hazelcast38_SmokeTest extends BaseSmokeTests {
}
package alluxio

import (
	datav1alpha1 "github.com/fluid-cloudnative/fluid/api/v1alpha1"
	"github.com/fluid-cloudnative/fluid/pkg/ddc/base"
	v1 "k8s.io/api/apps/v1"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"sigs.k8s.io/controller-runtime/pkg/client"
	"sigs.k8s.io/controller-runtime/pkg/client/fake"
	"testing"
)

// getTestAlluxioEngine constructs an AlluxioEngine wired to the supplied
// (typically fake) controller-runtime client, for use as a unit-test fixture.
func getTestAlluxioEngine(client client.Client, name string, namespace string) *AlluxioEngine {
	runTime := &datav1alpha1.AlluxioRuntime{}
	// NOTE(review): the error from BuildRuntimeInfo is deliberately discarded;
	// acceptable for a fixture, but a failure here would surface later as a
	// nil runtimeInfo — confirm that is intended.
	runTimeInfo, _ := base.BuildRuntimeInfo(name, namespace, "alluxio", datav1alpha1.Tieredstore{})
	engine := &AlluxioEngine{
		runtime:     runTime,
		name:        name,
		namespace:   namespace,
		Client:      client,
		runtimeInfo: runTimeInfo,
		Log:         NullLogger{},
	}
	return engine
}

// TestAlluxioEngine_GetDeprecatedCommonLabelname is an empty placeholder;
// no behavior is exercised yet.
func TestAlluxioEngine_GetDeprecatedCommonLabelname(t *testing.T) {
}

// TestAlluxioEngine_HasDeprecatedCommonLabelname builds a fake DaemonSet named
// "<engine>-worker" carrying the legacy storage label, then invokes
// HasDeprecatedCommonLabelname against it.
// NOTE(review): the method's return value is ignored, so this test only
// verifies the call does not panic — it asserts nothing about the result.
func TestAlluxioEngine_HasDeprecatedCommonLabelname(t *testing.T) {
	// worker-name = e.name+"-worker"
	daemonset := &v1.DaemonSet{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "hbase-worker",
			Namespace: "fluid",
		},
		Spec: v1.DaemonSetSpec{
			Template: corev1.PodTemplateSpec{
				Spec: corev1.PodSpec{NodeSelector: map[string]string{"data.fluid.io/storage-fluid-hbase": "selector"}},
			},
		},
	}
	runtimeObjs := []runtime.Object{}
	runtimeObjs = append(runtimeObjs, daemonset)
	scheme := runtime.NewScheme()
	scheme.AddKnownTypes(v1.SchemeGroupVersion, daemonset)
	fakeClient := fake.NewFakeClientWithScheme(scheme, runtimeObjs...)
	alluxioEngine := getTestAlluxioEngine(fakeClient, "hbase", "fluid")
	alluxioEngine.HasDeprecatedCommonLabelname()
}
import { Stream } from 'xstream';

/** Per-event guard: returns true when the event should be acted upon. */
export declare type Predicate = (ev: any) => boolean;

/**
 * Controls when `preventDefault()` is called on emitted events:
 * - `boolean`: always (`true`) or never (`false`);
 * - `Predicate`: decided per event;
 * - `Comparator`: key/value pairs that are presumably matched against
 *   properties of the event — confirm against the implementation.
 */
export declare type PreventDefaultOpt = boolean | Predicate | Comparator;

/** Shape-matching object used by {@link PreventDefaultOpt}. */
export declare type Comparator = {
    [key: string]: any;
};

/**
 * Creates an xstream `Stream` of DOM events of `eventName` fired on
 * `element`, optionally using the capture phase, conditionally calling
 * `preventDefault`, and optionally registering the listener as passive.
 */
export declare function fromEvent(element: Element | Document, eventName: string, useCapture?: boolean, preventDefault?: PreventDefaultOpt, passive?: boolean): Stream<Event>;

/** Applies `preventDefault()` to `event` when `preventDefault` says to. */
export declare function preventDefaultConditional(event: any, preventDefault: PreventDefaultOpt): void;
package ru.package01;

import java.rmi.Remote;
import java.rmi.RemoteException;

/**
 * Remote RMI contract exposing a single echo operation.
 */
public interface EchoInterface extends Remote {

    /**
     * Echoes the supplied string back to the caller.
     *
     * @param data text to echo
     * @return the echoed text
     * @throws RemoteException on RMI communication failure
     */
    String echo(String data) throws RemoteException;
}
/* Appends to 'b' the OXM formats that expresses 'match', without header or
 * padding.
 *
 * OXM differs slightly among versions of OpenFlow.  Specify the OpenFlow
 * version in use as 'version'.
 *
 * This function can cause 'b''s data to be reallocated. */
void
oxm_put_raw(struct ofpbuf *b, const struct match *match,
            enum ofp_version version)
{
    /* Delegate to the shared NXM/OXM serializer.  The two trailing zeros
     * are presumably the cookie and cookie mask, unused for plain OXM
     * output — confirm against nx_put_raw()'s signature. */
    nx_put_raw(b, version, match, 0, 0);
}
/**
 * Tests to make sure we get a properly constructed map of samples to sequencing objects.
 */
@Test
@WithMockUser(username = "admin", roles = "ADMIN")
public void testGetUniqueSamplesForSequencingObjectsSuccess() {
	// Load the fixture entities under test.
	SequencingObject firstObject = objectService.read(1L);
	SequencingObject secondObject = objectService.read(2L);
	Sample firstSample = sampleService.read(1L);
	Sample secondSample = sampleService.read(2L);

	Map<Sample, SequencingObject> sampleMap =
			objectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(firstObject, secondObject));

	// The fixture data cross-links the entities: sample 1 owns object 2
	// and sample 2 owns object 1, with exactly one entry per sample.
	assertEquals("Incorrect number of results returned in sample map", 2, sampleMap.size());
	assertEquals("Incorrect sequencing object mapped to sample", secondObject.getId(), sampleMap.get(firstSample).getId());
	assertEquals("Incorrect sequencing object mapped to sample", firstObject.getId(), sampleMap.get(secondSample).getId());
}
// New creates a new default seccomp configuration instance
func New() *Config {
	return &Config{
		enabled: seccomp.IsEnabled(),      // whether seccomp is available/enabled in this environment
		profile: seccomp.DefaultProfile(), // the library-provided default filter profile
	}
}
/**
 * Abstract base class for chess figures.
 *
 * @author Sergei Shangraeev ([email protected]).
 * @version 1.0.
 */
public abstract class Figure {
    /**
     * Board side length in cells.
     * NOTE(review): the name contains a typo ("LENGHT" for "LENGTH"); it is
     * kept as-is because code outside this file may reference it.
     */
    static final int LENGHTDESK = 8;

    // TODO(review): implement movement calculation for concrete figures, e.g.
    //   Cell[] way(Cell source, Cell dest) throws ImpossibleMoveException
    // The previous commented-out sketch here was syntactically invalid
    // (array-from-tuple syntax, "throw" instead of "throws") and has been
    // replaced by this note.
}
Performance of Tikhonov Regularization with Different Constraints in Electrocardiographic Imaging The purpose of electrocardiographic imaging (ECGI) is to obtain high-resolution information about the electrical activity of the heart. ECGI is the estimation of heart’s electrical activity using body surface potentials and a mathematical model that includes the torso, heart geometry, and tissue conductivity information between these surfaces. Tikhonov regularization is one of the most preferred methods in ECGI due to its fast and straightforward applicability. In this study, we employed zero-order, second-order, and fractional Tikhonov solution techniques for the ECGI problem and assessed their accuracy and robustness against the measurement noise.
A unique population in northern Finland has helped reveal that schizophrenia, some autism spectrum disorders and other forms of cognitive impairment may all share a common genetic pathway. In Finland, there exist several small communities that used to live for years in isolation. Amongst the descendants of these groups, otherwise rare genes occur more regularly than elsewhere in the country because a level of inbreeding was almost inevitable. Nelson Freimer at the University of California, Los Angeles, and colleagues studied one of these communities, where schizophrenia and other neurological disorders are unusually common. His team first searched for any genetic deletions – chunks of DNA that are missing from a chromosome – that were more common in this group than in the general population. They found a promising candidate on chromosome 22. A deletion on this chromosome was present in 18 of 173 people from this isolated group, but in just one of the 1586 samples taken from people spread throughout the rest of Finland. Tests confirmed that people with schizophrenia or cognitive impairments were more likely to be missing this chunk of DNA. Identifying this deletion as a risk factor for schizophrenia and cognitive impairment puts us one step closer to understanding the biological processes at the root of such complex syndromes, says Freimer. Tying it all together Further investigation revealed that the deleted region normally contains a gene that makes an enzyme called topoisomerase III beta (TOP3B). This enzyme works to temporarily chop up DNA strands for a process called gene transcription, during which a particular part of the DNA is copied into RNA. A team led by Weidong Wang from the US National Institute on Aging in Baltimore, Maryland has been exploring the function of TOP3B in more detail. They say that it also has a role in regulating the winding and unwinding of RNA – the first enzyme found to do this in animal cells.
In addition, they have found that TOP3B has a role in the disruptions that lead to fragile X syndrome. Like schizophrenia, fragile X has been linked to abnormal brain development. Although the researchers do not yet know how disruptions in TOP3B may affect brain development, they say the most likely explanation is that it changes the shape of the RNA, and thereby disturbs which RNA molecules get copied into proteins. This could cause neurons to grow and connect in a haphazard way. Disrupted neuronal connections have been associated with schizophrenia, autism and intellectual disability, so this could explain why a deletion of the TOP3B gene is associated with all three, says Wang. Journal reference: Nature Neuroscience, DOI:10.1038/nn.3484; Nature Neuroscience, DOI: 10.1038/nn.3479
// -*- C++ -*-
/**
 * @file zmq_psdd_data_i.cpp
 * @author <NAME>
 *
 * @brief ZMQ Message data for PSDD implementation
 *
 * @copyright Copyright (c) Remedy IT Expertise BV
 */
#include "psdd4ccm/logger/psdd4ccm_log.h"

#include <stdexcept>

#include "zmq_psdd_data_i.h"

#include "ace/Malloc_Allocator.h"

namespace PSDD
{
  namespace ZMQ
  {
    /**
     * Custom key allocator for ZMQ keyframe CDR streams.
     * As ZMQ performs binary matching on subscription keys
     * we need zero-ed memory allocated for the CDR buffers
     * to keep keyframe data consistent.
     */
    class KeyAllocator : public ACE_New_Allocator
    {
    public:
      virtual void *malloc (size_t nbytes)
      {
        // calloc() zero-fills, which is the whole point of this allocator.
        return this->calloc (nbytes);
      }
    };

    // static singleton allocator instance
    static KeyAllocator key_allocator__;

    // Zero-copy deleter handed to zmq_msg_init_data(): ZMQ invokes this when
    // it is done with the frame; 'hint' carries the ACE_OutputCDR that owns
    // the underlying buffer, which is destroyed here.
    static void
    free_msg_data (void* /*data*/, void* hint)
    {
      std::unique_ptr<ACE_OutputCDR> safe_ptr;
      safe_ptr.reset (static_cast<ACE_OutputCDR*> (hint));
    }

    PSDataOut_Impl::PSDataOut_Impl (bool msg_envelope)
      : use_msg_envelope_ (msg_envelope)
    {}

    // CDR stream the subscription key is marshaled into.
    ACE_OutputCDR&
    PSDataOut_Impl::key_out ()
    {
      return *this->key_os_;
    }

    // CDR stream the payload is marshaled into.  Without an envelope the key
    // stream doubles as the data stream (single-frame messages).
    ACE_OutputCDR&
    PSDataOut_Impl::data_out ()
    {
      return *(this->use_msg_envelope_ ? this->data_os_ : this->key_os_);
    }

    // Allocates fresh CDR streams for a new outgoing message.  The key stream
    // uses the zero-filling KeyAllocator so ZMQ's binary key matching stays
    // deterministic; the data stream (envelope mode only) uses default
    // allocation.
    void
    PSDataOut_Impl::start_message ()
    {
      try
      {
        this->key_os_ = std::make_unique<ACE_OutputCDR> (
            (size_t)0, ACE_CDR_BYTE_ORDER, &key_allocator__);
        if (this->use_msg_envelope_)
        {
          this->data_os_ = std::make_unique<ACE_OutputCDR> (
              (size_t)0);
        }
      }
      catch (const std::bad_alloc&)
      {
        throw ::CCM_PSDD::InternalError (
            "PSDataOut_Impl::start_message", "NO_MEMORY");
      }
    }

    // Switches the CDR streams to the requested byte order when it differs
    // from the native order.
    // NOTE(review): data_os_ is only allocated in envelope mode; calling this
    // without an envelope would dereference a null data_os_ — confirm callers
    // only invoke it in envelope mode.
    void
    PSDataOut_Impl::set_byte_order (EByteOrder bo)
    {
      if (ACE_CDR_BYTE_ORDER != static_cast<int> (bo))
      {
        this->key_os_->reset_byte_order (static_cast<int> (bo));
        this->data_os_->reset_byte_order (static_cast<int> (bo));
      }
    }

    // Finalizes the outgoing message: consolidates each CDR stream into one
    // contiguous buffer, hands the buffers to ZMQ zero-copy frames, and
    // transfers buffer ownership to ZMQ (freed later via free_msg_data).
    void
    PSDataOut_Impl::finish_message (zmq_msg_t* keyframe,
                                    SafeMsg& skeyframe,
                                    zmq_msg_t* dataframe,
                                    SafeMsg& sdataframe)
    {
      PSDD4CCM_LOG_DEBUG (
          "PSDataOut_Impl::finish_message" <<
          " - storing keyframe data");

      this->consolidate_message_frame (*this->key_os_);
      this->finish_message_frame (
          this->key_os_.get (), keyframe);
      // safeguard initialized message
      skeyframe.reset (keyframe);
      // release resources
      this->key_os_.release (); // ZMQ will free later

      if (this->use_msg_envelope_)
      {
        this->consolidate_message_frame (*this->data_os_);

        PSDD4CCM_LOG_DEBUG (
            "PSDataOut_Impl::finish_message" <<
            " - data length=" << this->data_os_->length ());

        // only initialize dataframe if there is actually something serialized
        if (this->data_os_->length () > 0)
        {
          PSDD4CCM_LOG_DEBUG (
              "PSDataOut_Impl::finish_message" <<
              " - storing dataframe data");

          this->finish_message_frame (
              this->data_os_.get (), dataframe);
          // safeguard initialized message
          sdataframe.reset (dataframe);
          // release resources
          this->data_os_.release (); // ZMQ will free later
        }
      }
    }

    // Collapses a possibly chained CDR buffer into a single contiguous
    // message block so ZMQ can ship it as one frame.
    void
    PSDataOut_Impl::consolidate_message_frame (
        ACE_OutputCDR& os)
    {
      if (os.consolidate () != 0)
      {
        PSDD4CCM_LOG_PANIC (
            "PSDataOut_Impl::consolidate_message_frame" <<
            " - failed to consolidate output stream");

        throw ::CCM_PSDD::InternalError (
            "PSDataOut_Impl::consolidate_message_frame", "NO_MEMORY");
      }
    }

    // Wraps the consolidated CDR buffer in a zero-copy ZMQ frame.  The CDR
    // stream pointer is passed as the deallocation hint; 'os' must therefore
    // stay alive until ZMQ calls free_msg_data.
    void
    PSDataOut_Impl::finish_message_frame (
        ACE_OutputCDR* os,
        zmq_msg_t* msgframe)
    {
      const ACE_Message_Block *os_mb = os->begin ();
      void* data = const_cast<char*> (os_mb->rd_ptr ());
      size_t datalen = os_mb->length ();

      PSDD4CCM_LOG_DEBUG (
          "PSDataOut_Impl::finish_message_frame" <<
          " - data=" << data <<
          " datalen=" << datalen);
      PSDD4CCM_LOG_DEBUG (
          "PSDataOut_Impl::finish_message_frame" <<
          " | " << x11_logger::hexdump ((char*)data, (uint32_t)datalen));

      if (::zmq_msg_init_data (msgframe,
                               data,
                               datalen,
                               free_msg_data,
                               os) != 0)
      {
        PSDD4CCM_LOG_PANIC (
            "PSDataOut_Impl::finish_message_frame" <<
            " - failed to initialize ZMQ message [" <<
            ::zmq_strerror (::zmq_errno ()) << "]");

        throw ::CCM_PSDD::InternalError (
            "PSDataOut_Impl::finish_message_frame", "NO_MEMORY");
      }
    }

    PSDataIn_Impl::PSDataIn_Impl (bool msg_envelope)
      : use_msg_envelope_ (msg_envelope)
    {
    }

    PSDataIn_Impl::~PSDataIn_Impl ()
    {
    }

    // CDR stream positioned over the received key frame.
    ACE_InputCDR&
    PSDataIn_Impl::key_in ()
    {
      return *this->is_key_;
    }

    // CDR stream positioned over the payload; without an envelope the key
    // frame carries the payload as well.
    ACE_InputCDR&
    PSDataIn_Impl::data_in ()
    {
      return *(this->use_msg_envelope_ ? this->is_data_ : this->is_key_);
    }

    // Copies the received ZMQ frame(s) into aligned ACE message blocks and
    // builds CDR input streams over them.  The local blocks are copied into
    // the streams (presumed ACE_InputCDR semantics — confirm), so the stack
    // blocks may safely go out of scope.
    void
    PSDataIn_Impl::start_message (zmq_msg_t* key, zmq_msg_t* data)
    {
      PSDD4CCM_LOG_DEBUG (
          "PSDataIn_Impl::start_message" <<
          " - reading keyframe data | " <<
          x11_logger::hexdump (
              static_cast<char*> (::zmq_msg_data (key)),
              (uint32_t)::zmq_msg_size (key)));

      // initialize data streams
      size_t framelen = ::zmq_msg_size (key);
      try
      {
        // Create message block for the msg frame. Ensure that it is
        // aligned.
        ACE_Message_Block mb (ACE_CDR::MAX_ALIGNMENT + framelen);
        ACE_CDR::mb_align (&mb);
        // copy the message frame
        mb.copy (static_cast<char*> (::zmq_msg_data (key)), framelen);
        // create CDR input stream from message block
        this->is_key_ = std::make_unique<ACE_InputCDR> (
            &mb);
      }
      catch (const std::bad_alloc&)
      {
        throw ::CCM_PSDD::InternalError (
            "PSDataIn_Impl::start_message", "NO_MEMORY");
      }

      PSDD4CCM_LOG_DEBUG (
          "PSDataIn_Impl::start_message" <<
          " - data read | " <<
          x11_logger::hexdump (
              static_cast<char*> (this->is_key_->rd_ptr()),
              (uint32_t)this->is_key_->length ()));

      if (this->use_msg_envelope_ && data)
      {
        PSDD4CCM_LOG_DEBUG (
            "PSDataIn_Impl::start_message" <<
            "- reading dataframe data | " <<
            x11_logger::hexdump (
                static_cast<char*> (::zmq_msg_data (data)),
                (uint32_t)::zmq_msg_size (data)));

        size_t framelen = ::zmq_msg_size (data);
        try
        {
          // Create message block for the msg frame. Ensure that it is
          // aligned.
          ACE_Message_Block mb (ACE_CDR::MAX_ALIGNMENT + framelen);
          ACE_CDR::mb_align (&mb);
          // copy the message frame
          mb.copy (static_cast<char*> (::zmq_msg_data (data)), framelen);
          // create CDR input stream from message block
          this->is_data_ = std::make_unique<ACE_InputCDR> (
              &mb);
        }
        catch (const std::bad_alloc&)
        {
          throw ::CCM_PSDD::InternalError (
              "PSDataIn_Impl::start_message", "NO_MEMORY");
        }

        PSDD4CCM_LOG_DEBUG (
            "PSDataIn_Impl::start_message" <<
            " - data read | " <<
            x11_logger::hexdump (
                static_cast<char*> (this->is_data_->rd_ptr ()),
                (uint32_t)this->is_data_->length ()));
      }
    }

    // Switches the CDR input streams to the sender's byte order when it
    // differs from the native order.
    // NOTE(review): is_data_ is only created in envelope mode with a data
    // frame present; otherwise this dereferences a null pointer — confirm
    // callers guard against that.
    void
    PSDataIn_Impl::set_byte_order (EByteOrder bo)
    {
      if (ACE_CDR_BYTE_ORDER != static_cast<int> (bo))
      {
        this->is_key_->reset_byte_order (static_cast<int> (bo));
        this->is_data_->reset_byte_order (static_cast<int> (bo));
      }
    }
  }
}
/** Add a new label to the list. You should call validateLabels() after calling this. */
public void addLabel(GUILabel lab) {
    lab.parse();
    labels.add(lab);
    // The label was appended, so its row index is the new last position.
    final int newRow = labels.size() - 1;
    // Tell the table model exactly one row was inserted, then notify the
    // owning component that the label list changed.
    fireTableRowsInserted(newRow, newRow);
    parent.labelListChanged();
}
<gh_stars>1-10 package com.gmail.jakekinsella.robot; /** * Created by jakekinsella on 1/6/17. */ public class Angle { private double degrees; public Angle(double degrees) { this.degrees = degrees; } public Angle(double x1, double y1, double x2, double y2) { this.degrees = Math.toDegrees(Math.atan2(y2 - y1, x2 - x1)); } public double getDegrees() { return this.degrees; } public double getRadians() { return Math.toRadians(this.getDegrees()); } // degrees 0-360 public double getNormalizedDegrees() { if (this.getDegrees() >= 0) { return this.getDegrees() % 360; } return 360 + (this.getDegrees() % 360); } // Only used in PaddedLine public double getPaddedLineRadians() { return 1.5 * Math.PI - this.getRadians(); } // Random hack public double getRobotControlRadians() { return 2 * Math.PI - this.getRadians(); } public String toString() { return String.valueOf(this.getDegrees()); } public Angle calculateAngleBetween(Angle angle) { if (angle.getDegrees() > this.getDegrees()) { return new Angle(angle.getNormalizedDegrees() - this.getNormalizedDegrees()); } return new Angle(angle.getDegrees() - this.getDegrees()); } public boolean equals(Angle angle) { return this.getDegrees() == angle.getDegrees(); } public boolean checkIfAnglesClose(Angle angle, final double TOLERANCE) { return Math.abs(angle.getNormalizedDegrees() - this.getNormalizedDegrees()) < TOLERANCE; } }
def chr_comp(keys):
    """Sort key for chromosome identifiers.

    Accepts a list/tuple (uses the first element), a Chromosome object
    (uses its ``number``), or a plain string.  Numeric chromosomes sort by
    their number; X, Y and mitochondrial chromosomes sort after them
    (98, 99, 100), and anything unrecognized sorts last (101).
    """
    # Pull the raw identifier out of whichever container we were handed.
    if isinstance(keys, (list, tuple)):
        raw = keys[0]
    elif isinstance(keys, compare_snps_to_genome.Chromosome):
        raw = keys.number
    else:
        raw = keys

    # Normalize: lowercase, drop underscores, strip a leading "chr" prefix.
    normalized = raw.lower().replace("_", "")
    name = normalized[3:] if normalized.startswith("chr") else normalized

    if name == 'x':
        return 98
    if name == 'y':
        return 99
    if name.startswith('m'):  # covers both "m" and "mt"
        return 100
    try:
        return int(name)
    except ValueError:
        return 101
On Quantifying the Complexity of IoT Traffic A single quantitative measure for characterizing the complexity of Internet-of-Things (IoT) network traffic remains elusive. Such a quantifier holds value in distinguishing the relative complexities of IoT network traffic in an increasingly heterogeneous IoT device landscape, enabling network operators to anticipate and provision for, as well as manage existing network infrastructure. In this paper, we apply relevant contemporary techniques used to quantify the complexity of regular network traffic to a modern, comprehensive empirical IoT network traffic dataset. The results were interpreted to discover their merits and pitfalls. Based on our findings, we contend there is a need to develop new quantitative measures that can accommodate the diversity of IoT traffic.
#include <bits/stdc++.h>
using namespace std;
using ll = long long;

// Reads n values (each 1 or 2) and a block size k.  For every residue class
// i in 1..k, it considers removing all elements at positions i, i+k, i+2k,...
// and reports the maximum achievable |#ones - #twos| among the remaining
// elements.
//
// Fixes: replaced the non-standard variable-length array `ll a[n+10]` with
// std::vector, removed the `#define re return` macro, and used llabs() so
// the absolute value is taken at long long width.
int main() {
    ll n, k;
    cin >> n >> k;

    vector<ll> a(n + 1);  // 1-indexed to keep the modular stepping simple
    ll ones = 0, twos = 0;
    for (ll i = 1; i <= n; ++i) {
        cin >> a[i];
        if (a[i] == 1) {
            ++ones;
        } else {
            ++twos;
        }
    }

    ll best = 0;
    for (ll i = 1; i <= k; ++i) {
        // Count how many ones/twos the residue class i would remove.
        ll removedOnes = 0, removedTwos = 0;
        for (ll j = i; j <= n; j += k) {
            if (a[j] == 1) {
                ++removedOnes;
            } else {
                ++removedTwos;
            }
        }
        best = max(best, llabs((ones - removedOnes) - (twos - removedTwos)));
    }
    cout << best;
    return 0;
}
import { VoidFunctionComponent } from 'react';

import Example from '../../../components/Example';
import CodeHighlight from '../../../components/CodeHighlight';

import FixedWidthExample from './FixedWidthExample';
// `?raw` imports the example's source text for display alongside the live demo.
import FixedWidthExampleRaw from './FixedWidthExample?raw';

/**
 * Documentation section for fixed-width icons: renders the live example
 * component followed by its own highlighted source code.
 */
const FixedWidth: VoidFunctionComponent = () => (
    <Example id="fixed-width" title="Fixed Width Icons">
        <FixedWidthExample/>
        <CodeHighlight>
            { FixedWidthExampleRaw }
        </CodeHighlight>
    </Example>
);

export default FixedWidth;
#include<iostream>
#include<string>
using namespace std;

// Demonstrates the `mutable` keyword: a mutable member may be modified even
// through a const object / inside const member functions.
class ABC{
    private :
        mutable int counter_;  // mutable: writable even when *this is const
    public:
        // explicit blocks implicit int -> ABC conversions.
        explicit ABC(int x=0) : counter_(x) {}

        // Const member function that nevertheless mutates state — legal only
        // because counter_ is declared mutable.
        void change() const{
            counter_ += 10;
        }

        // Const read accessor.
        int display() const{
            return counter_;
        }
};

int main(){
    const ABC abc(100);
    cout << "abc contains : "<< abc.display();
    // Allowed despite abc being const, thanks to the mutable member.
    abc.change();
    cout << "\nabc now contains : "<< abc.display();
    return 0;
}
/**
 * Created by George Cimpoies on 11/4/2016.
 *
 * Factory that maps an insect type name onto a concrete {@link Animal}.
 */
public class InsectFactory extends SpeciesFactory {

    /**
     * @param type insect type constant (see {@code Constants.Animals.Insects})
     * @return a fresh instance of the matching insect, or {@code null} when
     *         the type is not recognized
     */
    @Override
    public Animal getAnimal(String type) {
        // Constant-first equals() keeps every comparison null-safe.
        if (Constants.Animals.Insects.Butterfly.equals(type)) {
            return new Butterfly();
        }
        if (Constants.Animals.Insects.Cockroach.equals(type)) {
            return new Cockroach();
        }
        if (Constants.Animals.Insects.Spider.equals(type)) {
            return new Spider();
        }
        return null;
    }
}
package io.choerodon.kb.app.service.impl;

import com.vladsch.flexmark.convert.html.FlexmarkHtmlParser;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.kb.api.dao.*;
import io.choerodon.kb.app.service.PageService;
import io.choerodon.kb.app.service.WorkSpaceService;
import io.choerodon.kb.domain.kb.repository.PageContentRepository;
import io.choerodon.kb.domain.kb.repository.PageRepository;
import io.choerodon.kb.infra.common.BaseStage;
import io.choerodon.kb.infra.common.utils.PdfUtil;
import io.choerodon.kb.infra.dataobject.PageContentDO;
import io.choerodon.kb.infra.dataobject.PageDO;
import io.choerodon.kb.infra.mapper.PageContentMapper;
import org.apache.commons.io.IOUtils;
import org.apache.pdfbox.util.Charsets;
import org.docx4j.Docx4J;
import org.docx4j.convert.out.HTMLSettings;
import org.docx4j.openpackaging.packages.WordprocessingMLPackage;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Knowledge-base page service: page creation, draft auto-save, and
 * import/export between Markdown, PDF, and docx.
 *
 * Created by Zenger on 2019/4/30.
 */
@Service
@Transactional(rollbackFor = Exception.class)
public class PageServiceImpl implements PageService {

    public static final Logger LOGGER = LoggerFactory.getLogger(PageServiceImpl.class);
    public static final String SUFFIX_DOCX = ".docx";
    public static final String FILE_ILLEGAL = "error.importDocx2Md.fileIllegal";

    @Value("${services.attachment.url}")
    private String attachmentUrl;
    @Autowired
    private WorkSpaceService workSpaceService;
    @Autowired
    private PageRepository pageRepository;
    @Autowired
    private PageContentRepository pageContentRepository;
    @Autowired
    private PageContentMapper pageContentMapper;
    @Autowired
    private RestHighLevelClient highLevelClient;

    /**
     * Returns whether the current user is the creator of the given page.
     */
    @Override
    public Boolean checkPageCreate(Long id) {
        PageDO pageDO = pageRepository.selectById(id);
        CustomUserDetails customUserDetails = DetailsHelper.getUserDetails();
        return pageDO.getCreatedBy().equals(customUserDetails.getUserId());
    }

    /**
     * Renders the page's Markdown content as a PDF and streams it to the
     * HTTP response.
     */
    @Override
    public void exportMd2Pdf(Long organizationId, Long projectId, Long pageId, HttpServletResponse response) {
        PageInfo pageInfo = pageRepository.queryInfoById(organizationId, projectId, pageId);
        PdfUtil.markdown2Pdf(pageInfo.getTitle(), pageInfo.getContent(), response);
    }

    /**
     * Converts an uploaded .docx file to Markdown (docx -> HTML via docx4j,
     * then HTML -> Markdown via flexmark).
     *
     * @throws CommonException if the file is not a .docx or conversion fails
     */
    @Override
    public String importDocx2Md(Long organizationId, Long projectId, MultipartFile file, String type) {
        if (!file.getOriginalFilename().endsWith(SUFFIX_DOCX)) {
            throw new CommonException(FILE_ILLEGAL);
        }
        WordprocessingMLPackage wordMLPackage;
        try {
            wordMLPackage = Docx4J.load(file.getInputStream());
            HTMLSettings htmlSettings = Docx4J.createHTMLSettings();
            htmlSettings.setWmlPackage(wordMLPackage);
            ByteArrayOutputStream swapStream = new ByteArrayOutputStream();
            Docx4J.toHTML(htmlSettings, swapStream, Docx4J.FLAG_EXPORT_PREFER_XSL);
            ByteArrayInputStream inputStream = new ByteArrayInputStream(swapStream.toByteArray());
            String html = IOUtils.toString(inputStream, String.valueOf(Charsets.UTF_8));
            String markdown = FlexmarkHtmlParser.parse(html);
            return markdown;
        } catch (Exception e) {
            throw new CommonException(e.getMessage());
        }
    }

    /**
     * Creates a page together with its workspace, then writes the real
     * content as a follow-up update.
     */
    @Override
    public PageDTO createPage(Long resourceId, PageCreateDTO create, String type) {
        // Create the page and its workspace (the first version is created empty).
        PageUpdateDTO pageUpdateDTO = new PageUpdateDTO();
        pageUpdateDTO.setContent(create.getContent());
        create.setContent("");
        PageDTO pageDTO = workSpaceService.create(resourceId, create, type);
        // Update the page content with the real text.
        pageUpdateDTO.setMinorEdit(false);
        pageUpdateDTO.setObjectVersionNumber(pageDTO.getObjectVersionNumber());
        return workSpaceService.update(resourceId, pageDTO.getWorkSpace().getId(), pageUpdateDTO, type);
    }

    /**
     * Saves the in-progress draft for a page; drafts use the sentinel
     * version id 0.
     */
    @Override
    public void autoSavePage(Long organizationId, Long projectId, Long pageId, PageAutoSaveDTO autoSave) {
        PageContentDO pageContent = queryDraftContent(organizationId, projectId, pageId);
        if (pageContent == null) {
            // Create the draft content.
            pageContent = new PageContentDO();
            pageContent.setPageId(pageId);
            pageContent.setVersionId(0L);
            pageContent.setContent(autoSave.getContent());
            pageContentRepository.create(pageContent);
        } else {
            // Update the existing draft content.
            pageContent.setContent(autoSave.getContent());
            pageContentRepository.update(pageContent);
        }
    }

    /**
     * Fetches the current user's draft (version id 0) for a page, or null if
     * no draft exists.
     */
    @Override
    public PageContentDO queryDraftContent(Long organizationId, Long projectId, Long pageId) {
        pageRepository.checkById(organizationId, projectId, pageId);
        CustomUserDetails userDetails = DetailsHelper.getUserDetails();
        Long userId = userDetails.getUserId();
        PageContentDO pageContent = new PageContentDO();
        pageContent.setPageId(pageId);
        pageContent.setVersionId(0L);
        pageContent.setCreatedBy(userId);
        List<PageContentDO> contents = pageContentMapper.select(pageContent);
        return contents.isEmpty() ? null : contents.get(0);
    }

    /**
     * Deletes the current user's draft (version id 0) for a page.
     */
    @Override
    public void deleteDraftContent(Long organizationId, Long projectId, Long pageId) {
        pageRepository.checkById(organizationId, projectId, pageId);
        CustomUserDetails userDetails = DetailsHelper.getUserDetails();
        Long userId = userDetails.getUserId();
        PageContentDO pageContent = new PageContentDO();
        pageContent.setPageId(pageId);
        pageContent.setVersionId(0L);
        pageContent.setCreatedBy(userId);
        pageContentMapper.delete(pageContent);
    }
}
"""Image-classifier training script.

Builds a vgg16 or resnet50 transfer-learning model from command-line
arguments, trains it with periodic validation, evaluates on the test set,
and saves a restorable checkpoint.
"""
import utils
import matplotlib.pyplot as plt
from workspace_utils import keep_awake
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
import json
import sys  # fix: sys.exit() was called below without this import
from collections import OrderedDict

device = None
args = utils.get_input_args('train')
if (args.gpu):
    device = 'cuda'
else:
    device = 'cpu'

data_dir = args.data_dir
train_dir = data_dir + '/train'
valid_dir = data_dir + '/valid'
test_dir = data_dir + '/test'

# Standard ImageNet normalization; training adds augmentation.
train_transforms = transforms.Compose([transforms.RandomRotation(30),
                                       transforms.RandomResizedCrop(224),
                                       transforms.RandomHorizontalFlip(),
                                       transforms.ToTensor(),
                                       transforms.Normalize([0.485, 0.456, 0.406],
                                                            [0.229, 0.224, 0.225])])
valid_transforms = transforms.Compose([transforms.Resize(256),
                                       transforms.CenterCrop(224),
                                       transforms.ToTensor(),
                                       transforms.Normalize([0.485, 0.456, 0.406],
                                                            [0.229, 0.224, 0.225])])
test_transforms = transforms.Compose([transforms.Resize(256),
                                      transforms.CenterCrop(224),
                                      transforms.ToTensor(),
                                      transforms.Normalize([0.485, 0.456, 0.406],
                                                           [0.229, 0.224, 0.225])])

train_datasets = datasets.ImageFolder(train_dir, transform=train_transforms)
valid_datasets = datasets.ImageFolder(valid_dir, transform=valid_transforms)
test_datasets = datasets.ImageFolder(test_dir, transform=test_transforms)

trainloaders = torch.utils.data.DataLoader(train_datasets, batch_size=32, shuffle=True)
validloaders = torch.utils.data.DataLoader(valid_datasets, batch_size=32)
testloaders = torch.utils.data.DataLoader(test_datasets, batch_size=32)


def build_vgg16():
    """Return a frozen pretrained vgg16 with a new classifier head built
    from args.hidden_units, ending in a 102-way log-softmax."""
    od = OrderedDict()
    model = models.vgg16(pretrained=True)
    for params in model.parameters():
        params.requires_grad = False  # freeze the convolutional backbone
    count = 1
    last_hidden_unit = None
    for hidden_unit in args.hidden_units:
        if count == 1:
            od['fc'+str(count)] = nn.Linear(25088, hidden_unit)  # vgg16 feature size
        else:
            od['fc'+str(count)] = nn.Linear(last_hidden_unit, hidden_unit)
        last_hidden_unit = hidden_unit
        od['relu'+str(count)] = nn.ReLU(inplace=True)
        od['dropout'+str(count)] = nn.Dropout()
        count += 1
    od['fc'+str(count)] = nn.Linear(last_hidden_unit, 102)
    od['softmax'] = nn.LogSoftmax(dim=1)
    classifier = nn.Sequential(od)
    model.classifier = classifier
    return model


def build_resnet50():
    """Return a frozen pretrained resnet50 with a new fully-connected head
    built from args.hidden_units, ending in a 102-way log-softmax."""
    od = OrderedDict()
    model = models.resnet50(pretrained=True)
    for params in model.parameters():
        params.requires_grad = False  # freeze the convolutional backbone
    count = 1
    last_hidden_unit = None
    for hidden_unit in args.hidden_units:
        if count == 1:
            od['fc'+str(count)] = nn.Linear(2048, hidden_unit)  # resnet50 feature size
        else:
            od['fc'+str(count)] = nn.Linear(last_hidden_unit, hidden_unit)
        last_hidden_unit = hidden_unit
        od['relu'+str(count)] = nn.ReLU(inplace=True)
        od['dropout'+str(count)] = nn.Dropout()
        count += 1
    od['fc'+str(count)] = nn.Linear(last_hidden_unit, 102)
    od['softmax'] = nn.LogSoftmax(dim=1)
    classifier = nn.Sequential(od)
    model.fc = classifier
    return model


def validate(validloaders, device, model, criterion):
    """Return (mean loss, mean accuracy) of `model` over `validloaders`.

    Caller is responsible for model.eval() / torch.no_grad().
    """
    valid_loss = 0
    valid_accuracy = 0
    for images, labels in validloaders:
        images, labels = images.to(device), labels.to(device)
        outputs = model.forward(images)
        valid_loss += criterion(outputs, labels).item()
        ps = torch.exp(outputs)  # log-probabilities -> probabilities
        equality = (ps.max(dim=1)[1] == labels.data)
        valid_accuracy += equality.type(torch.FloatTensor).mean()
    return valid_loss/len(validloaders), valid_accuracy/len(validloaders)


def train():
    """Train the global `model` for args.epoch epochs, printing validation
    metrics every `print_every` steps."""
    epoch = args.epoch
    print_every = 40
    step = 0
    running_loss = 0
    for e in keep_awake(range(epoch)):
        model.train()
        for images, labels in trainloaders:
            step += 1
            images, labels = images.to(device), labels.to(device)
            optimizer.zero_grad()
            outputs = model.forward(images)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
            if step % print_every == 0:
                model.eval()
                with torch.no_grad():
                    valid_loss, valid_accuracy = validate(validloaders, device, model, criterion)
                print("Epoch: {}/{}.. ".format(e+1, epoch),
                      "Training Loss: {:.3f}.. ".format(running_loss/print_every),
                      "Validation Loss: {:.3f}.. ".format(valid_loss),
                      "Validation Accuracy: {:.3f}".format(valid_accuracy))
                running_loss = 0
                model.train()


def test():
    """Evaluate the global `model` on the held-out test set and print accuracy."""
    model.eval()
    with torch.no_grad():
        test_accuracy = 0
        num = 0
        for images, labels in testloaders:
            num += 1
            images, labels = images.to(device), labels.to(device)
            outputs = model.forward(images)
            ps = torch.exp(outputs)
            equality = (ps.max(dim=1)[1] == labels.data)
            test_accuracy += equality.type(torch.FloatTensor).mean()
        print("Test Accuracy: {:.2f}%".format(test_accuracy/num*100))


def save(save_dir):
    """Write a restorable checkpoint for the trained model to `save_dir`."""
    model.class_to_idx = train_datasets.class_to_idx
    if (args.arch == 'vgg16'):
        checkpoint = {
            'model': 'vgg16',
            'epoch': args.epoch,
            'optimizer_state_dict': optimizer.state_dict(),
            'learn_rate': args.learning_rate,
            'class_to_idx': model.class_to_idx,
            'classifier': model.classifier,
            # fix: the vgg16 checkpoint previously omitted the trained
            # weights (resnet50 saved them), making it unrestorable.
            'model_state_dict': model.state_dict()}
    elif (args.arch == 'resnet50'):
        checkpoint = {
            'model': 'resnet50',
            'epoch': args.epoch,
            'optimizer_state_dict': optimizer.state_dict(),
            'learn_rate': args.learning_rate,
            'class_to_idx': model.class_to_idx,
            'classifier': model.fc,
            'model_state_dict': model.state_dict()}
    torch.save(checkpoint, save_dir)


model = None
optimizer = None
if (args.arch == 'vgg16'):
    model = build_vgg16()
    optimizer = optim.Adam(model.classifier.parameters(), lr=args.learning_rate)
elif (args.arch == 'resnet50'):
    model = build_resnet50()
    optimizer = optim.Adam(model.fc.parameters(), lr=args.learning_rate)
else:
    print('Please select between vgg16 and resnet50 for architechture')
    sys.exit()

criterion = nn.NLLLoss()
model.to(device)
train()
test()
save(args.save_dir)
/**
 * Adapter for displaying list of quests
 *
 * TODO ViewHandler for better performance
 * TODO ClickHandler on title
 * TODO Click handling on quest item
 */
public class QuestArrayAdapter extends ArrayAdapter<Quest> {

    private final String TAG = QuestArrayAdapter.class.getSimpleName();

    /**
     *
     * @param context Activity context
     * @param objects List objects
     */
    public QuestArrayAdapter(Context context, List<Quest> objects) {
        super(context, R.layout.quest_list_row, objects);
    }

    /**
     * Custom view generation for displaying a Quest item in the list.
     * Uses the ViewHolder pattern: a recycled row keeps its holder in the
     * view tag, so findViewById only runs for freshly inflated rows.
     * @param position
     * @param convertView
     * @param parent
     * @return
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        //Item to display
        Quest quest = getItem(position);
        ViewHolder viewHolder; //cache

        // Check if an existing view is being reused, otherwise inflate the view
        if (convertView == null) {
            viewHolder = new ViewHolder();
            convertView = LayoutInflater.from(getContext()).inflate(R.layout.quest_list_row, parent, false);
            viewHolder.tvTitle = (TextView) convertView.findViewById(R.id.quest_title);
            viewHolder.llBadges = (LinearLayout) convertView.findViewById(R.id.badge_list);
            convertView.setTag(viewHolder);
        } else {
            //View is generated based on a cached (already built) row
            viewHolder = (ViewHolder) convertView.getTag();
            // Removing all children because the badge row is dynamically
            // generated per quest; stale badges from the recycled row must go.
            viewHolder.llBadges.removeAllViews();
        }

        //Updates
        viewHolder.tvTitle.setText(quest.getTitle());

        //Calc padding: convert dp to device pixels for badge spacing
        int padding_in_dp = 2; // TODO get from config
        final float scale = getContext().getResources().getDisplayMetrics().density;
        int padding_in_px = (int) (padding_in_dp * scale + 0.5f);

        //Iterate through achievements and display badge according to fulfillment
        for (Achievement a : quest.getAchievements()) {
            ImageView iv = new ImageView(getContext());
            if (a.isAchieved()) {
                iv.setImageResource(R.drawable.quest_achievement);
            } else {
                iv.setImageResource(R.drawable.quest_no_achievement);
            }
            iv.setPadding(0, 0, padding_in_px, 0);
            viewHolder.llBadges.addView(iv);
        }

        return convertView;
    }

    // Per-row cache of child views, stored in the row's tag.
    class ViewHolder {
        TextView tvTitle;
        LinearLayout llBadges;
    }
}
def sum_juvenile(self): return ( self.no_juvenile_male or 0 ) + ( self.no_juvenile_female or 0 ) + ( self.no_juvenile_unknown or 0 ) or 0
def _match_unfinished_file_if_possible( self, bucket_id, file_name, file_info, emerge_parts_dict, encryption: EncryptionSetting, file_retention: Optional[FileRetentionSetting] = None, legal_hold: Optional[LegalHold] = None, ): file_retention = file_retention or NO_RETENTION_FILE_SETTING for file_ in self.services.large_file.list_unfinished_large_files( bucket_id, prefix=file_name ): if file_.file_name != file_name: continue if file_.file_info != file_info: continue if encryption is not None and encryption != file_.encryption: continue if legal_hold is None: if LegalHold.UNSET != file_.legal_hold: continue elif legal_hold != file_.legal_hold: continue if file_retention != file_.file_retention: continue files_match = True finished_parts = {} for part in self.services.large_file.list_parts(file_.file_id): emerge_part = emerge_parts_dict.get(part.part_number) if emerge_part is None: files_match = False break if emerge_part.get_length() != part.content_length: files_match = False break assert emerge_part.is_hashable() sha1_sum = emerge_part.get_sha1() if sha1_sum != part.content_sha1: files_match = False break finished_parts[part.part_number] = part if not files_match or not finished_parts: continue return file_, finished_parts return None, {}
def execute(self): if self.active: should_publish = self.check_whether_to_publish() if should_publish: self.publish() if self.has_errors: self.send_failure_notification() self.cleanup()
package in.mahabhujal.mahabhujal;

import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import in.mahabhujal.mahabhujal.borewellApi.BorewellWaterLevelApi;
import in.mahabhujal.mahabhujal.model.BorewellWaterLevelModel;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;

/**
 * Activity that collects borewell water-level readings from a form and
 * submits them to the remote API via Retrofit.  An earlier local-SQLite
 * persistence path is kept below as commented-out code.
 */
public class BorewellLevel extends AppCompatActivity {

    // Remnants of the previous local-storage implementation (see commented code).
    SQLiteOpenHelper openHelperLevel;
    SQLiteDatabase db;

    private EditText borewellid;
    private EditText date;
    private EditText variation;
    private EditText preWaterLevel;
    private EditText postWaterLevel;
    private EditText potentWaterLevel;
    private RadioGroup available;
    private RadioButton yes;
    private RadioButton no;
    private Button Submit_waterlevel;
    public String available_water; // NOTE(review): never assigned or read here.
    // Selected availability value ("yes"/"no" button text).
    // NOTE(review): stays null if Submit is pressed before a radio button is
    // chosen; the API is then called with a null argument — confirm intended.
    String value;
    String url = "http://dbitbeproject.herokuapp.com/";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_borewell_water_level);
        //openHelperLevel = new DatabaseHelperBorewell(this);

        // Bind form widgets.
        borewellid = (EditText) findViewById(R.id.b_water_level_code);
        date = (EditText) findViewById(R.id.borewell_date);
        variation = (EditText) findViewById(R.id.b_variation);
        preWaterLevel = (EditText) findViewById(R.id.borewell_pre_monsoon);
        postWaterLevel = (EditText) findViewById(R.id.borewell_post_monsoon);
        potentWaterLevel = (EditText) findViewById(R.id.borewell_potent);
        available=(RadioGroup)findViewById(R.id.water_available_yesno);
        yes=(RadioButton)findViewById(R.id.bore_available_yes);
        no=(RadioButton)findViewById(R.id.bore_available_no);
        Submit_waterlevel = (Button) findViewById(R.id.submit_bore_waterlever);

        // Remember which availability option was selected.
        available.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(RadioGroup group, int checkedId) {
                if (yes.isChecked()) {
                    value = yes.getText().toString();
                } else if (no.isChecked()) {
                    value = no.getText().toString();
                }else {
                    Toast.makeText(BorewellLevel.this, "Select value for availability", Toast.LENGTH_SHORT).show();
                }
            }
        });

        Submit_waterlevel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                process();
                /*
                db = openHelperLevel.getWritableDatabase();
                String BCode = B_code.getText().toString();
                String RecordDate = Record_date.getText().toString();
                String Variation = BVariation.getText().toString();
                String PreMonsoon = BPreMonsoon.getText().toString();
                String PostMonsoon = BPostMonsoon.getText().toString();
                String Potentionmeter = BPotentionmeter.getText().toString();
                insertdatalevel(BCode, RecordDate, Variation, PreMonsoon, PostMonsoon, Potentionmeter);
                Toast.makeText(getApplicationContext(), "Data added successfully", Toast.LENGTH_LONG).show();
                */
            }
        });
    }

    /**
     * Builds a Retrofit client and posts the form values to the water-level
     * endpoint.  On success the form is cleared; on failure the error is
     * shown in a toast.
     *
     * NOTE(review): the `variation` field is read from the form but never
     * sent in the API call — confirm whether the endpoint expects it.
     */
    private void process() {
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(url)
                .addConverterFactory(GsonConverterFactory.create())
                .build();
        BorewellWaterLevelApi api=retrofit.create(BorewellWaterLevelApi.class);
        Call<BorewellWaterLevelModel> call=api.borewellwaterlevel(borewellid.getText().toString(),date.getText().toString(),preWaterLevel.getText().toString(),postWaterLevel.getText().toString(),potentWaterLevel.getText().toString(),value);
        call.enqueue(new Callback<BorewellWaterLevelModel>() {
            @Override
            public void onResponse(Call<BorewellWaterLevelModel> call, Response<BorewellWaterLevelModel> response) {
                // Clear the form after a (2xx or otherwise) HTTP response.
                borewellid.setText("");
                date.setText("");
                preWaterLevel.setText("");
                postWaterLevel.setText("");
                potentWaterLevel.setText("");
                variation.setText("");
                Toast.makeText(getApplicationContext(),response.toString(),Toast.LENGTH_LONG).show();
            }

            @Override
            public void onFailure(Call<BorewellWaterLevelModel> call, Throwable t) {
                Toast.makeText(getApplicationContext(),t.toString(),Toast.LENGTH_LONG).show();
            }
        });
    }
}
/*
private void insertdatalevel(String BCode, String RecordDate, String Variation, String PreMonsoon, String PostMonsoon, String Potentionmeter) {
    ContentValues contentValues2=new ContentValues();
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_1,BCode);
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_2,RecordDate);
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_3,Variation);
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_4,PreMonsoon);
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_5,PostMonsoon);
    contentValues2.put(DatabaseHelperBorewell.COL_LEVEL_6,Potentionmeter);
    long id = db.insert(DatabaseHelperBorewell.TABLE_BWATERLEVEL,null,contentValues2);
}
*/
#include <bits/stdc++.h> using namespace std; const long long mod = 1000000007; int n, b[500005], id[500005]; long long a[500005], fl[500005], fr[500005]; struct BIT { int n; vector<long long> t; void init(int m) { n = m; t.clear(); t.resize(m + 2); } void update(int p, long long x) { for (; p <= n; p += p & -p) t[p] = (t[p] + x) % mod; } long long get(int p) { long long res = 0; for (; p; p -= p & -p) res = (res + t[p]) % mod; return res; } } bit; bool cmp(const int &x, const int &y) { return a[x] < a[y]; } int main() { #ifndef ONLINE_JUDGE freopen("CODEFORCES.INP", "r", stdin); freopen("CODEFORCES.OUT", "w", stdout); #endif ios_base::sync_with_stdio(0); cin.tie(0); cout.tie(0); cin >> n; for (int i = 1; i <= n; i++) { cin >> a[i]; id[i] = i; } sort(id + 1, id + n + 1, cmp); for (int i = 1; i <= n; i++) b[id[i]] = i; bit.init(n); for (int i = 1; i <= n; i++) { fl[i] = bit.get(b[i] - 1) * (n - i + 1) % mod; bit.update(b[i], i); } bit.init(n); for (int i = n; i > 0; i--) { fr[i] = bit.get(b[i] - 1) * i % mod; bit.update(b[i], n - i + 1); } long long res = 0; for (int i = 1; i <= n; i++) { res = (res + a[i] * (fl[i] + fr[i]) % mod) % mod; res = (res + a[i] * i % mod * (n - i + 1)) % mod; } cout << res; }
<reponame>AlpacaTravel/react-maki-icons import React from 'react'; import createSvgIcon from '../utils/create-svg-icon'; export const paths:string[] = ['M616 791l290 145-42 82-352-175-352 176-42-83 290-145-289-144 42-83 351 175 352-176 41 84-289 144z m175-492v27l-93 93v93l-186 93-186-93v-93l-93-93v-47c-6-149 111-274 260-279 6 0 13 0 19 0 159 5 284 137 279 296 0 1 0 2 0 3z m-353-47c0-40-32-73-73-73s-74 33-74 73c0 41 33 74 74 74l0 0c41 0 73-33 73-74z m27 167h-46v93h46v-93z m140 0h-46v93h46v-93z m127-167c0-40-33-73-74-73s-73 33-73 73 33 74 73 74l0 0c41 0 74-33 74-74z']; export default createSvgIcon( [<path key='s-0' d={paths[0]}/>], 'Danger11' );
/** * Contains some methods for string manipulation. */ public class StringUtilities { /** Default rounding precision (digits after decimal point) */ public static final int DEFAULT_ROUND_PRECISION = 16; private static DecimalFormat df; private static DateFormat dateFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy"); static { Locale.setDefault(Locale.US); df = new DecimalFormat(); df.setGroupingUsed(false); } /** * Checks if a string contains illegal characters with respect to the XML * specification. * * @param text the text to be checked * * @return true if illegal chars contained, otherwise false */ public static boolean containsIllegalChars(String text) { int size = text.length(); boolean illegal = false; for (int i = 0; i < size; i++) { char c = text.charAt(i); if (isIllegal(c)) { illegal = true; break; } } return illegal; } /** * Converts a double to a string. You can specify the rounding precision, * i.e. the number of digits after the decimal point. * * @param d double to be converted * @param precision rounding precision * * @return a string representing the passed double value */ public static String doubleToString(double d, int precision) { synchronized (df) { df.setMaximumFractionDigits(precision); return df.format(d); } } /** * Converts a double to a string. The default rounding precision is used. * * @param d double to be converted * * @return a string representing the passed double value */ public static String doubleToString(double d) { return doubleToString(d, DEFAULT_ROUND_PRECISION); } public static String dateToString(Date date) { return dateFormat.format(date); } public static Date parseDate(String str) throws ParseException { return dateFormat.parse(str); } /** * Checks if the character is illegal with respect to the XML * specification. * * @param c the character to be checked * * @return true if character is illegal */ public static boolean isIllegal(char c) { return ((c < 32) && (c != '\t') && (c != '\n') && (c != '\r')); } }
import { IdentifierReference } from './identifierreference'; import { Node } from '../node'; import { Expression, PropertyKey } from '.'; import { AssignmentExpression } from './assignment-expr'; import { BindingElement } from './binding-element'; import { BindingIdentifier } from './binding-identifier'; import { AssignmentElement } from './assignment-element'; import { IdentifierName } from './identifiername'; /** * Property name */ export interface PropertyName extends Node<'PropertyName'> { readonly key: PropertyKey | IdentifierReference | IdentifierName; readonly value: AssignmentExpression | BindingElement | AssignmentElement | BindingIdentifier | Expression; } export function createPropertyName( key: PropertyKey, value: AssignmentExpression | BindingElement | AssignmentElement | BindingIdentifier ): PropertyName { return { type: 'PropertyName', key, value }; }
Special Collections Roadshow — Episode Seven: Housewife Special Collections Roadshow was created by the Civil War Institute at Gettysburg College in the Spring of 2014. It showcases various artifacts from Special Collections at Gettysburg College. The seventh episode features Megan McNish ’16 comparing the housewife — a soldier’s compact sewing kit — from Lewis Tway’s collection with another housewife recently received as part of Rev. Theodore Schlach’s newly acquired collection in Special Collections.
/// Initialises a new Timer with the command /// line arguments passed in the argument `args`. pub fn init(args: Vec<String>) -> Self { let mut tmr = Tima::new(); args.into_iter() .map(Tima::convert_arguments) .for_each(Tima::create_tima(&mut tmr)); tmr }
//===-- circt-c/Dialect/HW.h - C API for HW dialect ---------------*- C -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This header declares the C interface for registering and accessing the
// HW dialect. A dialect should be registered with a context to make it
// available to users of the context. These users must load the dialect
// before using any of its attributes, operations or types. Parser and pass
// manager can load registered dialects automatically.
//
//===----------------------------------------------------------------------===//

#ifndef CIRCT_C_DIALECT_HW_H
#define CIRCT_C_DIALECT_HW_H

#include "mlir-c/IR.h"

#ifdef __cplusplus
extern "C" {
#endif

/// Name/type pair describing one field of an HW struct type.
struct HWStructFieldInfo {
  MlirIdentifier name;
  MlirType type;
};
typedef struct HWStructFieldInfo HWStructFieldInfo;

//===----------------------------------------------------------------------===//
// Dialect API.
//===----------------------------------------------------------------------===//

MLIR_DECLARE_CAPI_DIALECT_REGISTRATION(HW, hw);

//===----------------------------------------------------------------------===//
// Type API.
//===----------------------------------------------------------------------===//

/// Return the hardware bit width of a type. Does not reflect any encoding,
/// padding, or storage scheme, just the bit (and wire width) of a
/// statically-size type. Reflects the number of wires needed to transmit a
/// value of this type. Returns -1 if the type is not known or cannot be
/// statically computed.
MLIR_CAPI_EXPORTED int64_t hwGetBitWidth(MlirType);

/// Return true if the specified type can be used as an HW value type, that is
/// the set of types that can be composed together to represent synthesized,
/// hardware but not marker types like InOutType or unknown types from other
/// dialects.
MLIR_CAPI_EXPORTED bool hwTypeIsAValueType(MlirType);

/// Returns true if the type is an HW array type.
MLIR_CAPI_EXPORTED bool hwTypeIsAArrayType(MlirType);

/// Returns true if the type is an HW inout type.
MLIR_CAPI_EXPORTED bool hwTypeIsAInOut(MlirType type);

/// Returns true if the type is an HW struct type.
MLIR_CAPI_EXPORTED bool hwTypeIsAStructType(MlirType);

/// Returns true if the type is an HW type alias.
MLIR_CAPI_EXPORTED bool hwTypeIsATypeAliasType(MlirType);

/// Returns true if the type is an HW int type.
MLIR_CAPI_EXPORTED bool hwTypeIsAIntType(MlirType);

/// Creates a fixed-size HW array type in the context associated with element.
MLIR_CAPI_EXPORTED MlirType hwArrayTypeGet(MlirType element, size_t size);

/// Returns the element type of an array type.
MLIR_CAPI_EXPORTED MlirType hwArrayTypeGetElementType(MlirType);

/// Returns the size of an array type.
MLIR_CAPI_EXPORTED intptr_t hwArrayTypeGetSize(MlirType);

/// Creates an HW inout type in the context associated with element.
MLIR_CAPI_EXPORTED MlirType hwInOutTypeGet(MlirType element);

/// Returns the element type of an inout type.
MLIR_CAPI_EXPORTED MlirType hwInOutTypeGetElementType(MlirType);

/// Creates an HW struct type in the context associated with the elements.
MLIR_CAPI_EXPORTED MlirType hwStructTypeGet(MlirContext ctx,
                                            intptr_t numElements,
                                            HWStructFieldInfo const *elements);

/// Returns the type of the named field of an HW struct type.
MLIR_CAPI_EXPORTED MlirType hwStructTypeGetField(MlirType structType,
                                                 MlirStringRef fieldName);

/// Creates a parameterized HW int type from the given parameter attribute.
MLIR_CAPI_EXPORTED MlirType hwParamIntTypeGet(MlirAttribute parameter);

/// Returns the width attribute of a parameterized HW int type.
MLIR_CAPI_EXPORTED MlirAttribute hwParamIntTypeGetWidthAttr(MlirType);

/// Returns the field (name and type) at the given index of an HW struct type.
MLIR_CAPI_EXPORTED HWStructFieldInfo
hwStructTypeGetFieldNum(MlirType structType, unsigned idx);

/// Returns the number of fields of an HW struct type.
MLIR_CAPI_EXPORTED intptr_t hwStructTypeGetNumFields(MlirType structType);

/// Creates an HW type alias referring to innerType under the given scope/name.
MLIR_CAPI_EXPORTED MlirType hwTypeAliasTypeGet(MlirStringRef scope,
                                               MlirStringRef name,
                                               MlirType innerType);

/// Returns the canonical (fully resolved) type of a type alias.
MLIR_CAPI_EXPORTED MlirType hwTypeAliasTypeGetCanonicalType(MlirType typeAlias);

/// Returns the aliased inner type of a type alias.
MLIR_CAPI_EXPORTED MlirType hwTypeAliasTypeGetInnerType(MlirType typeAlias);

/// Returns the name component of a type alias.
MLIR_CAPI_EXPORTED MlirStringRef hwTypeAliasTypeGetName(MlirType typeAlias);

/// Returns the scope component of a type alias.
MLIR_CAPI_EXPORTED MlirStringRef hwTypeAliasTypeGetScope(MlirType typeAlias);

//===----------------------------------------------------------------------===//
// Attribute API.
//===----------------------------------------------------------------------===//

/// Returns true if the attribute is an HW inner-ref attribute.
MLIR_CAPI_EXPORTED bool hwAttrIsAInnerRefAttr(MlirAttribute);

/// Creates an inner-ref attribute from a module name and an inner symbol.
MLIR_CAPI_EXPORTED MlirAttribute hwInnerRefAttrGet(MlirAttribute moduleName,
                                                   MlirAttribute innerSym);

/// Returns the inner symbol name of an inner-ref attribute.
MLIR_CAPI_EXPORTED MlirAttribute hwInnerRefAttrGetName(MlirAttribute);

/// Returns the module name of an inner-ref attribute.
MLIR_CAPI_EXPORTED MlirAttribute hwInnerRefAttrGetModule(MlirAttribute);

/// Returns true if the attribute is an HW global-ref attribute.
MLIR_CAPI_EXPORTED bool hwAttrIsAGlobalRefAttr(MlirAttribute);

/// Creates a global-ref attribute referring to the given symbol name.
MLIR_CAPI_EXPORTED MlirAttribute hwGlobalRefAttrGet(MlirAttribute symName);

/// Returns true if the attribute is an HW parameter declaration.
MLIR_CAPI_EXPORTED bool hwAttrIsAParamDeclAttr(MlirAttribute);

/// Creates a parameter declaration attribute (name, type, default value).
MLIR_CAPI_EXPORTED MlirAttribute hwParamDeclAttrGet(MlirStringRef name,
                                                    MlirType type,
                                                    MlirAttribute value);

/// Returns the name of a parameter declaration.
MLIR_CAPI_EXPORTED MlirStringRef hwParamDeclAttrGetName(MlirAttribute decl);

/// Returns the type of a parameter declaration.
MLIR_CAPI_EXPORTED MlirType hwParamDeclAttrGetType(MlirAttribute decl);

/// Returns the value of a parameter declaration.
MLIR_CAPI_EXPORTED MlirAttribute hwParamDeclAttrGetValue(MlirAttribute decl);

/// Returns true if the attribute is an HW parameter reference.
MLIR_CAPI_EXPORTED bool hwAttrIsAParamDeclRefAttr(MlirAttribute);

/// Creates a reference to the named parameter in the given context.
MLIR_CAPI_EXPORTED MlirAttribute hwParamDeclRefAttrGet(MlirContext ctx,
                                                       MlirStringRef cName);

/// Returns the referenced parameter's name.
MLIR_CAPI_EXPORTED MlirStringRef hwParamDeclRefAttrGetName(MlirAttribute decl);

/// Returns the referenced parameter's type.
MLIR_CAPI_EXPORTED MlirType hwParamDeclRefAttrGetType(MlirAttribute decl);

/// Returns true if the attribute is an HW verbatim parameter expression.
MLIR_CAPI_EXPORTED bool hwAttrIsAParamVerbatimAttr(MlirAttribute);

/// Creates a verbatim parameter expression from the given text attribute.
MLIR_CAPI_EXPORTED MlirAttribute hwParamVerbatimAttrGet(MlirAttribute text);

#ifdef __cplusplus
}
#endif

#endif // CIRCT_C_DIALECT_HW_H
/** * Calls reconfig on the client corresponding to id to add or remove * the given servers. Tests appropriately to make sure the * reconfig succeeded. */ private void testReconfig(int id, boolean adding, ArrayList<String> servers) throws Exception { if (adding) { ReconfigTest.reconfig(zkHandles[id], servers, null, null, -1); for (String server : servers) { int id2 = Integer.parseInt(server.substring(7, 8)); ReconfigTest.testNormalOperation(zkHandles[id], zkHandles[id2]); } ReconfigTest.testServerHasConfig(zkHandles[id], servers, null); } else { ReconfigTest.reconfig(zkHandles[id], null, servers, null, -1); ReconfigTest.testServerHasConfig(zkHandles[id], null, servers); } }
def smart_extend(dict_inst, *dict_arr): for item in dict_arr: dict_inst.update(item) return dict_inst
<gh_stars>0 /* * Created on Dec 28, 2004 * */ package aima.probability; import java.util.Hashtable; /** * @author <NAME> * */ public class Query { private String queryVariable; private Hashtable<String, Boolean> evidenceVariables; public Query(String queryVariable, String[] evidenceVariables, boolean[] evidenceValues) { this.queryVariable = queryVariable; this.evidenceVariables = new Hashtable<String, Boolean>(); for (int i = 0; i < evidenceVariables.length; i++) { this.evidenceVariables.put(evidenceVariables[i], new Boolean( evidenceValues[i])); } } public Hashtable<String, Boolean> getEvidenceVariables() { return evidenceVariables; } public String getQueryVariable() { return queryVariable; } }
/* Display name of executing routine. */ static void TraceNames( ExecToken Token, cell_t Level ) { char *DebugName; cell_t i; if( ffTokenToName( Token, &DebugName ) ) { cell_t NumSpaces; if( gCurrentTask->td_OUT > 0 ) EMIT_CR; EMIT( '>' ); for( i=0; i<Level; i++ ) { MSG( " " ); } TypeName( DebugName ); NumSpaces = 30 - gCurrentTask->td_OUT; for( i=0; i < NumSpaces; i++ ) { EMIT( ' ' ); } ffDotS(); } else { MSG_NUM_H("Couldn't find Name for ", Token); } }
<gh_stars>1-10
"""Caching and History API test for KAOS."""

from collections import namedtuple  # NOTE(review): unused here — confirm before removing

import re  # NOTE(review): unused here — confirm before removing
import logging

from ddt import ddt, data, file_data

from kaos.algorithm.visibility_finder import VisibilityFinder  # NOTE(review): unused here
from kaos.models import Satellite
from kaos.models.parser import parse_ephemeris_file
from kaos.utils.time_conversion import utc_to_unix

from .. import KaosTestCase


@ddt
class TestVisibilityApi(KaosTestCase):
    """Test class for the visibility API."""

    @classmethod
    def setUpClass(cls):
        """Add default test responses."""
        super(TestVisibilityApi, cls).setUpClass()
        parse_ephemeris_file("ephemeris/Radarsat2.e")

    @data(('test/test_data/vancouver.test', ('20180101T00:00:00.0', '20180101T02:00:00.0'), 60),
          ('test/test_data/vancouver.test', ('20180104T00:00:00.0', '20180107T11:59:59.0'), 60),
          ('test/test_data/vancouver.test', ('20180102T00:00:00.0', '20180102T11:59:59.0'), 60))
    def test_visibility(self, test_data):
        """Tests that the visibility finder produces the same results as the access file.

        Args:
            test_data (tuple): A three tuple containing the:
                                1 - The path of KAOS access test file
                                2 - A tuple of the desired test duration
                                3 - The maximum tolerated deviation in seconds

        Note:
            Ranges provided must not start or end at an access boundary. This is a result of the
            strict checking method provided.
        """
        access_file, interval, max_error = test_data
        # Interval is given as UTC strings for the request, but compared in POSIX seconds.
        posix_interval = (utc_to_unix(interval[0]), utc_to_unix(interval[1]))

        access_info = self.parse_access_file(access_file, posix_interval)
        satellite_id = Satellite.get_by_name(access_info.sat_name)[0].platform_id
        request = {'Target': access_info.target,
                   'POI': {'startTime': interval[0],
                           'endTime': interval[1]},
                   'PlatformID': [satellite_id]}

        with self.app.test_client() as client:
            response = client.post('/visibility/search', json=request)

        self.assertTrue(response.is_json)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.json['Opportunities']), len(access_info.accesses))

        for oppertunity in response.json['Opportunities']:
            self.assertEqual(satellite_id, oppertunity['PlatformID'])
            predicted_access = (oppertunity['start_time'], oppertunity['end_time'])
            # NOTE(review): rebinds `interval` (the UTC-string tuple above) to
            # the POSIX tuple for the comparisons below — confirm intended.
            interval = posix_interval
            found = False
            # Each predicted access must match one expected access within
            # max_error, with accesses clipped at the POI boundary handled by
            # the interval[0]/interval[1] cases below.
            for actual_access in access_info.accesses:
                if (interval[0] > actual_access[0]) and (interval[1] < actual_access[1]):
                    # Expected access spans the whole POI: both ends clipped.
                    if ((abs(interval[0] - predicted_access[0]) < max_error) and
                            (abs(interval[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                if interval[0] > actual_access[0]:
                    # Access clipped at the POI start.
                    if ((abs(interval[0] - predicted_access[0]) < max_error) and
                            (abs(actual_access[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                elif interval[1] < actual_access[1]:
                    # Access clipped at the POI end.
                    if ((abs(actual_access[0] - predicted_access[0]) < max_error) and
                            (abs(interval[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                # Unclipped access: compare both ends directly.
                if ((abs(actual_access[0] - predicted_access[0]) < max_error) and
                        (abs(actual_access[1] - predicted_access[1]) < max_error)):
                    found = True
                    break

            if not found:
                raise Exception('Wrong access: {}'.format(predicted_access))

    @file_data("test_data_visibility.json")
    def test_visibility_incorrect_input(self, Target, POI, PlatformID, Reasons):
        # Malformed requests must be rejected with 422 and every expected
        # validation reason present in the response body.
        request = {'Target': Target,
                   'POI': POI,
                   'PlatformID': PlatformID}

        with self.app.test_client() as client:
            response = client.post('/visibility/search', json=request)

        logging.debug("Response was: %s\nCode: %s\n", response.json, response.status)
        self.assertTrue(response.is_json)
        self.assertEqual(response.status_code, 422)
        for reason in Reasons:
            if reason not in response.json["reasons"]:
                self.assertTrue(False, msg="Missing reason in response: {}".format(reason))


@ddt
class TestOpertunityApi(KaosTestCase):
    """Test class for the opportunity API."""

    @classmethod
    def setUpClass(cls):
        """Add default test responses."""
        super(TestOpertunityApi, cls).setUpClass()
        parse_ephemeris_file("ephemeris/Radarsat2.e")

    @data(('test/test_data/vancouver_area.test', ('20180101T00:00:00.0', '20180102T00:00:00.0'),
           60),
          ('test/test_data/vancouver_area.test', ('20180103T00:00:00.0', '20180105T00:00:00.0'),
           60))
    def test_visibility(self, test_data):
        """Tests that the visibility finder produces the same results as the access file.

        Args:
            test_data (tuple): A three tuple containing the:
                                1 - The path of KAOS access test file
                                2 - A tuple of the desired POI
                                3 - The maximum tolerated deviation in seconds

        Note:
            Ranges provided must not start or end at an access boundary. This is a result of the
            strict checking method provided.
        """
        access_file, interval, max_error = test_data
        posix_interval = (utc_to_unix(interval[0]), utc_to_unix(interval[1]))

        access_info = self.parse_access_file(access_file, posix_interval)
        satellite_id = Satellite.get_by_name(access_info.sat_name)[0].platform_id
        # Same shape as the visibility request, but with an area target.
        request = {'TargetArea': access_info.target,
                   'POI': {
                       'startTime': interval[0],
                       'endTime': interval[1]},
                   'PlatformID': [satellite_id]}

        with self.app.test_client() as client:
            response = client.post('/opportunity/search', json=request)

        self.assertTrue(response.is_json)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.json['Opportunities']), len(access_info.accesses))

        for oppertunity in response.json['Opportunities']:
            self.assertEqual(satellite_id, oppertunity['PlatformID'])
            predicted_access = (oppertunity['start_time'], oppertunity['end_time'])
            # NOTE(review): same access-matching logic as TestVisibilityApi —
            # duplicated; a shared helper would be a candidate refactor.
            interval = posix_interval
            found = False
            for actual_access in access_info.accesses:
                if (interval[0] > actual_access[0]) and (interval[1] < actual_access[1]):
                    if ((abs(interval[0] - predicted_access[0]) < max_error) and
                            (abs(interval[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                if interval[0] > actual_access[0]:
                    if ((abs(interval[0] - predicted_access[0]) < max_error) and
                            (abs(actual_access[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                elif interval[1] < actual_access[1]:
                    if ((abs(actual_access[0] - predicted_access[0]) < max_error) and
                            (abs(interval[1] - predicted_access[1]) < max_error)):
                        found = True
                        break
                if ((abs(actual_access[0] - predicted_access[0]) < max_error) and
                        (abs(actual_access[1] - predicted_access[1]) < max_error)):
                    found = True
                    break

            if not found:
                raise Exception('Wrong access: {}'.format(predicted_access))

    @file_data("test_data_opportunity.json")
    def test_opportunity_incorrect_input(self, TargetArea, POI, PlatformID, Reasons):
        # Malformed requests must be rejected with 422 and every expected
        # validation reason present in the response body.
        request = {'TargetArea': TargetArea,
                   'POI': POI,
                   'PlatformID': PlatformID}

        with self.app.test_client() as client:
            response = client.post('/opportunity/search', json=request)

        logging.debug("Response was: %s\nCode: %s\n", response.json, response.status)
        self.assertTrue(response.is_json)
        self.assertEqual(response.status_code, 422)
        for reason in Reasons:
            if reason not in response.json["reasons"]:
                self.assertTrue(False, msg="Missing reason in response: {}".format(reason))