// src/app/containers/createAdSteps/secondStep/SecondStep.tsx (repo: davran-siv/exchange-front)
import { Button, FormControlLabel, Radio } from '@material-ui/core'
import { CustomTextField, RadioGroupCustom, SimpleFileUpload } from 'app/components/common/formInput'
import { AdType } from 'app/constants'
import { Field, Form, Formik } from 'formik'
import React from 'react'
import * as Yup from 'yup'

// Russian UI strings: 'Выберите категорию' = "Choose a category",
// 'Заголовок обязателен' = "Title is required".
const validationSchema = Yup.object().shape({
  type: Yup.string().required('Выберите категорию'),
  title: Yup.string().required('Заголовок обязателен'),
})

const CreateAdSecondStep = (props: any) => {
  return (
    <Formik
      initialValues={{ type: AdType.service, title: '', description: '', images: [] }}
      validationSchema={validationSchema}
      onSubmit={(values) => {
        console.log(values)
      }}
    >
      {/* Only submitForm and isSubmitting are actually used below. */}
      {({ values, errors, submitForm, setFieldValue, handleSubmit, isSubmitting }) => {
        return (
          <Form>
            {/* Ad category: 'Товар' = "Product", 'Услуга' = "Service" */}
            <Field name="type" component={RadioGroupCustom} row>
              <FormControlLabel
                value={AdType.product}
                control={<Radio disabled={isSubmitting}/>}
                label="Товар"
                disabled={isSubmitting}
                labelPlacement="start"
              />
              <FormControlLabel
                value={AdType.service}
                control={<Radio disabled={isSubmitting}/>}
                label="Услуга"
                disabled={isSubmitting}
                labelPlacement="start"
              />
            </Field>
            <div>
              {/* 'Заголовок' = "Title" */}
              <Field name="title" label='Заголовок' component={CustomTextField}/>
            </div>
            <div>
              {/* 'Описание' = "Description" */}
              <Field name="description" label='Описание' rowsMax="4" multiline component={CustomTextField}/>
            </div>
            <div>
              <Field name="images[0]" component={SimpleFileUpload}/>
            </div>
            <div>
              <Field name="images[1]" component={SimpleFileUpload}/>
            </div>
            {/* 'Дальше' = "Next" */}
            <Button onClick={submitForm}>Дальше</Button>
          </Form>
        )
      }}
    </Formik>
  )
}

export default CreateAdSecondStep
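For context, a hypothetical parent showing how this step component might be mounted. This is a sketch only: the real wizard container and routing in davran-siv/exchange-front are not part of this snippet.

// Hypothetical wizard container (not from the repo) that renders the step.
import React from 'react'
import CreateAdSecondStep from 'app/containers/createAdSteps/secondStep/SecondStep'

const CreateAdWizard = () => (
  <div>
    {/* step 1 omitted; step navigation state assumed to live elsewhere */}
    <CreateAdSecondStep/>
  </div>
)

export default CreateAdWizard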
#include <stdlib.h>
#include <stdbool.h> /* added: bool is used below; "player.h" may already provide it */
#include <math.h>
#include "player.h"

#define PUBLIC
#define SPEED 2
#define ANIMATIONSPEED 8 // lower = faster
#define HEALTH 100
#define ROTATION_UPDATE_SPEED 5
#define SNAP_DISTANCE 10
#define PLAYER_CENTER_OFFSET_X 20
#define PLAYER_CENTER_OFFSET_Y 32
#define PLAYER_RADIUS 20

struct Player_type {
    int health;
    double speed;
    double diaSpeed;
    double posX, posY;
    int frame;
    int frameCounter;
    int isMoving;
    double direction;
    bool active;
    bool alive;
    int newX;
    int newY;
    double xSpeed, ySpeed;
    int newDirection;
    Weapon gun;
    int lives;
    double shotAngle;
    int kills;
};

PUBLIC Player createPlayer(int x, int y) {
    Player a = malloc(sizeof(struct Player_type));
    a->health = HEALTH;
    a->speed = SPEED;
    a->diaSpeed = sqrt(SPEED * SPEED / 2); // per-axis speed that keeps diagonal movement at SPEED overall
    a->frame = 0;
    a->isMoving = 0;
    a->frameCounter = 0;
    a->direction = 0;
    a->posX = x;
    a->posY = y;
    a->newX = x;
    a->newY = y;
    a->active = false;
    a->alive = false;
    a->xSpeed = a->ySpeed = 0;
    a->newDirection = 0;
    a->gun = createWeapon();
    a->lives = 0;
    a->kills = 0;
    return a;
}

PUBLIC int getPlayerFrame(Player p) {
    return p->frame;
}

PUBLIC void movePlayer(Player p, int up, int down, int right, int left, int mouseX, int mouseY, bool reload) {
    int newX = 0, newY = 0, diagonal;
    int xDelta, yDelta, distance, xComp = 0, yComp = 0;
    if (reload) reloadWeapon(p->gun);
    p->isMoving = 0;
    if (up && !down)    { newY--; p->isMoving = 1; }
    if (down && !up)    { newY++; p->isMoving = 1; }
    if (left && !right) { newX--; p->isMoving = 1; }
    if (right && !left) { newX++; p->isMoving = 1; }
    diagonal = (newX != 0 && newY != 0);

    // Set player absolute pos
    p->posX += p->diaSpeed * diagonal * newX + p->speed * !diagonal * newX;
    p->posY += p->diaSpeed * diagonal * newY + p->speed * !diagonal * newY;
    wallPlayerCollisionHandling(&(p->posX), &(p->posY), PLAYER_RADIUS);

    // Collision detection with window
    if (p->posY - PLAYER_RADIUS <= 0) p->posY = PLAYER_RADIUS;
    if (p->posY + PLAYER_RADIUS >= WINDOWHEIGHT) p->posY = WINDOWHEIGHT - PLAYER_RADIUS;
    if (p->posX - PLAYER_RADIUS <= 0) p->posX = PLAYER_RADIUS;
    if (p->posX + PLAYER_RADIUS >= WINDOWWIDTH) p->posX = WINDOWWIDTH - PLAYER_RADIUS;

    // Update player sprite frame
    p->frameCounter = (p->frameCounter + p->isMoving) % (ANIMATIONSPEED + 1);
    p->frame = (p->frame + ((p->frameCounter / ANIMATIONSPEED) * p->isMoving)) % 4;

    // Rotate player toward the mouse
    p->direction = (atan2(mouseY - round(p->posY), mouseX - round(p->posX)) * 180 / M_PI);

    // Correct shot angle when the mouse is close to the player
    xDelta = mouseX - round(p->posX);
    yDelta = mouseY - round(p->posY);
    xDelta += !xDelta && !yDelta; // avoid 0/0 when the mouse sits exactly on the player
    distance = sqrt(xDelta * xDelta + yDelta * yDelta);
    xComp = (distance < 200) * (200 - distance) * xDelta / distance;
    yComp = (distance < 200) * (200 - distance) * yDelta / distance;

    // Update shooting angle
    p->shotAngle = atan2(mouseY + yComp - getPlayerGunbarrelY(p), mouseX + xComp - getPlayerGunbarrelX(p));
}

PUBLIC double getPlayerDirection(Player p) { return p->direction; }

PUBLIC int getPlayerHealth(Player p) { return p->health; }

PUBLIC int getPlayerX(Player p) { return p->posX; }

PUBLIC int getPlayerY(Player p) { return p->posY; }

PUBLIC void activatePlayer(Player p) { p->active = true; }

PUBLIC void setActivePlayer(Player p, bool newValue) { p->active = newValue; }

PUBLIC bool isPlayerActive(Player p) { return p->active; }

PUBLIC int getPlayerKills(Player p) { return p->kills; }

PUBLIC void setPlayerKills(Player p, int newValue) { p->kills = newValue; }

PUBLIC void updatePlayerPosition(Player p, int x, int y, int direction, bool alive) {
    p->alive = alive;
    p->newX = x;
    p->newY = y;
    p->newDirection = direction;
    p->direction = direction;
}

PUBLIC void moveOtherPlayers(Player p) {
    int xDelta = p->newX - p->posX;
    int yDelta = p->newY - p->posY;
    double distance = sqrt(xDelta * xDelta + yDelta * yDelta);
    double scaling = p->speed / (distance * (distance >= 1) + (distance < 1));
    if (distance >= SNAP_DISTANCE) {
        snapPlayer(p, p->newX, p->newY);
        return;
    }
    if (xDelta > 1 || xDelta < -1 || yDelta > 1 || yDelta < -1) {
        p->xSpeed = scaling * xDelta;
        p->ySpeed = scaling * yDelta;
        p->posX += p->xSpeed;
        p->posY += p->ySpeed;
        p->frameCounter = (p->frameCounter + 1) % (ANIMATIONSPEED + 1);
        p->frame = (p->frame + (p->frameCounter / ANIMATIONSPEED)) % 4;
    }
}

PUBLIC void snapPlayer(Player p, int x, int y) {
    p->newX = x;
    p->newY = y;
    p->posX = x;
    p->posY = y;
}

PUBLIC void damagePlayer(Player p, int damage) {
    p->health -= damage;
    if (p->health <= 0) p->alive = false;
}

PUBLIC bool isPlayerAlive(Player p) { return p->alive; }

PUBLIC void setPlayerAlive(Player p, bool value) {
    p->health = HEALTH;
    p->alive = value;
}

PUBLIC bool canShoot(Player a) { return fireWeapon(a->gun); }

PUBLIC void playerTick(Player a) { weaponTick(a->gun); }

PUBLIC int getPlayerGunbarrelX(Player a) {
    return round(a->posX + (34 * sin((-a->direction + 72) * M_PI / 180)));
}

PUBLIC int getPlayerGunbarrelY(Player a) {
    return round(a->posY + (34 * cos((-a->direction + 72) * M_PI / 180)));
}

PUBLIC int getPlayerWeapondamage(Player a) { return getWeapondamage(a->gun); }

PUBLIC int getPlayerweaponMag(Player a) { return getMag(a->gun); }

PUBLIC void setPlayerhealth(Player a, int health) { a->health = health; }

PUBLIC void setPlayerLives(Player a, int lives) { a->lives = lives; }

PUBLIC int getPlayerlives(Player a) { return a->lives; }

PUBLIC int getPlayerReloadprogress(Player a) { return getReloadprogress(a->gun); }

PUBLIC void resetPlayer(Player a) { resetWeapon(a->gun); }

PUBLIC double getPlayerShotAngle(Player a) { return a->shotAngle; }

PUBLIC int getPlayerRadius() { return PLAYER_RADIUS; }
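A standalone arithmetic check (not part of the game code; SPEED copied from the file above) of why createPlayer precomputes diaSpeed = sqrt(SPEED^2 / 2): moving on both axes at diaSpeed yields the same net speed as moving on one axis at SPEED, instead of the faster sqrt(2) * SPEED.

#include <math.h>
#include <stdio.h>

#define SPEED 2

int main(void) {
    double diaSpeed = sqrt((double)SPEED * SPEED / 2);
    printf("single-axis speed    : %f\n", (double)SPEED);
    printf("naive diagonal speed : %f\n", sqrt(2.0 * SPEED * SPEED)); /* SPEED * sqrt(2) */
    printf("with diaSpeed        : %f\n", sqrt(2 * diaSpeed * diaSpeed)); /* == SPEED */
    return 0;
}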
/**
 * @file
 * @author __AUTHOR_NAME__ <<EMAIL>>
 * @copyright 2021 __COMPANY_LTD__
 * @license <a href="https://opensource.org/licenses/MIT">MIT License</a>
 */

#ifndef ZEN_RENDERER_PIPELINES_POSTFX_HPP
#define ZEN_RENDERER_PIPELINES_POSTFX_HPP

#include "../pipeline.hpp"
#include "../../display/color_matrix.hpp"

namespace Zen {

/**
 * The Post FX Pipeline is a special kind of pipeline specifically for handling
 * post processing effects. Whereas a standard Pipeline allows you to control
 * the process of rendering Game Objects by configuring the shaders and
 * attributes used to draw them, a Post FX Pipeline is designed to allow you to
 * apply processing _after_ the Game Object/s have been rendered. Typical
 * examples of post processing effects are bloom filters, blurs, light effects
 * and color manipulation.
 *
 * The pipeline works by creating a tiny vertex buffer with just one single
 * hard-coded quad in it. Game Objects can have a Post Pipeline set on them.
 * Those objects are then rendered using their standard pipeline, but are
 * redirected to the Render Targets owned by the post pipeline, which can then
 * apply their own shaders and effects, before passing them back to the main
 * renderer.
 *
 * The default fragment shader it uses can be found in `shaders/post_fx.frag`.
 * The default vertex shader it uses can be found in `shaders/quad.vert`.
 *
 * The default shader attributes for this pipeline are:
 *
 * `aPosition` (vec2, offset 0)
 * `aTexCoord` (vec2, offset 8)
 *
 * The vertices array layout is:
 *
 * -1,  1   B----C   1,  1
 *  0,  1   |   /|   1,  1
 *          |  / |
 *          | /  |
 *          |/   |
 * -1, -1   A----D   1, -1
 *  0,  0            1,  0
 *
 * A = -1, -1 (pos) and 0, 0 (uv)
 * B = -1,  1 (pos) and 0, 1 (uv)
 * C =  1,  1 (pos) and 1, 1 (uv)
 * D =  1, -1 (pos) and 1, 0 (uv)
 *
 * First tri: A, B, C
 * Second tri: A, C, D
 *
 * Array index:
 *
 * 0  = Tri 1 - Vert A - x pos
 * 1  = Tri 1 - Vert A - y pos
 * 2  = Tri 1 - Vert A - uv u
 * 3  = Tri 1 - Vert A - uv v
 *
 * 4  = Tri 1 - Vert B - x pos
 * 5  = Tri 1 - Vert B - y pos
 * 6  = Tri 1 - Vert B - uv u
 * 7  = Tri 1 - Vert B - uv v
 *
 * 8  = Tri 1 - Vert C - x pos
 * 9  = Tri 1 - Vert C - y pos
 * 10 = Tri 1 - Vert C - uv u
 * 11 = Tri 1 - Vert C - uv v
 *
 * 12 = Tri 2 - Vert A - x pos
 * 13 = Tri 2 - Vert A - y pos
 * 14 = Tri 2 - Vert A - uv u
 * 15 = Tri 2 - Vert A - uv v
 *
 * 16 = Tri 2 - Vert C - x pos
 * 17 = Tri 2 - Vert C - y pos
 * 18 = Tri 2 - Vert C - uv u
 * 19 = Tri 2 - Vert C - uv v
 *
 * 20 = Tri 2 - Vert D - x pos
 * 21 = Tri 2 - Vert D - y pos
 * 22 = Tri 2 - Vert D - uv u
 * 23 = Tri 2 - Vert D - uv v
 *
 * @class PostFXPipeline
 * @since 0.0.0
 */
class PostFXPipeline : public Pipeline {
public:
	/**
	 * @since 0.0.0
	 *
	 * @param config The configuration options for this pipeline.
	 */
	PostFXPipeline (PipelineConfig config);

	PipelineConfig prepareConfig (PipelineConfig config);

	void boot ();

	void onDraw (RenderTarget *renderTarget);

	/**
	 * Copy the `source` Render Target to the `target` Render Target.
	 *
	 * You can optionally set the brightness factor of the copy.
	 *
	 * The difference between this method and `drawFrame` is that this method
	 * uses a faster copy shader, where only the brightness can be modified.
	 * If you need color level manipulation, see `drawFrame` instead.
	 *
	 * @since 0.0.0
	 *
	 * @param source The source Render Target.
	 * @param target The target Render Target.
	 * @param brightness The brightness value applied to the frame copy.
	 * @param clear Clear the target before copying?
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void copyFrame (RenderTarget *source, RenderTarget *target, double brightness = 1., bool clear = true, bool clearAlpha = true);

	/**
	 * Pops the framebuffer from the renderer's FBO stack and sets that as the
	 * active target, then draws the `source` Render Target to it. It then
	 * resets the renderer textures.
	 *
	 * This should be done when you need to draw the _final_ results of a
	 * pipeline to the game canvas, or the next framebuffer in line on the FBO
	 * stack. You should only call this once in the `onDraw` handler and it
	 * should be the final thing called. Be careful not to call this if you
	 * need to actually use the pipeline shader, instead of the copy shader. In
	 * those cases, use the `bindAndDraw` method.
	 *
	 * @since 0.0.0
	 *
	 * @param source The Render Target to draw from.
	 */
	void copyToGame (RenderTarget *source);

	/**
	 * Copy the `source` Render Target to the `target` Render Target, using the
	 * given Color Matrix.
	 *
	 * The difference between this method and `copyFrame` is that this method
	 * uses a color matrix shader, where you have full control over the luminance
	 * values used during the copy. If you don't need this, you can use the faster
	 * `copyFrame` method instead.
	 *
	 * @since 0.0.0
	 *
	 * @param source The source Render Target.
	 * @param target The target Render Target.
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void drawFrame (RenderTarget *source, RenderTarget *target, bool clearAlpha = true);

	/**
	 * Draws the `source1` and `source2` Render Targets to the `target` Render Target
	 * using a linear blend effect, which is controlled by the `strength` parameter.
	 *
	 * @since 0.0.0
	 *
	 * @param source1 The first source Render Target.
	 * @param source2 The second source Render Target.
	 * @param target The target Render Target.
	 * @param strength The strength of the blend.
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void blendFrames (RenderTarget *source1, RenderTarget *source2, RenderTarget *target, double strength = 1, bool clearAlpha = true);

	/**
	 * Draws the `source1` and `source2` Render Targets to the `target` Render Target
	 * using an additive blend effect, which is controlled by the `strength` parameter.
	 *
	 * @since 0.0.0
	 *
	 * @param source1 The first source Render Target.
	 * @param source2 The second source Render Target.
	 * @param target The target Render Target.
	 * @param strength The strength of the blend.
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void blendFramesAdditive (RenderTarget *source1, RenderTarget *source2, RenderTarget *target, double strength = 1, bool clearAlpha = true);

	/**
	 * Clears the given Render Target.
	 *
	 * @since 0.0.0
	 *
	 * @param target The Render Target to clear.
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void clearFrame (RenderTarget *target, bool clearAlpha = true);

	/**
	 * Copy the `source` Render Target to the `target` Render Target.
	 *
	 * The difference with this copy is that no resizing takes place. If the `source`
	 * Render Target is larger than the `target` then only a portion the same size as
	 * the `target` dimensions is copied across.
	 *
	 * You can optionally set the brightness factor of the copy.
	 *
	 * @since 0.0.0
	 *
	 * @param source The source Render Target.
	 * @param target The target Render Target.
	 * @param brightness The brightness value applied to the frame copy.
	 * @param clear Clear the target before copying?
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 * @param eraseMode Erase source from target using ERASE Blend Mode?
	 */
	void blitFrame (RenderTarget *source, RenderTarget *target, double brightness = 1, bool clear = true, bool clearAlpha = true, bool eraseMode = false);

	/**
	 * Binds the `source` Render Target and then copies a section of it to the
	 * `target` Render Target.
	 *
	 * This method is extremely fast because it uses `glCopyTexSubImage2D` and
	 * doesn't require the use of any shaders. Remember the coordinates are
	 * given in standard OpenGL format, where x and y specify the lower-left
	 * corner of the section, not the top-left. Also, the copy entirely
	 * replaces the contents of the target texture, no 'merging' or 'blending'
	 * takes place.
	 *
	 * @since 0.0.0
	 *
	 * @param source The source Render Target.
	 * @param target The target Render Target.
	 * @param x The x coordinate of the lower left corner where to start copying.
	 * @param y The y coordinate of the lower left corner where to start copying.
	 * @param width The width of the texture.
	 * @param height The height of the texture.
	 * @param clear Clear the target before copying?
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 */
	void copyFrameRect (RenderTarget *source, RenderTarget *target, int x, int y, int width, int height, bool clear = true, bool clearAlpha = true);

	/**
	 * Binds this pipeline and draws the `source` Render Target to the `target`
	 * Render Target.
	 *
	 * If no `target` is specified, it will pop the framebuffer from the
	 * Renderer's FBO stack and use that instead, which should be done when you
	 * need to draw the final results of this pipeline to the game canvas.
	 *
	 * You can optionally set the shader to be used for the draw here, if this
	 * is a multi-shader pipeline. By default `currentShader` will be used. If
	 * you need to set a shader but not a target, just pass `null` as the
	 * `target` parameter.
	 *
	 * @since 0.0.0
	 *
	 * @param source The Render Target to draw from.
	 * @param target The Render Target to draw to. If not set, it will pop the
	 * fbo from the stack.
	 * @param clear Clear the target before copying? Only used if `target`
	 * parameter is set.
	 * @param clearAlpha Clear the alpha channel when running glClear on the target?
	 * @param currentShader The shader to use during the draw.
	 */
	void bindAndDraw (RenderTarget *source, RenderTarget *target = nullptr, bool clear = true, bool clearAlpha = true, Shader *currentShader = nullptr);

	/**
	 * If this post-pipeline belongs to a Game Object or Camera, this contains
	 * a reference to it.
	 *
	 * @since 0.0.0
	 */
	Entity gameObject = entt::null;

	/**
	 * A Color Matrix instance belonging to this pipeline.
	 *
	 * Used during calls to the `drawFrame` method.
	 *
	 * @since 0.0.0
	 */
	ColorMatrix colorMatrix;

	/**
	 * A reference to the Full Frame 1 Render Target that belongs to the
	 * Utility Pipeline. This property is set during the `boot` method.
	 *
	 * This Render Target is the full size of the renderer.
	 *
	 * You can use this directly in Post FX Pipelines for multi-target effects.
	 * However, be aware that these targets are shared between all post fx
	 * pipelines.
	 *
	 * @since 0.0.0
	 */
	RenderTarget *fullFrame1 = nullptr;

	/**
	 * A reference to the Full Frame 2 Render Target that belongs to the
	 * Utility Pipeline. This property is set during the `boot` method.
	 *
	 * This Render Target is the full size of the renderer.
	 *
	 * You can use this directly in Post FX Pipelines for multi-target effects.
	 * However, be aware that these targets are shared between all post fx
	 * pipelines.
	 *
	 * @since 0.0.0
	 */
	RenderTarget *fullFrame2 = nullptr;

	/**
	 * A reference to the Half Frame 1 Render Target that belongs to the
	 * Utility Pipeline. This property is set during the `boot` method.
	 *
	 * This Render Target is half the size of the renderer.
	 *
	 * You can use this directly in Post FX Pipelines for multi-target effects.
	 * However, be aware that these targets are shared between all post fx
	 * pipelines.
	 *
	 * @since 0.0.0
	 */
	RenderTarget *halfFrame1 = nullptr;

	/**
	 * A reference to the Half Frame 2 Render Target that belongs to the
	 * Utility Pipeline. This property is set during the `boot` method.
	 *
	 * This Render Target is half the size of the renderer.
	 *
	 * You can use this directly in Post FX Pipelines for multi-target effects.
	 * However, be aware that these targets are shared between all post fx
	 * pipelines.
	 *
	 * @since 0.0.0
	 */
	RenderTarget *halfFrame2 = nullptr;
};

} // namespace Zen

#endif
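To make the class comment concrete, here is a hypothetical derived effect. It is a sketch only: whether `onDraw` is virtual, and how `PipelineConfig` points at a custom fragment shader, are defined in pipeline.hpp, which is not shown here.

// Hypothetical single-pass post effect built on PostFXPipeline.
// Assumptions (not confirmed by this header): onDraw is virtual in the
// Pipeline base, and `config` carries a custom fragment shader path.
class GrayscalePipeline : public Zen::PostFXPipeline {
public:
	GrayscalePipeline (Zen::PipelineConfig config)
		: Zen::PostFXPipeline(config) // config would name a grayscale fragment shader (assumed)
	{}

	void onDraw (Zen::RenderTarget *source)
	{
		// Draw `source` through this pipeline's shader straight to the next
		// framebuffer on the FBO stack, as the docs above suggest for final output.
		bindAndDraw(source);
	}
};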
import { makeAssociativeBoth } from "../_abstract/AssociativeBoth"
import * as L from "../_system/Layer"

export const LayerURI = "Layer"
export type LayerURI = typeof LayerURI

declare module "../_abstract/HKT" {
  interface URItoKind<
    Fix0,
    Fix1,
    Fix2,
    Fix3,
    K,
    NK extends string,
    SI,
    SO,
    X,
    I,
    S,
    Env,
    Err,
    Out
  > {
    [LayerURI]: L.Layer<X, Env, Err, Out>
  }
}

// AssociativeBoth for Layer: `both` pairs two layers by delegating to L.zip_.
export const AssociativeBoth = makeAssociativeBoth(LayerURI)({
  both: (fb) => (fa) => L.zip_(fa, fb)
})
// Imports below are reconstructed (the original snippet omitted them);
// News and NewsDao are project-local classes assumed to be on the classpath.
import java.io.Serializable;
import java.util.List;

import javax.annotation.Resource;

import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;

/**
 * @author Zahra Afsharinia
 */
@Controller("adminNewsController")
@Scope("view")
public class AdminNewsController implements Serializable {

    private News news = new News();

    @Resource
    private NewsDao newsDao;

    public News getNews() {
        return news;
    }

    public void setNews(News news) {
        this.news = news;
    }

    public void save() {
        newsDao.persist(news);
    }

    public List<News> getAllNews() {
        return newsDao.allNews();
    }
}
/**
 * many characters in a cell.
 * @throws Exception if failed
 */
@Test
public void many_characters() throws Exception {
    StringBuilder buf = new StringBuilder();
    final int characters = 100000;
    for (int i = 0; i < characters; i++) {
        buf.append('a');
    }
    CsvParser parser = create(buf.toString());
    assertThat(parser.next(), is(true));
    StringOption option = new StringOption();
    parser.fill(option);
    assertThat(option.getAsString().length(), is(characters));
    parser.endRecord();
    assertThat(parser.next(), is(false));
}
//===-- OpDescriptor.cpp --------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/FuzzMutate/OpDescriptor.h"
#include "llvm/IR/Constants.h"

using namespace llvm;
using namespace fuzzerop;

void fuzzerop::makeConstantsWithType(Type *T, std::vector<Constant *> &Cs) {
  if (auto *IntTy = dyn_cast<IntegerType>(T)) {
    uint64_t W = IntTy->getBitWidth();
    Cs.push_back(ConstantInt::get(IntTy, APInt::getMaxValue(W)));
    Cs.push_back(ConstantInt::get(IntTy, APInt::getMinValue(W)));
    Cs.push_back(ConstantInt::get(IntTy, APInt::getSignedMaxValue(W)));
    Cs.push_back(ConstantInt::get(IntTy, APInt::getSignedMinValue(W)));
    Cs.push_back(ConstantInt::get(IntTy, APInt::getOneBitSet(W, W / 2)));
  } else if (T->isFloatingPointTy()) {
    auto &Ctx = T->getContext();
    auto &Sem = T->getFltSemantics();
    Cs.push_back(ConstantFP::get(Ctx, APFloat::getZero(Sem)));
    Cs.push_back(ConstantFP::get(Ctx, APFloat::getLargest(Sem)));
    Cs.push_back(ConstantFP::get(Ctx, APFloat::getSmallest(Sem)));
  } else
    Cs.push_back(UndefValue::get(T));
}

std::vector<Constant *> fuzzerop::makeConstantsWithType(Type *T) {
  std::vector<Constant *> Result;
  makeConstantsWithType(T, Result);
  return Result;
}
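The vector-returning overload composes naturally with the in-place one. A small standalone driver (builds only against LLVM) showing the boundary constants produced for `i32`; the integer branch above yields UINT_MAX, 0, INT_MAX, INT_MIN, and 1 << 16:

#include "llvm/FuzzMutate/OpDescriptor.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  // Boundary constants that the fuzzer seeds operands with for i32.
  std::vector<Constant *> Cs =
      fuzzerop::makeConstantsWithType(Type::getInt32Ty(Ctx));
  for (Constant *C : Cs) {
    C->print(outs());
    outs() << "\n";
  }
}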
// api/src/SAM_Tcsdish.cpp

#include <string>
#include <utility>
#include <vector>
#include <memory>
#include <iostream>

#include <ssc/sscapi.h>

#include "SAM_api.h"
#include "ErrorHandler.h"
#include "SAM_Tcsdish.h"

// Module lifecycle

SAM_EXPORT SAM_Tcsdish SAM_Tcsdish_construct(const char* def, SAM_error* err){
	SAM_Tcsdish result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_create(); });
	return result;
}

SAM_EXPORT int SAM_Tcsdish_execute(SAM_Tcsdish data, int verbosity, SAM_error* err){
	int n_err = 0;
	translateExceptions(err, [&]{ n_err += SAM_module_exec("tcsdish", data, verbosity, err); });
	return n_err;
}

SAM_EXPORT void SAM_Tcsdish_destruct(SAM_Tcsdish system) {
	ssc_data_free(system);
}

// Input setters

SAM_EXPORT void SAM_Tcsdish_Weather_file_name_sset(SAM_Tcsdish ptr, const char* str, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_string(ptr, "file_name", str); });
}

SAM_EXPORT void SAM_Tcsdish_Dish_system_capacity_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "system_capacity", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_A_proj_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "A_proj", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_A_total_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "A_total", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_I_cut_in_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "I_cut_in", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_d_ap_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "d_ap", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_d_ap_test_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "d_ap_test", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_ew_dish_sep_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "ew_dish_sep", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_h_slot_gap_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "h_slot_gap", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_n_ew_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "n_ew", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_n_ns_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "n_ns", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_ns_dish_sep_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "ns_dish_sep", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_rho_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "rho", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_slope_ew_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "slope_ew", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_slope_ns_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "slope_ns", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_test_L_focal_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_L_focal", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_test_if_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_if", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_w_slot_gap_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "w_slot_gap", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type295_wind_stow_speed_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "wind_stow_speed", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_A_absorber_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "A_absorber", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_A_wall_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "A_wall", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_DELTA_T_DIR_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "DELTA_T_DIR", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_DELTA_T_REFLUX_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "DELTA_T_REFLUX", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_L_cav_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "L_cav", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_L_insulation_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "L_insulation", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_P_cav_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "P_cav", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_T_heater_head_high_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "T_heater_head_high", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_T_heater_head_low_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "T_heater_head_low", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_alpha_absorber_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "alpha_absorber", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_alpha_wall_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "alpha_wall", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_d_cav_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "d_cav", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_k_insulation_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "k_insulation", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_rec_type_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "rec_type", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type296_transmittance_cover_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "transmittance_cover", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Beale_const_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Beale_const_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Beale_first_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Beale_first_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Beale_fourth_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Beale_fourth_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Beale_square_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Beale_square_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Beale_third_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Beale_third_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Pressure_coef_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Pressure_coef", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_Pressure_first_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Pressure_first", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_T_compression_in_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "T_compression_in", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_V_displaced_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "V_displaced", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type297_engine_speed_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "engine_speed", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_P_controls_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "P_controls", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_P_tower_fan_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "P_tower_fan", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_T_cool_speed2_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "T_cool_speed2", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_T_cool_speed3_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "T_cool_speed3", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_Tower_water_outlet_temp_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "Tower_water_outlet_temp", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_b_cooler_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "b_cooler", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_b_radiator_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "b_radiator", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_cooling_fluid_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "cooling_fluid", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_cooling_tower_on_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "cooling_tower_on", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_d_pipe_tower_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "d_pipe_tower", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_epsilon_cooler_test_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "epsilon_cooler_test", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_epsilon_power_test_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "epsilon_power_test", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_epsilon_radiator_test_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "epsilon_radiator_test", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_eta_tower_pump_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "eta_tower_pump", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_ew_dish_separation_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "ew_dish_separation", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_fan_control_signal_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "fan_control_signal", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_fan_speed1_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "fan_speed1", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_fan_speed2_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "fan_speed2", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_fan_speed3_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "fan_speed3", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_ns_dish_separation_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "ns_dish_separation", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_pump_speed_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "pump_speed", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_system_availability_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "system_availability", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_P_fan_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_P_fan", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_P_pump_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_P_pump", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_T_fluid_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_T_fluid", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_V_dot_fluid_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_V_dot_fluid", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_cooling_fluid_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_cooling_fluid", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_fan_cfm_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_fan_cfm", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_fan_rho_air_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_fan_rho_air", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_fan_speed_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_fan_speed", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_test_pump_speed_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "test_pump_speed", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_tower_m_dot_water_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "tower_m_dot_water", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_tower_m_dot_water_test_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "tower_m_dot_water_test", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_tower_mode_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "tower_mode", number); });
}

SAM_EXPORT void SAM_Tcsdish_Type298_tower_pipe_material_nset(SAM_Tcsdish ptr, double number, SAM_error *err){
	translateExceptions(err, [&]{ ssc_data_set_number(ptr, "tower_pipe_material", number); });
}

// Input getters

SAM_EXPORT const char* SAM_Tcsdish_Weather_file_name_sget(SAM_Tcsdish ptr, SAM_error *err){
	const char* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_string(ptr, "file_name"); if (!result) make_access_error("SAM_Tcsdish", "file_name"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Dish_system_capacity_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "system_capacity", &result)) make_access_error("SAM_Tcsdish", "system_capacity"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_A_proj_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "A_proj", &result)) make_access_error("SAM_Tcsdish", "A_proj"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_A_total_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "A_total", &result)) make_access_error("SAM_Tcsdish", "A_total"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_I_cut_in_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "I_cut_in", &result)) make_access_error("SAM_Tcsdish", "I_cut_in"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_d_ap_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "d_ap", &result)) make_access_error("SAM_Tcsdish", "d_ap"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_d_ap_test_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "d_ap_test", &result)) make_access_error("SAM_Tcsdish", "d_ap_test"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_ew_dish_sep_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "ew_dish_sep", &result)) make_access_error("SAM_Tcsdish", "ew_dish_sep"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_h_slot_gap_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "h_slot_gap", &result)) make_access_error("SAM_Tcsdish", "h_slot_gap"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_n_ew_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "n_ew", &result)) make_access_error("SAM_Tcsdish", "n_ew"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_n_ns_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "n_ns", &result)) make_access_error("SAM_Tcsdish", "n_ns"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_ns_dish_sep_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "ns_dish_sep", &result)) make_access_error("SAM_Tcsdish", "ns_dish_sep"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_rho_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "rho", &result)) make_access_error("SAM_Tcsdish", "rho"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_slope_ew_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "slope_ew", &result)) make_access_error("SAM_Tcsdish", "slope_ew"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_slope_ns_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "slope_ns", &result)) make_access_error("SAM_Tcsdish", "slope_ns"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_test_L_focal_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_L_focal", &result)) make_access_error("SAM_Tcsdish", "test_L_focal"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_test_if_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_if", &result)) make_access_error("SAM_Tcsdish", "test_if"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_w_slot_gap_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "w_slot_gap", &result)) make_access_error("SAM_Tcsdish", "w_slot_gap"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type295_wind_stow_speed_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "wind_stow_speed", &result)) make_access_error("SAM_Tcsdish", "wind_stow_speed"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_A_absorber_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "A_absorber", &result)) make_access_error("SAM_Tcsdish", "A_absorber"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_A_wall_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "A_wall", &result)) make_access_error("SAM_Tcsdish", "A_wall"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_DELTA_T_DIR_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "DELTA_T_DIR", &result)) make_access_error("SAM_Tcsdish", "DELTA_T_DIR"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_DELTA_T_REFLUX_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "DELTA_T_REFLUX", &result)) make_access_error("SAM_Tcsdish", "DELTA_T_REFLUX"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_L_cav_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "L_cav", &result)) make_access_error("SAM_Tcsdish", "L_cav"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_L_insulation_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "L_insulation", &result)) make_access_error("SAM_Tcsdish", "L_insulation"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_P_cav_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "P_cav", &result)) make_access_error("SAM_Tcsdish", "P_cav"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_T_heater_head_high_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "T_heater_head_high", &result)) make_access_error("SAM_Tcsdish", "T_heater_head_high"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_T_heater_head_low_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "T_heater_head_low", &result)) make_access_error("SAM_Tcsdish", "T_heater_head_low"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_alpha_absorber_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "alpha_absorber", &result)) make_access_error("SAM_Tcsdish", "alpha_absorber"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_alpha_wall_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "alpha_wall", &result)) make_access_error("SAM_Tcsdish", "alpha_wall"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_d_cav_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "d_cav", &result)) make_access_error("SAM_Tcsdish", "d_cav"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_k_insulation_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "k_insulation", &result)) make_access_error("SAM_Tcsdish", "k_insulation"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_rec_type_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "rec_type", &result)) make_access_error("SAM_Tcsdish", "rec_type"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type296_transmittance_cover_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "transmittance_cover", &result)) make_access_error("SAM_Tcsdish", "transmittance_cover"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Beale_const_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Beale_const_coef", &result)) make_access_error("SAM_Tcsdish", "Beale_const_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Beale_first_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Beale_first_coef", &result)) make_access_error("SAM_Tcsdish", "Beale_first_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Beale_fourth_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Beale_fourth_coef", &result)) make_access_error("SAM_Tcsdish", "Beale_fourth_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Beale_square_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Beale_square_coef", &result)) make_access_error("SAM_Tcsdish", "Beale_square_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Beale_third_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Beale_third_coef", &result)) make_access_error("SAM_Tcsdish", "Beale_third_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Pressure_coef_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Pressure_coef", &result)) make_access_error("SAM_Tcsdish", "Pressure_coef"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_Pressure_first_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Pressure_first", &result)) make_access_error("SAM_Tcsdish", "Pressure_first"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_T_compression_in_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "T_compression_in", &result)) make_access_error("SAM_Tcsdish", "T_compression_in"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_V_displaced_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "V_displaced", &result)) make_access_error("SAM_Tcsdish", "V_displaced"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type297_engine_speed_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "engine_speed", &result)) make_access_error("SAM_Tcsdish", "engine_speed"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_P_controls_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "P_controls", &result)) make_access_error("SAM_Tcsdish", "P_controls"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_P_tower_fan_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "P_tower_fan", &result)) make_access_error("SAM_Tcsdish", "P_tower_fan"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_T_cool_speed2_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "T_cool_speed2", &result)) make_access_error("SAM_Tcsdish", "T_cool_speed2"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_T_cool_speed3_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "T_cool_speed3", &result)) make_access_error("SAM_Tcsdish", "T_cool_speed3"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_Tower_water_outlet_temp_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "Tower_water_outlet_temp", &result)) make_access_error("SAM_Tcsdish", "Tower_water_outlet_temp"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_b_cooler_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "b_cooler", &result)) make_access_error("SAM_Tcsdish", "b_cooler"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_b_radiator_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "b_radiator", &result)) make_access_error("SAM_Tcsdish", "b_radiator"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_cooling_fluid_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "cooling_fluid", &result)) make_access_error("SAM_Tcsdish", "cooling_fluid"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_cooling_tower_on_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "cooling_tower_on", &result)) make_access_error("SAM_Tcsdish", "cooling_tower_on"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_d_pipe_tower_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "d_pipe_tower", &result)) make_access_error("SAM_Tcsdish", "d_pipe_tower"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_epsilon_cooler_test_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "epsilon_cooler_test", &result)) make_access_error("SAM_Tcsdish", "epsilon_cooler_test"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_epsilon_power_test_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "epsilon_power_test", &result)) make_access_error("SAM_Tcsdish", "epsilon_power_test"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_epsilon_radiator_test_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "epsilon_radiator_test", &result)) make_access_error("SAM_Tcsdish", "epsilon_radiator_test"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_eta_tower_pump_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "eta_tower_pump", &result)) make_access_error("SAM_Tcsdish", "eta_tower_pump"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_ew_dish_separation_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "ew_dish_separation", &result)) make_access_error("SAM_Tcsdish", "ew_dish_separation"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_fan_control_signal_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "fan_control_signal", &result)) make_access_error("SAM_Tcsdish", "fan_control_signal"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_fan_speed1_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "fan_speed1", &result)) make_access_error("SAM_Tcsdish", "fan_speed1"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_fan_speed2_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "fan_speed2", &result)) make_access_error("SAM_Tcsdish", "fan_speed2"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_fan_speed3_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "fan_speed3", &result)) make_access_error("SAM_Tcsdish", "fan_speed3"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_ns_dish_separation_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "ns_dish_separation", &result)) make_access_error("SAM_Tcsdish", "ns_dish_separation"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_pump_speed_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "pump_speed", &result)) make_access_error("SAM_Tcsdish", "pump_speed"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_system_availability_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "system_availability", &result)) make_access_error("SAM_Tcsdish", "system_availability"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_P_fan_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_P_fan", &result)) make_access_error("SAM_Tcsdish", "test_P_fan"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_P_pump_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_P_pump", &result)) make_access_error("SAM_Tcsdish", "test_P_pump"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_T_fluid_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_T_fluid", &result)) make_access_error("SAM_Tcsdish", "test_T_fluid"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_V_dot_fluid_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_V_dot_fluid", &result)) make_access_error("SAM_Tcsdish", "test_V_dot_fluid"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_cooling_fluid_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_cooling_fluid", &result)) make_access_error("SAM_Tcsdish", "test_cooling_fluid"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_fan_cfm_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_fan_cfm", &result)) make_access_error("SAM_Tcsdish", "test_fan_cfm"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_fan_rho_air_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_fan_rho_air", &result)) make_access_error("SAM_Tcsdish", "test_fan_rho_air"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_fan_speed_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_fan_speed", &result)) make_access_error("SAM_Tcsdish", "test_fan_speed"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_test_pump_speed_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "test_pump_speed", &result)) make_access_error("SAM_Tcsdish", "test_pump_speed"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_tower_m_dot_water_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "tower_m_dot_water", &result)) make_access_error("SAM_Tcsdish", "tower_m_dot_water"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_tower_m_dot_water_test_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "tower_m_dot_water_test", &result)) make_access_error("SAM_Tcsdish", "tower_m_dot_water_test"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_tower_mode_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "tower_mode", &result)) make_access_error("SAM_Tcsdish", "tower_mode"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Type298_tower_pipe_material_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "tower_pipe_material", &result)) make_access_error("SAM_Tcsdish", "tower_pipe_material"); });
	return result;
}

// Output arrays

SAM_EXPORT double* SAM_Tcsdish_Outputs_Collector_Losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Collector_Losses", length); if (!result) make_access_error("SAM_Tcsdish", "Collector_Losses"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_P_SE_losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "P_SE_losses", length); if (!result) make_access_error("SAM_Tcsdish", "P_SE_losses"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_P_out_SE_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "P_out_SE", length); if (!result) make_access_error("SAM_Tcsdish", "P_out_SE"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_P_out_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "P_out_rec", length); if (!result) make_access_error("SAM_Tcsdish", "P_out_rec"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_P_parasitic_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "P_parasitic", length); if (!result) make_access_error("SAM_Tcsdish", "P_parasitic"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_Phi_shade_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Phi_shade", length); if (!result) make_access_error("SAM_Tcsdish", "Phi_shade"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_Power_in_collector_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Power_in_collector", length); if (!result) make_access_error("SAM_Tcsdish", "Power_in_collector"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_Power_in_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Power_in_rec", length); if (!result) make_access_error("SAM_Tcsdish", "Power_in_rec"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_Power_out_col_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Power_out_col", length); if (!result) make_access_error("SAM_Tcsdish", "Power_out_col"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_Q_rec_losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "Q_rec_losses", length); if (!result) make_access_error("SAM_Tcsdish", "Q_rec_losses"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_T_compression_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "T_compression", length); if (!result) make_access_error("SAM_Tcsdish", "T_compression"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_T_heater_head_operate_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "T_heater_head_operate", length); if (!result) make_access_error("SAM_Tcsdish", "T_heater_head_operate"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_T_tower_in_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "T_tower_in", length); if (!result) make_access_error("SAM_Tcsdish", "T_tower_in"); });
	return result;
}

SAM_EXPORT double* SAM_Tcsdish_Outputs_T_tower_out_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){
	double* result = nullptr;
	translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "T_tower_out", length); if (!result) make_access_error("SAM_Tcsdish", "T_tower_out"); });
	return result;
}

// Annual output totals

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_Collector_Losses_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Collector_Losses", &result)) make_access_error("SAM_Tcsdish", "annual_Collector_Losses"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_P_out_SE_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_P_out_SE", &result)) make_access_error("SAM_Tcsdish", "annual_P_out_SE"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_P_out_rec_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_P_out_rec", &result)) make_access_error("SAM_Tcsdish", "annual_P_out_rec"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_P_parasitic_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_P_parasitic", &result)) make_access_error("SAM_Tcsdish", "annual_P_parasitic"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_Power_in_collector_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Power_in_collector", &result)) make_access_error("SAM_Tcsdish", "annual_Power_in_collector"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_Power_in_rec_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Power_in_rec", &result)) make_access_error("SAM_Tcsdish", "annual_Power_in_rec"); });
	return result;
}

SAM_EXPORT double SAM_Tcsdish_Outputs_annual_Power_out_col_nget(SAM_Tcsdish ptr, SAM_error *err){
	double result;
	// The source text ends mid-function here; the body below is completed to
	// match the uniform getter pattern used throughout this file.
	translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Power_out_col", &result)) make_access_error("SAM_Tcsdish", "annual_Power_out_col"); });
	return result;
}
translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Power_out_col", &result)) make_access_error("SAM_Tcsdish", "annual_Power_out_col"); }); return result; } SAM_EXPORT double SAM_Tcsdish_Outputs_annual_Q_rec_losses_nget(SAM_Tcsdish ptr, SAM_error *err){ double result; translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_Q_rec_losses", &result)) make_access_error("SAM_Tcsdish", "annual_Q_rec_losses"); }); return result; } SAM_EXPORT double SAM_Tcsdish_Outputs_annual_energy_nget(SAM_Tcsdish ptr, SAM_error *err){ double result; translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "annual_energy", &result)) make_access_error("SAM_Tcsdish", "annual_energy"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_beam_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "beam", length); if (!result) make_access_error("SAM_Tcsdish", "beam"); }); return result; } SAM_EXPORT double SAM_Tcsdish_Outputs_capacity_factor_nget(SAM_Tcsdish ptr, SAM_error *err){ double result; translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "capacity_factor", &result)) make_access_error("SAM_Tcsdish", "capacity_factor"); }); return result; } SAM_EXPORT double SAM_Tcsdish_Outputs_conversion_factor_nget(SAM_Tcsdish ptr, SAM_error *err){ double result; translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "conversion_factor", &result)) make_access_error("SAM_Tcsdish", "conversion_factor"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_engine_pressure_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "engine_pressure", length); if (!result) make_access_error("SAM_Tcsdish", "engine_pressure"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_eta_SE_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "eta_SE", length); if (!result) make_access_error("SAM_Tcsdish", "eta_SE"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_eta_collector_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "eta_collector", length); if (!result) make_access_error("SAM_Tcsdish", "eta_collector"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_eta_net_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "eta_net", length); if (!result) make_access_error("SAM_Tcsdish", "eta_net"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_eta_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "eta_rec", length); if (!result) make_access_error("SAM_Tcsdish", "eta_rec"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_gen_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "gen", length); if (!result) make_access_error("SAM_Tcsdish", "gen"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hour_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hour", length); if (!result) make_access_error("SAM_Tcsdish", "hour"); 
}); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_Collector_Losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_Collector_Losses", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_Collector_Losses"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_P_out_SE_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_P_out_SE", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_P_out_SE"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_P_out_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_P_out_rec", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_P_out_rec"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_P_parasitic_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_P_parasitic", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_P_parasitic"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_Power_in_collector_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_Power_in_collector", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_Power_in_collector"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_Power_in_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_Power_in_rec", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_Power_in_rec"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_Power_out_col_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_Power_out_col", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_Power_out_col"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_hourly_Q_rec_losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "hourly_Q_rec_losses", length); if (!result) make_access_error("SAM_Tcsdish", "hourly_Q_rec_losses"); }); return result; } SAM_EXPORT double SAM_Tcsdish_Outputs_kwh_per_kw_nget(SAM_Tcsdish ptr, SAM_error *err){ double result; translateExceptions(err, [&]{ if (!ssc_data_get_number(ptr, "kwh_per_kw", &result)) make_access_error("SAM_Tcsdish", "kwh_per_kw"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_month_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "month", length); if (!result) make_access_error("SAM_Tcsdish", "month"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_Collector_Losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_Collector_Losses", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_Collector_Losses"); }); return result; } SAM_EXPORT double* 
SAM_Tcsdish_Outputs_monthly_P_out_SE_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_P_out_SE", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_P_out_SE"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_P_out_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_P_out_rec", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_P_out_rec"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_P_parasitic_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_P_parasitic", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_P_parasitic"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_Power_in_collector_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_Power_in_collector", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_Power_in_collector"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_Power_in_rec_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_Power_in_rec", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_Power_in_rec"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_Power_out_col_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_Power_out_col", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_Power_out_col"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_Q_rec_losses_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_Q_rec_losses", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_Q_rec_losses"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_monthly_energy_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "monthly_energy", length); if (!result) make_access_error("SAM_Tcsdish", "monthly_energy"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_net_power_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "net_power", length); if (!result) make_access_error("SAM_Tcsdish", "net_power"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_pres_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "pres", length); if (!result) make_access_error("SAM_Tcsdish", "pres"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_solazi_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "solazi", length); if (!result) make_access_error("SAM_Tcsdish", "solazi"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_solzen_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; 
translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "solzen", length); if (!result) make_access_error("SAM_Tcsdish", "solzen"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_tdry_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "tdry", length); if (!result) make_access_error("SAM_Tcsdish", "tdry"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_twet_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "twet", length); if (!result) make_access_error("SAM_Tcsdish", "twet"); }); return result; } SAM_EXPORT double* SAM_Tcsdish_Outputs_wspd_aget(SAM_Tcsdish ptr, int* length, SAM_error *err){ double* result = nullptr; translateExceptions(err, [&]{ result = ssc_data_get_array(ptr, "wspd", length); if (!result) make_access_error("SAM_Tcsdish", "wspd"); }); return result; }
CONFIG = { 'gal_type': 'wldeblend', 'psf_type': 'wldeblend', 'shear_scene': True, 'n_coadd': 1, 'n_coadd_psf': 1, 'dim': 300, 'scale': 0.2, 'g1': 0.02, 'g2': 0.0, 'g1ex': 0.02, 'g2ex': 0.0, 'gal_kws': {'survey_name': 'LSST', 'bands': ('r', 'i', 'z')} } match = True invert_ex = False
/** * <p> * {@link SesameTransactionObject} holds the transaction state which consists of: * </p> * <p/> * <ul> * <li>{@link RepositoryConnection}</li> * <li>Name</li> * <li>Transaction re-use marker</li> * <li>Rollback marker</li> * <li>Timeout marker</li> * <li>Isolation level marker</li> * <li>Propagation behavior marker</li> * <li>Read only marker</li> * </ul> * * @author [email protected] * @see org.springframework.transaction.TransactionDefinition */ public class SesameTransactionObject { private final RepositoryConnection repositoryConnection; private String name = ""; private boolean existing = false; private boolean rollbackOnly = false; private int timeout = TIMEOUT_DEFAULT; private int isolationLevel = ISOLATION_DEFAULT; private int propagationBehavior = PROPAGATION_REQUIRED; private boolean readOnly = false; public SesameTransactionObject(RepositoryConnection repositoryConnection) { this.repositoryConnection = repositoryConnection; } @Override public String toString() { return "SesameTransactionObject{" + "repositoryConnection=" + repositoryConnection + ", name='" + name + '\'' + ", existing=" + existing + ", rollbackOnly=" + rollbackOnly + ", timeout=" + timeout + ", isolationLevel=" + isolationLevel + ", propagationBehavior=" + propagationBehavior + ", readOnly=" + readOnly + '}'; } public RepositoryConnection getRepositoryConnection() { return repositoryConnection; } public boolean isExisting() { return existing; } public void setExisting(boolean existing) { this.existing = existing; } public boolean isRollbackOnly() { return rollbackOnly; } public void setRollbackOnly(boolean rollbackOnly) { this.rollbackOnly = rollbackOnly; } public int getTimeout() { return timeout; } public void setTimeout(int timeout) { this.timeout = timeout; } public int getIsolationLevel() { return isolationLevel; } public void setIsolationLevel(int isolationLevel) { this.isolationLevel = isolationLevel; } public int getPropagationBehavior() { return propagationBehavior; } public void setPropagationBehavior(int propagationBehavior) { this.propagationBehavior = propagationBehavior; } public boolean isReadOnly() { return readOnly; } public void setReadOnly(boolean readOnly) { this.readOnly = readOnly; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
Rachel Maddow ran a segment fact-checking the “untrue things” that Donald Trump said during Monday night’s presidential debate. “The fact-checking, certainly, could be done,” Maddow argued. “I mean most people could do it off the top of their heads.” Unfortunately, Maddow appears not to be one of those people. Let’s go over the evidence:

“Pres. George H.W. Bush Signed NAFTA”

Bush One began NEGOTIATING the North American Free Trade Agreement back in 1990. However, Bush’s term ended prior to the treaty’s congressional ratification (November 1993). It was eventually signed into law by President Bill Clinton on December 8, 1993, and went into effect in January 1994.

“NYC Murder Rate Going Down”

The murder rate in New York City steadily declined throughout the late 1990s and early 2000s and bottomed out at 328 per year in 2014. Still, 2015 saw a 7 percent increase in the murder rate. So, technically, the murder rate is not going down. (RELATED: Hillary AND Trump Were Wrong About The NYC Murder Rate)

“Trump In Favor Of Iraq War Before It Started”

Trump himself addressed this accusation in the debate. Maddow was likely referencing Trump’s 2002 Howard Stern interview, in which he expressed support for Bush Two’s plan. (VIDEO: Nothing To See Here, Just Trump Criticizing The Iraq War In 2003)

Trump has since admitted that at the time of the Stern interview, he didn’t really know what he was talking about, yet after discussing the issue with Fox’s Sean Hannity, who strongly supported the war, Trump began to oppose it. (RELATED: Hannity Confirms Trump’s Anti-War Stance)

“Hillary Clinton Did Not Originate Fake Birth Certificate Conspiracy”

How much time do you have? While Trump certainly was one of the birther movement’s most prominent champions, there’s no denying the role Hillary Clinton played in perpetuating those rumors. Former McClatchy Washington Bureau Chief James Asher recalls how long-time Clinton surrogate Sidney Blumenthal petitioned reporters to cover the rumors back in 2007. (RELATED: Clinton Surrogate ‘Pitched’ Obama Birther During Primary)

@HillaryClinton So why did your man #sidblumenthal spread the #obama birther rumor to me in 2008, asking us to investigate? Remember? — James Asher (@jimasher) September 16, 2016

Still, no one describes it better than President Obama’s former top aide Reggie Love, who noted that the moment when POTUS finally called out Clinton on the tarmac at Reagan National Airport for spreading those lies was the moment the Obama team knew it had locked up the 2008 Democratic nomination. (RELATED: Former Top Aide Recalls The Time Obama Chewed Out Hillary For Spreading Lies About Him)

In a matter of seconds, she went from composed to furious. It had not been Obama’s intention to upset her, but he wasn’t going to play the fool either. To all of us watching the spat unfold, it was an obvious turning point in our campaign, and we knew it. Clinton was no less competitive or committed to a cause than Obama, and the electric tension running through both candidates and their respective staffs reflected the understanding that she was no longer the de facto Democratic candidate. Her inevitability had been questioned.
EP671 Genital warts in pregnancy: diagnosis and treatment of the most common cause of laryngeal papillomatosis in children under 10 years old

Introduction/Background HPV infection is an epidemic of the modern age, with the highest infection rates among women between 18 and 30 years of age. Due to the altered immune status during pregnancy, the spread of HPV infection is progressive. During labor, any retention of the child in the birth canal leads to aspiration of HPV particles, which represents the most common cause of laryngeal papillomatosis in children.

Methodology The study involved 60 pregnant women between 18 and 30 years of age diagnosed with genital warts in early and advanced stages, who were treated with a radio wave (RF) technique that enables smooth vaginal delivery with no signs of HPV infection in the genito-anal region. The radio wave technique involves a special combination of radio wave access evaporisation and radio wave melting. Radio wave access evaporisation causes the evaporation of HPV-infected cells, and radio wave melting achieves the bloodless removal of condylomata.

Results Colposcopic examination reveals subclinical stages of genital warts on the mucous membrane of the labia and at the entrance to the vagina, which provides the conditions for their immediate removal. The result of radio wave therapy is a bloodless surgical field with precise and controlled removal of all forms of genital warts in one act throughout pregnancy. The operation is performed under local anesthesia only, with minimal damage to the surrounding healthy tissue, rapid recovery without accompanying infection, bleeding, or recurrence, and complete protection of the mother and fetus.

Conclusion Genital warts during pregnancy represent a risk to the fetus during vaginal childbirth regardless of the severity of the clinical picture. The absence of colposcopic diagnosis, avoidance of wart removal during pregnancy, or use of the wrong treatment leads to progression of condylomata and to infection of the fetus through aspiration of HPV particles in the birth canal.

Disclosure Nothing to disclose.
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE LambdaCase #-} module Jikka.Core.Language.Value where import Data.Char (toLower) import Data.List (intercalate) import qualified Data.Map as M import Data.Maybe (fromMaybe) import qualified Data.Vector as V import Jikka.Common.Error import Jikka.Common.IOFormat import Jikka.Common.Matrix import Jikka.Common.ModInt import Jikka.Core.Format (formatBuiltinIsolated, formatExpr) import Jikka.Core.Language.Expr data Value = ValInt Integer | ValBool Bool | ValList (V.Vector Value) | ValTuple [Value] | ValBuiltin Builtin [Type] [Value] | -- | The `Env` may contain the `ValLambda` cyclicly. ValLambda (Maybe VarName) Env VarName Type Expr deriving (Eq, Read) type Env = [(VarName, Value)] literalToValue :: MonadError Error m => Literal -> m Value literalToValue = \case LitBuiltin builtin ts -> return $ ValBuiltin builtin ts [] LitInt n -> return $ ValInt n LitBool p -> return $ ValBool p LitNil _ -> return $ ValList V.empty LitBottom _ err -> throwRuntimeError err valueToInt :: MonadError Error m => Value -> m Integer valueToInt = \case ValInt n -> return n val -> throwInternalError $ "not an integer value: " ++ formatValue val valueToList :: MonadError Error m => Value -> m (V.Vector Value) valueToList = \case ValList xs -> return xs val -> throwInternalError $ "not a list value: " ++ formatValue val valueToIntList :: MonadError Error m => Value -> m [Integer] valueToIntList xs = mapM valueToInt . V.toList =<< valueToList xs valueToBool :: MonadError Error m => Value -> m Bool valueToBool = \case ValBool p -> return p val -> throwInternalError $ "not an boolean value: " ++ formatValue val valueToBoolList :: MonadError Error m => Value -> m [Bool] valueToBoolList xs = mapM valueToBool . V.toList =<< valueToList xs valueToTuple :: MonadError Error m => Value -> m [Value] valueToTuple = \case ValTuple xs -> return xs val -> throwInternalError $ "not a tuple value: " ++ formatValue val valueToIntPair :: MonadError Error m => Value -> m (Integer, Integer) valueToIntPair = \case ValTuple [a, b] -> (,) <$> valueToInt a <*> valueToInt b val -> throwInternalError $ "not a tuple value: " ++ formatValue val valueToVector :: MonadError Error m => Value -> m (V.Vector Integer) valueToVector = \case ValTuple x -> V.fromList <$> mapM valueToInt x val -> throwInternalError $ "not a vector: " ++ formatValue val valueToMatrix :: MonadError Error m => Value -> m (Matrix Integer) valueToMatrix a = do a <- V.mapM valueToVector . V.fromList =<< valueToTuple a case makeMatrix a of Just a -> return a Nothing -> throwInternalError $ "not a matrix: " ++ show a valueFromVector :: V.Vector Integer -> Value valueFromVector x = ValTuple (map ValInt (V.toList x)) valueFromMatrix :: Matrix Integer -> Value valueFromMatrix f = ValTuple (map (ValTuple . map ValInt . V.toList) (V.toList (unMatrix f))) valueToModVector :: MonadError Error m => Integer -> Value -> m (V.Vector ModInt) valueToModVector m x = V.map (`toModInt` m) <$> valueToVector x valueToModMatrix :: MonadError Error m => Integer -> Value -> m (Matrix ModInt) valueToModMatrix m f = fmap (`toModInt` m) <$> valueToMatrix f valueFromModVector :: V.Vector ModInt -> Value valueFromModVector = valueFromVector . V.map fromModInt valueFromModMatrix :: Matrix ModInt -> Value valueFromModMatrix = valueFromMatrix . 
fmap fromModInt compareValues :: Value -> Value -> Maybe Ordering compareValues a b = case (a, b) of (ValInt a, ValInt b) -> Just (compare a b) (ValBool a, ValBool b) -> Just (compare a b) (ValList a, ValList b) -> case mconcat <$> zipWithM compareValues (V.toList a) (V.toList b) of Just EQ -> Just (compare (V.length a) (V.length b)) ordering -> ordering (ValTuple a, ValTuple b) -> mconcat <$> zipWithM compareValues a b (_, _) -> Nothing compareValues' :: Value -> Value -> Ordering compareValues' a b = fromMaybe EQ (compareValues a b) minValue :: Value -> Value -> Value minValue a b = if compareValues' a b == LT then a else b maxValue :: Value -> Value -> Value maxValue a b = if compareValues' a b == GT then a else b formatValue :: Value -> String formatValue = \case ValInt n -> show n ValBool p -> map toLower (show p) ValList xs -> "[" ++ intercalate ", " (map formatValue (V.toList xs)) ++ "]" ValTuple [x] -> "(" ++ formatValue x ++ ",)" ValTuple xs -> "(" ++ intercalate ", " (map formatValue xs) ++ ")" ValBuiltin builtin ts [] -> formatBuiltinIsolated builtin ts ValBuiltin builtin ts args -> formatBuiltinIsolated builtin ts ++ "(" ++ intercalate ", " (map formatValue args) ++ ")" ValLambda _ _ x t body -> formatExpr (Lam x t body) -- Don't show env because it may be cyclic. readValueIO :: (MonadError Error m, MonadIO m) => IOFormat -> m ([Value], M.Map String Value) readValueIO = makeReadValueIO valueToInt ValInt valueToList ValList writeValueIO :: (MonadError Error m, MonadIO m) => IOFormat -> M.Map String Value -> Value -> m () writeValueIO = makeWriteValueIO valueToTuple ValInt valueToInt valueToList
import * as React from 'react'; import { BlockDef, RenderDesignProps, RenderInstanceProps, ContextVar, ChildBlock } from '../blocks'; import CompoundBlock from '../CompoundBlock'; import { LocalizedString } from 'mwater-expressions'; /** Table of contents with nested items each showing a different block in main area */ export interface TOCBlockDef extends BlockDef { type: "toc"; /** Nestable items in the table of contents */ items: TOCItem[]; /** Optional header */ header: BlockDef | null; /** Optional footer */ footer: BlockDef | null; } /** An item within the table of contents */ interface TOCItem { /** uuid id */ id: string; /** Localized label */ label: LocalizedString; /** Content to be displayed when the item is selected */ content: BlockDef | null; /** Any children items */ children: TOCItem[]; } export declare class TOCBlock extends CompoundBlock<TOCBlockDef> { /** Get child blocks */ getChildren(contextVars: ContextVar[]): ChildBlock[]; validate(): null; processChildren(action: (self: BlockDef | null) => BlockDef | null): BlockDef; renderDesign(props: RenderDesignProps): JSX.Element; renderInstance(props: RenderInstanceProps): React.ReactElement<any>; } export {};
// Parse returns the primitive representation of the provided gender. func Parse(input string) Gender { switch input { case "♂": return Male case "♀": return Female } return None }
Multinational Study on the Clinical and Genetic Features of the ERCC6L2-Disease Biallelic mutations in ERCC6L2 were first reported to cause bone marrow failure (BMF). Additionally, we recently described a strong predisposition to erythroid lineage-restricted acute myeloid leukemia (AML-M6). To date, 31 ERCC6L2-mutated cases have been reported. This study aims to further explore the clinical and molecular features, as well as outcomes, of ERCC6L2 patients. By June 2021, we had gathered clinical and genetic characteristics of 46 subjects in 31 families with biallelic germ line ERCC6L2 mutations from Finland (n = 21), France (n = 8), Israel (n = 1), Sweden (n = 4), and patients referred to a center in the United Kingdom (n = 12). Extension of the data collection to additional countries is ongoing. According to our data, ERCC6L2-disease often presents first as mild and fluctuating cytopenias with underlying bone marrow (BM) hypoplasia. With increasing age, patients develop clonal hematopoiesis with somatic mutations in TP53, and ultimately myelodysplastic syndrome (MDS) leading to very-high-risk acute myeloid leukemia. The median age of the patients at first referral to a hematologist was 18 years (range 6-65 years). Characteristic changes in the complete blood count (CBC) were mild thrombocytopenia, leukopenia, and sometimes macrocytosis. In individual pediatric or adolescent cases, more pronounced pancytopenia has also been noted. Severe BM hypoplasia was detected in many patients despite only mild changes in the CBC, indicating that examining the histology of the BM biopsy, in addition to the BM aspirate and CBC, is crucial. Spontaneous, possibly transient, recovery of the CBC has also occurred in a few patients. All but one patient above 10 years of age with data from somatic mutation analysis (n = 17) carried one to four somatic TP53 mutations in their bone marrow. Among the 46 subjects, nine have been diagnosed with MDS and nine with AML (six with the French-American-British subtype M6, erythroleukemia; three with non-specific subtypes). Interestingly, increased reticulin fibrosis in the BM has been identified in at least three of the nine patients with MDS. In the nine patients diagnosed with AML, the median age at the appearance of leukemia was 37 years (range 20-65). Characteristic of TP53-mutated AML, all leukemia patients had a complex karyotype and died, despite intensive therapy, within one year of diagnosis. Approximately half of the patients with ERCC6L2-disease have been identified in Finland. The great majority of these patients (20/21) carry the same biallelic ERCC6L2 mutation (NM_020207.7) c.1424delT (p.Ile475ThrfsTer36). None of the Finnish families (n = 14) are consanguineous, but according to them, their ancestries reside in North-Eastern Finland, indicating a founder effect. A more detailed genealogical analysis is ongoing, and we suggest that ERCC6L2-disease be added to the Finnish Disease Heritage as the first cancer predisposition syndrome. Like many of the conditions identified in genetic isolates, ERCC6L2-disease is not restricted to Finland. Thus, the current global effort to define the phenotype, as well as further molecular studies, will guide clinicians in tailoring follow-up and therapies for patients with ERCC6L2-disease. As a novel entity in the field of inherited bone marrow failure syndromes, we want to increase awareness of ERCC6L2-disease and encourage clinics to integrate ERCC6L2 into their germ line testing.
Siitonen: Celgene: Membership on an entity's Board of Directors or advisory committees; Pfizer: Consultancy, Membership on an entity's Board of Directors or advisory committees; AbbVie: Membership on an entity's Board of Directors or advisory committees; Janssen-Cilag: Consultancy, Membership on an entity's Board of Directors or advisory committees; Bristol Myers Squibb: Consultancy; Novartis: Honoraria, Membership on an entity's Board of Directors or advisory committees; Amgen: Honoraria.
// New returns an initialized Datastore instance. func New(osAddr string, redisConn RedisConn, requiredUsers map[string]func(json.RawMessage) (map[int]bool, string, error), projectorSlides map[string]projector.Callable, closed <-chan struct{}) (*Datastore, error) { fd, max, min, err := redisConn.FullData() if err != nil { return nil, fmt.Errorf("get startdata from redis: %w", err) } d := &Datastore{ osAddr: osAddr, redisConn: redisConn, cache: new(cache), minChangeID: min, maxChangeID: max, requiredUser: requiredUser{callables: requiredUsers}, closed: closed, } d.Projectors = NewProjectors(d, projectorSlides, closed) if err := d.update(fd, max); err != nil { return nil, fmt.Errorf("initial datastore update: %w", err) } return d, nil }
/* * Free any memory in @free but not in @dont. */ static void kvm_free_physmem_slot(struct kvm_memory_slot *free, struct kvm_memory_slot *dont) { int i; if (!dont || free->phys_mem != dont->phys_mem) if (free->phys_mem) { for (i = 0; i < free->npages; ++i) if (free->phys_mem[i]) __free_page(free->phys_mem[i]); vfree(free->phys_mem); } if (!dont || free->dirty_bitmap != dont->dirty_bitmap) vfree(free->dirty_bitmap); free->phys_mem = NULL; free->npages = 0; free->dirty_bitmap = NULL; }
def del_elem(self, id_zone): self._zones.del_element(id_zone)
Different Influence of Negative and Positive Spillover between Work and Life on Depression in a Longitudinal Study

Background This study investigated the longitudinal associations between the degrees of positive and negative spillover in work–life balance (WLB) at baseline and reports of depressive mood at a 2-year follow-up in Korean women employees.

Methods We used panel data on 1386 women employees who participated in the Korean Longitudinal Survey of Women and Families in both 2014 and 2016. Depressive mood was measured using the 10-item Center for Epidemiologic Studies Depression Scale. Associations between positive and negative spillover in WLB at baseline and reports of a new incidence of depressive mood at the 2-year follow-up were explored using a multivariate logistic regression model.

Results Negative spillover in WLB at baseline showed a significant linear association with reports of depressive mood at the 2-year follow-up after adjusting for age, education level, marital status, number of children, and positive spillover (P = 0.014). The highest scoring group in negative spillover (fourth quartile) showed a significantly higher odds ratio of 1.95 compared with the lowest scoring group (first quartile; P = 0.036).

Conclusion Positive spillover in WLB showed a U-shaped association with depression. The degrees of positive and negative spillover in WLB among Korean women employees at baseline were associated with new incidence of depressive mood within 2 years. To prevent depression among female workers, more discrete and differentiated policies on how to maintain healthy WLB are required.

Introduction

Work–life balance (WLB), important for social well-being and institutional productivity, has been shown to relate to various mental health issues, including sleep disorders, anxiety, burnout, sickness absence, and substance abuse. WLB can be defined as an individual's ability to meet their work and family commitments, as well as other nonwork responsibilities and activities. Nonwork activities, such as participation in the community and family, can seem demanding if the burden of dual duties is not shared or divided. Keeping a stable boundary between these two domains of work and private life, however, might not always be feasible, although conditions of working space and hours are improving. Along with the increase in women's economic activities, the narrowing of the wage gap compared with that of men, and the increase in conflicts among mixed roles, women's conflicts between work and private life have become a critical issue for female laborers. Although 143 countries around the world guarantee gender equality in their constitutions, women are more vulnerable to conflicts between work and home than men. Even in Finland, where gender equality is relatively well achieved, reports of work-related health problems, such as absenteeism, were higher in women than in men. In South Korea, where women's human rights, gender equality, and protection of motherhood have been gradually legalized, gender disparities are still prevalent in the workplace and in mental health. Social discrepancies between the two genders may explain why women are more vulnerable to conflicts in WLB, whereas biological differences can be related to intrapsychic attributes regardless of social circumstances.
Depression in particular, with a lifetime prevalence of 10–15% in the general population and its contribution to economic burden, is significantly associated with the imbalance between work and private life. Several studies suggest that workers' negative perspectives on work–life imbalance are related to depression, but the causal association between WLB and depression has not been clearly elucidated. Without clear evidence that work–life imbalance causes depression, interventions focusing on WLB cannot contain practical roadmaps to improve mental health. Contrary to other general stress theories at work, which focus on the stress per se without considering the main dimensions of work and private life, we focused on spillover theory to highlight the augmentation or depletion between two compartmentalized sectors of daily life, in line with the conservation of resources (COR) theory. Whereas COR theory deals with human resources vis-à-vis outer stress in one domain, spillover theory dynamically explains how transmission across the two domains of home and work influences each other both positively (positive spillover) and negatively (negative spillover). Spillover theory holds that a person's attitudes, emotions, skills, and behaviors in one domain flow into the other and vice versa, and that this can occur in both positive and negative ways. Negative spillover indicates that limited resources for managing multiple roles in both domains can reciprocally induce stress and strain. Serious familial conflicts or mishaps may prevent people from focusing on their work, thus impeding their productivity significantly; this phenomenon is called negative spillover. Several studies conducted with working adults in the Netherlands, Australia, Sweden, and the United States have reported a significant association between negative spillover and mental illness, such as depression. Positive spillover means that positive aspects of the workplace can enrich life at home and vice versa, but its association with depression has been investigated in only a few studies. Some studies have shown that positive spillover is more strongly associated with depression than negative spillover among dual-earner couples, is negatively related to problematic drinking, and can also affect family members' depression, a phenomenon called "crossover". However, the association of positive spillover with mental health has not yet been studied as extensively as that of negative spillover. Furthermore, the positive effects of the interaction between work and family have not been studied extensively from a chronological perspective. Accordingly, in the present study, we attempted to understand these longitudinal effects by examining the positive and negative interactions between work and life. Using spillover theory on the interaction between work and life, we hypothesized that (1) a stronger degree of negative spillover in WLB at baseline would be related to a higher incidence of depressive mood through a 2-year follow-up and (2) more positive spillover in WLB at baseline would be associated with a lower incidence of depressive mood at the 2-year follow-up in female employees in Korea. We also tried to identify the demographic variables involved in the process of negative or positive spillover. For evaluating the relationship between WLB and depression, the Korean Longitudinal Survey of Women and Families (KLoWF) data sets of 2014 and 2016 were used.
Data collection

KLoWF is a panel-designed survey that has been conducted by the Korean Women's Development Institute since 2007. It is a repeated panel survey of nearly 9000 women with a multistage sampling design stratified to represent Korean women. The survey was conducted by trained interviewers through face-to-face interviews using a computer-assisted personal interview system. The KLoWF questionnaires consisted of three sections: household (family relationship, household income, housing, and consumption), individual (education level, marital status, pregnancy history, childbirth history, and family values), and job (economic activity, job satisfaction level, and knowledge about social insurance). In this study, we used the fifth wave data set surveyed in 2014 and the sixth wave data set surveyed in 2016 in the KLoWF panel, with a total of 8399 participants, and we targeted the 2763 working women among these participants. Subjects diagnosed with psychiatric and/or chronic diseases, including cardiovascular disease, respiratory disease, and cancer, were excluded (n = 476, 17.2%). After excluding subjects with missing information on demographic variables (n = 721, 26.0%) and subjects with depressive symptoms (10-item Center for Epidemiological Studies Depression Scale [CES-D-10] ≥ 10) at baseline (n = 180, 6.5%), the final sample size was 1386 (50.2%; Fig. 1).

Items of depressive symptoms

The CES-D-10, a short form of the CES-D-20 developed by the National Institute of Mental Health and a widely used instrument for screening depression with well-recognized reliability and validity, was used. The reliability and validity of the Korean version of the CES-D-10 were previously reported. Items included in the CES-D-10 scale are shown in Supplementary table S1. The frequency of each symptom item in the past week was reported using a 4-point Likert scale ranging from 0 ("None of the time") to 3 ("Most of the time"). The total score of the CES-D-10 ranges from 0 to 30, and this study used a cutoff value of 10, so that those with a total CES-D-10 score ≥ 10 were classified into the depressive group. As the fifth and sixth wave surveys were performed at a 2-year interval among the same panel participants, newly developed depressive symptom cases in this study were defined as those who scored below 10 on the CES-D-10 in the fifth wave and scored 10 or higher in the sixth wave.

Items of WLB and factor analysis

WLB was assessed with 11 questions that addressed spillover between work and private life. These questionnaires were developed through the consensus of experts on women's status, reflecting studies from the perspectives of social science, economics, and statistics, with reference to the Panel Survey of Income Dynamics (PSID) and the Household Income and Labour Dynamics in Australia (HILDA) survey. The questionnaire contained 11 items, as described in Supplementary table S2, including "How much does your current work affect your life and/or vice versa?" Each question was rated on a 4-point Likert scale, as "strongly disagree (4)," "disagree (3)," "agree (2)," and "strongly agree (1)." We tested the 11 items measuring WLB using factor analysis with varimax rotation to explore the domain structure. Factors with eigenvalues greater than one and items with factor loadings greater than 0.40 under varimax rotation were retained. Internal consistency was evaluated by Cronbach's alpha. In addition, the average scores of items of the retained factors were calculated according to the demographic variables.
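As a concrete illustration of the case definition above, here is a minimal sketch in Python (an assumed implementation language; the authors' analyses were run in SAS and R, and all file and column names below are hypothetical) of how the CES-D-10 total and the incident-depression indicator could be derived:

```python
import pandas as pd

# Hypothetical input: one row per participant, with items cesd5_1..cesd5_10
# (fifth wave, 2014) and cesd6_1..cesd6_10 (sixth wave, 2016) scored 0-3.
df = pd.read_csv("klowf_waves.csv")

def cesd10_total(frame: pd.DataFrame, prefix: str) -> pd.Series:
    """Sum the ten 0-3 Likert items into the 0-30 CES-D-10 total."""
    return frame[[f"{prefix}_{i}" for i in range(1, 11)]].sum(axis=1)

df["cesd_w5"] = cesd10_total(df, "cesd5")
df["cesd_w6"] = cesd10_total(df, "cesd6")

# Incident case: below the cutoff of 10 at baseline, at or above it at the
# 2-year follow-up, mirroring the definition in the text.
df["incident_depression"] = ((df["cesd_w5"] < 10) & (df["cesd_w6"] >= 10)).astype(int)
```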
Supplementary table S3 shows the results of the factor analysis and the factor loadings for the 11 included WLB items. Two factors with an eigenvalue greater than one were identified. Six items loaded on the first factor (eigenvalue = 2.8), which, in line with the reference view that limited resources for managing multiple roles in both domains reciprocally induce stress and strain, could be called "negative spillover". The items are: My long working hours make it difficult to fulfill my domestic obligations; My irregular work schedule makes it difficult to fulfill my domestic obligations; Responsibilities toward family make me work hard; Parenting makes it difficult to stay in the current job; Domestic matters frequently make it difficult to concentrate on my work; and I thought about quitting work to take care of a sick family member at home. We found fair internal consistency among the items in the "negative spillover" factor, with a Cronbach's alpha of 0.793. Four items loaded on the second factor (eigenvalue = 2.2), which, reflecting the view that positive aspects of the workplace can enrich life at home and vice versa, could be called "positive spillover". The items are: Work gives meaning and vitality to my life; My family appreciates me for working outside home; My work makes my family life satisfying; and My family's recognition of my job makes me work hard. One item was excluded because it showed a factor loading below 0.40. The items had good internal consistency reliability, with a Cronbach's alpha of 0.799 among the items in "positive spillover".

Other variables

The demographic variables and other information obtained were age, education level (middle school or below, high school, or college or above), equalized household income (calculated by dividing the income by the square root of the number of household members), marital status (single, married, separated, divorced, or widowed), number of children, and job classification (white-collar job and blue-collar job). Information on health behaviors, including smoking status (current-, former-, and never-smoker), frequency of vigorous exercise, and presence of alcohol use disorder, was obtained using the CAGE questionnaire. Those who answered "yes" to at least one question of the CAGE questionnaire were classified into the risk group for alcohol use disorder. Weekly working hours of participants were classified into four groups according to the Labor Standards Act in South Korea as follows: <40 hours (shorter than standard weekly working hours), 40 hours (standard and the most frequent weekly working hours), 41–51 hours (usually permitted overtime work), and ≥52 hours (overtime work allowed in extraordinary situations).

Statistical analysis

According to the results of the factor analyses, scores of the WLB factors were calculated by summing the raw scores of the relevant items, and labels for each factor were given based on the interpretation of the loaded items. Next, we estimated the association between the quartiles of the WLB factors and newly developed depressive symptoms measured using the CES-D-10. We investigated the longitudinal associations and performed multivariate logistic regression analyses between quartiles of the WLB factor scores reported by participants without depressive symptoms in the fifth KLoWF wave in 2014 and their development of depressive symptoms in the sixth KLoWF wave in 2016.
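A rough sketch of the factor-extraction step follows, again in Python with hypothetical column names (the original analyses used SAS and R; scikit-learn's varimax option and the hand-rolled Cronbach's alpha are illustrative assumptions, not the authors' code):

```python
import numpy as np
import pandas as pd
from sklearn.decomposition import FactorAnalysis

df = pd.read_csv("klowf_waves.csv")            # hypothetical file
wlb = df[[f"wlb_{i}" for i in range(1, 12)]]   # the 11 hypothetical WLB items

# Kaiser criterion: retain factors whose eigenvalues of the item correlation
# matrix exceed one (two factors in the paper).
eigvals = np.linalg.eigvalsh(wlb.corr().to_numpy())
n_factors = int((eigvals > 1.0).sum())

fa = FactorAnalysis(n_components=n_factors, rotation="varimax").fit(wlb)
loadings = pd.DataFrame(fa.components_.T, index=wlb.columns)
retained = loadings[loadings.abs().max(axis=1) > 0.40]  # loading cutoff 0.40

def cronbach_alpha(items: pd.DataFrame) -> float:
    """alpha = k/(k-1) * (1 - sum of item variances / variance of the total)."""
    k = items.shape[1]
    return k / (k - 1) * (1 - items.var(ddof=1).sum() / items.sum(axis=1).var(ddof=1))
```

Summing the raw scores of the retained items per factor then yields the "negative spillover" and "positive spillover" scores used in the models below.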
We selected potential confounders a priori and used directed acyclic graphs drawn with DAGitty to select the adjusting variables in the models. The adjusting variables included were age, education level, marital status, and number of children (Supplementary figure S1). Three models were constructed: Model 1 was an unadjusted model for each factor; Model 2 was adjusted for age, education level, marital status, and number of children; and in Model 3, all factors derived from the factor analysis were entered into one model together, with adjustment for the variables used in Model 2. In addition, we investigated the nonparametric association between Z-scores of the factor-based WLB scores and newly developed depression using a generalized additive model with smoothing splines, with adjustment for the same variables as in the aforementioned models. All statistical analyses were performed with SAS (version 9.4; SAS Institute, Cary, NC, USA) and R version 3.4.4 (R Foundation for Statistical Computing, Vienna, Austria). Two-tailed P values <0.05 were considered to indicate statistical significance.

Results

Table 1 shows the general characteristics of the 1386 study subjects in 2014, when the fifth wave of the KLoWF survey was performed, and the incidence of depressive symptoms over the 2 years of follow-up. The mean age of participants was 42.9 ± 10.0 years (mean ± standard deviation). Most participants were college graduates or above (n = 694, 50.1%), married (n = 1005, 72.5%), and had two children (n = 695, 50.1%). White-collar jobs (n = 715, 51.6%) and blue-collar jobs (n = 671, 48.4%) were evenly distributed. Most participants responded that they worked 40 hours a week (n = 715, 51.6%). Almost all subjects were nonsmokers (n = 1372, 99.0%) and nonalcoholics (n = 1355, 97.8%), but less than one-third of the participants exercised regularly (n = 340, 24.5%). A higher prevalence of depressive symptoms was significantly associated with older age, low household income, more children, and longer weekly working hours. Development of depression also significantly differed by marital status: 5.8% and 7.1% among the single and married groups, respectively.

Supplementary table S4 shows that the groups who experienced higher negative spillover in 2014 (fifth wave) were those aged 19–29 years, educated up to high school, never married, having no child, white-collar, and working less than 40 hours per week. More positive spillover was found among middle school graduates or below, those with lower household income, white-collar workers, those working less than 40 hours per week, and women who exercised more than once per week.

Table 2 shows the results of the associations between work–life imbalance and new development of depressive symptoms. The highest scoring group in negative spillover (fourth quartile) showed a significantly higher odds ratio of 1.95 (95% confidence interval = 1.05–3.64) compared with the lowest scoring group (first quartile) in Model 3. Nonparametric analysis of negative spillover with newly developed depression showed a linear relationship (Fig. 2). Compared with the lowest quartile of the positive spillover group, the second quartile (OR = 0.53, 95% CI = 0.32–0.87) and third quartile (OR = 0.41, 95% CI = 0.18–0.91) groups had significantly lower ORs in Model 3, but the fourth quartile did not (OR = 0.69, 95% CI = 0.39–1.24).
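For illustration, Model 3 as described above could be fit as follows (Python/statsmodels is an assumed stand-in for the SAS/R actually used; all column names are hypothetical):

```python
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf

df = pd.read_csv("klowf_analysis.csv")  # hypothetical analysis file

# Quartile indicators for the two factor-based WLB scores.
df["neg_q"] = pd.qcut(df["neg_spillover"], 4, labels=["Q1", "Q2", "Q3", "Q4"])
df["pos_q"] = pd.qcut(df["pos_spillover"], 4, labels=["Q1", "Q2", "Q3", "Q4"])

# Model 3: both spillover quartiles in one model, adjusted for age,
# education level, marital status, and number of children.
m3 = smf.logit(
    "incident_depression ~ C(neg_q) + C(pos_q)"
    " + age + C(education) + C(marital_status) + n_children",
    data=df,
).fit()

print(m3.summary())
print(np.exp(m3.params))  # odds ratios relative to the first quartiles
```

The nonparametric check in the text would replace the quartile terms with a smoothing spline over the Z-scored factor scores, e.g. via a GAM package, under the same adjustment set.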
Nonparametric analysis showed a U-shaped curve, with a protective effect on the incidence of depressive symptoms below a Z-score of 0.5 but an increased risk of incident depressive symptoms after that point (Fig. 2).

Discussion

We found a significant longitudinal association between work–life imbalance and newly developed depressive symptoms. Two factors of WLB, negative and positive spillover, were selected to test the association between WLB and depressive symptoms, after adjusting for age, education level, marital status, and number of children. Negative spillover between work and life was linearly associated with new incidence of depressive symptoms, but positive spillover between work and life showed a U-shaped association. To the best of our knowledge, this is a novel finding from a longitudinal study of the relationship among the degree of WLB, spillover phenomena, and new incidence of depression among working women. Only a few longitudinal studies of WLB and depression have been conducted to date. A longitudinal study in South Korea reported that work disturbance due to family responsibilities was associated with depressive symptoms. Another longitudinal study conducted among dual-earner couples in the United States showed that an individual's positive spillover was related to the spouse's depression but not to their own depression. By comparison, there are many cross-sectional studies on the association between WLB and depression, for example in the German working population, Chinese female nurses, Chinese bank employees, and correction officers in the United States. A study in the United States reported that negative spillover was related to the development of mood, anxiety, and substance dependence. Bergs et al. reported a reciprocal association between depressive symptoms and work–family conflict, assessing negative spillover. A cross-sectional study in Switzerland reported that subjective spillover between work and life was associated with depressive feelings, especially among female workers. An association between depression and negative spillover phenomena in work–family conflicts was reported among Dutch workers. A cross-sectional study conducted in Switzerland reported similar results among female workers. Consistent with previous studies, we found that negative spillover was linearly associated with the incidence of depressive symptoms. Negative spillover is induced when people manage various roles in both work and life with limited resources, which can lead to stress and strain. These findings are consistent with a previous cross-sectional study examining the association of the subjective time balance between work and life with well-being in South Korea. In our study, negative spillover included the difficulty of fulfilling domestic obligations due to long working hours or an irregular work schedule and the difficulty of concentrating on work due to parenting, caregiving, or domestic matters. These factors are related to overloaded multiple roles, which can result in psychological conflict between work and life and in psychological stress. Furthermore, according to COR theory, psychological stress can arise in situations where people merely risk losing resources, even if the losses do not actually occur.

Fig. 2. Nonparametric associations between factors of work–life balance and newly developed depression. All factors were analyzed in one model with adjustment for age, education level, marital status, and number of children.
Our results showed that positive spillover between work and life can be beneficial for mental health. The positive interaction between work and family has been studied from various perspectives. For instance, a meta-analysis reviewed 21 studies on work–family enrichment and 57 studies on family–work enrichment. The findings revealed that job satisfaction, affective commitment, physical and mental health, and family satisfaction, but not turnover intention, were positively related to work–family enrichment. Greenhaus and Powell suggested that the hypothetical mechanism of the positive interaction between family and work could be explained by affective and instrumental pathways. Specifically, people's positive affect and their tendency to value their work or family could be interconnected, and this interaction would in turn enhance their performance. However, these theoretical perspectives have not been supported by statistical and quantitative data. In our study, depressive symptoms were not significantly associated with the fourth quartile of positive spillover but were significantly associated with the second and third quartiles of positive spillover. It is unclear whether too much positive interaction between work and life is detrimental. The present results highlight that being optimistic alone may not guarantee physical and psychological health. Excessively positive attitudes toward work and family may push people to work too hard, which leads to psychological stress. Women who experience moderate positive spillover may be able to maintain their family life more securely than those in the first and fourth quartiles of positive spillover. It has been widely accepted that positive spillover, whereby work and life enrich each other, is beneficial for mental health. However, in our study, excessive positive spillover was not found to have desirable effects on depressive symptoms. Given that studies have reported that positive spillover between work and life is associated with workaholism, and that workaholism is associated with depression, it is plausible that excess positive spillover could be detrimental to mental health. The COR theory may explain how excessive positive spillover between work and life might paradoxically deplete personal resources and decrease the chance of consultation in the long run. The stress derived from exhaustion could exceed the advantages of positive spillover. This aligns with the results of a previous meta-analysis stating that higher demands, lower resources, and lower adaptive organizational attitudes are associated with burnout, in line with the COR theory. Furthermore, if respondents in the fourth quartile also actively engaged with work and home, they may have had fewer opportunities to consult psychiatrists compared with more solitary individuals, thus preventing the early detection of depression in the former. As social isolation increases the risk of being diagnosed with chronic illnesses, active social participation at work and home with a positive attitude seems to lower the risk of depression up to a certain point. However, our results suggest that the depletion of resources stemming from blurring the boundary between work and life may result in a tendency to look at life through rose-colored glasses, which would in turn delay the intervention-seeking behaviors that protect working women from depression. The following are some strengths of our study.
First, the longitudinal study design, which assessed WLB with questionnaires at baseline and the incidence of newly developed depressive symptoms over 2 years, supports the causal interpretation of our results. Furthermore, the unique finding that positive spillover can also negatively influence psychological outcomes, contrary to the expectation that only negative spillover is detrimental, is also a strength of our study. Second, our results were obtained from nationally representative data on working women in South Korea, one of the most rapidly developed countries, where traditional ideologies have collided with modern social norms. Third, we used a validated tool for depressive symptoms (CES-D-10) that offers better sensitivity and specificity than nonvalidated assessment methods in relation to WLB. Fourth, the different dimensions of WLB, categorized using theoretical backgrounds with statistical confirmation, provide substantial data for practical strategies. However, there were several limitations to our study. First, we surveyed WLB using the questionnaire of the KLoWF survey, which was developed based on the PSID and HILDA. The concepts of each item may differ across cultures and languages, although a meaningful correlation between spillover and depression was still found. Second, WLB deals with subjective feelings, and a tool for diagnosing disease through the objectification of WLB needs further study. Although individuals' WLB cannot be evaluated without considering their subjective status, well-validated tools for measuring WLB are still needed for statistical confirmation. Third, our data encompassed only female workers in South Korea. Previous studies have argued that the association between WLB and depression could differ by gender. To explore whether there are gender differences, future studies should include male workers. Biological diathesis in gender differences should also be discussed in future research. Finally, because of the lack of information on diverse occupational stressors, such as high job demands, lack of rewards, interpersonal conflicts, and inadequate social support, possible confounding effects in the workplace were not clarified. In conclusion, negative spillover in WLB is associated with an increase in developing depressive symptoms, and positive spillover in WLB is associated with a decrease in developing depressive symptoms, with a U-shaped relationship, among South Korean women employees. Maintaining sound boundaries between work and life is necessary for WLB and for preventing depression.

Availability of data and materials

The data sets generated and/or analyzed during the present study are available in the Korean Longitudinal Survey of Women and Families repository, https://klowf.kwdi.re.kr/portal/eng/dataSet/rdssFileEngListPage.do?

Conflicts of interest

The authors report no conflicts of interest.
# Imports assumed by this Django view (not shown in the original snippet);
# the Curriculum model and the user_check helper live elsewhere in the project.
import datetime

from django.http import HttpResponseRedirect
from django.shortcuts import render


def next_curriculum(request):
    """Copy the current batch's curriculum rows forward to the next batch."""
    if user_check(request):
        return HttpResponseRedirect('/academic-procedures/')

    if request.method == 'POST':
        programme = request.POST['programme']
        now = datetime.datetime.now()
        year = int(now.year)
        batch = year - 1
        curriculum = Curriculum.objects.all().select_related() \
            .filter(batch=batch).filter(programme=programme)

        if request.POST['option'] == '1':
            # Duplicate every row of the current batch into batch + 1.
            new_curriculum = []
            for i in curriculum:
                ins = Curriculum(
                    programme=i.programme,
                    batch=i.batch + 1,
                    branch=i.branch,
                    sem=i.sem,
                    course_code=i.course_code,
                    course_id=i.course_id,
                    credits=i.credits,
                    optional=i.optional,
                    course_type=i.course_type,
                )
                new_curriculum.append(ins)
            try:
                Curriculum.objects.bulk_create(new_curriculum)
            except Exception as e:
                print("Exception occurred")
                print(e)  # was print("e"), which printed the literal string, not the error
        elif request.POST['option'] == '2':
            # Same duplication as option '1', but afterwards re-query and
            # render the newly created batch.
            new_curriculum = []
            for i in curriculum:
                ins = Curriculum(
                    programme=i.programme,
                    batch=i.batch + 1,
                    branch=i.branch,
                    sem=i.sem,
                    course_code=i.course_code,
                    course_id=i.course_id,
                    credits=i.credits,
                    optional=i.optional,
                    course_type=i.course_type,
                )
                new_curriculum.append(ins)
            try:
                Curriculum.objects.bulk_create(new_curriculum)
            except Exception as e:
                print("Exception occurred!")
                print(e)
            finally:
                batch = batch + 1
                curriculum = Curriculum.objects.all().select_related() \
                    .filter(batch=batch).filter(programme=programme)
            context = {
                'curriculumm': curriculum,
                'tab_id': ['3', '3']
            }
            return render(request, "ais/ais.html", context)
        else:
            context = {
                'tab_id': ['3', '2']
            }
            return render(request, "ais/ais.html", context)

    context = {
        'tab_id': ['3', '1']
    }
    return render(request, "ais/ais.html", context)
import { FC, createContext, useContext, useState, useEffect, PropsWithChildren } from 'react';
import { toast } from 'react-toastify';
import { useTranslation } from 'react-i18next';
import { UnauthorizedError } from '../common/API';
import { usePrevious } from '../common/Utils';
import { useSessionAPI } from './SessionAPI';
import type { SessionData } from './SessionAPI';
import type { SessionStatus, SessionContextType } from './SessionProvider.d';

export type { SessionEventType, SessionStatus, SessionContextType } from './SessionProvider.d';

const SessionRefreshInterval: number = 60 * 1000;

const defaultSessionData: (() => SessionData) = () => ({
  id: "",
  accessToken: "",
  roles: new Set()
});

const defaultSessionStatus: (() => SessionStatus) = () => ({
  lastEvent: "loading",
  isActive: false,
  isStillLoading: true,
  data: defaultSessionData()
});

const Session = createContext<SessionContextType>({} as SessionContextType);

const SessionProvider: FC = (props: PropsWithChildren<unknown>) => {
  const [sessionData, setSessionData] = useState<SessionData | null>(null);
  const prevSessionData = usePrevious<SessionData | null>(sessionData);
  const [session, setSession] = useState<SessionStatus>(() => defaultSessionStatus());
  const [refreshTimer, setRefreshTimer] = useState<NodeJS.Timeout | null>(null);

  const { t } = useTranslation();
  const { getSessionStatus, refreshSession, terminateSession } = useSessionAPI();

  const newSession = (newSessionData: SessionData) => {
    setSessionData({
      id: newSessionData.id,
      accessToken: newSessionData.accessToken,
      roles: newSessionData.roles
    });
  };

  const endSession = (): Promise<void> => {
    if (sessionData) {
      return terminateSession(sessionData.accessToken)
        .then(() => {
          setSessionData(null);
        });
    }

    return Promise.reject(new UnauthorizedError([{ field: "session", code: "missing" }]));
  };

  useEffect(() => {
    getSessionStatus()
      .then((sessionInfo) => {
        newSession(sessionInfo);
      })
      .catch(err => {
        if (err instanceof UnauthorizedError) {
          setSession({
            lastEvent: 'not_loaded',
            isActive: false,
            isStillLoading: false,
            data: defaultSessionData()
          });
        } else {
          console.log(`Failed to retrieve session status: ${err}`);
        }
      });
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  useEffect(() => {
    const startRefreshTimer = () => {
      if (!sessionData) {
        return;
      }

      const timerId = setInterval(() => {
        refreshSession(sessionData.accessToken)
          .then((sessionInfo) => {
            newSession(sessionInfo);
          })
          .catch(err => {
            if (err instanceof UnauthorizedError) {
              toast.error(t('session.sessionEnded'), {
                autoClose: 4000,
                hideProgressBar: true,
                closeOnClick: true
              });

              setSessionData(null);
            } else {
              console.log(`Failed to refresh session: ${err}`);
            }
          });
      }, SessionRefreshInterval);

      setRefreshTimer(timerId);
    };

    if (!prevSessionData && sessionData) {
      setSession({
        lastEvent: 'started',
        isActive: true,
        isStillLoading: false,
        data: sessionData
      });

      startRefreshTimer();
      return;
    }

    if (prevSessionData && !sessionData) {
      setSession({
        lastEvent: 'terminated',
        isActive: false,
        isStillLoading: false,
        data: defaultSessionData()
      });

      if (refreshTimer) {
        clearInterval(refreshTimer);
        setRefreshTimer(null);
      }
      return;
    }

    if (prevSessionData && sessionData && prevSessionData.id !== sessionData.id) {
      setSession({
        lastEvent: 'switched',
        isActive: true,
        isStillLoading: false,
        data: sessionData
      });

      if (refreshTimer) {
        clearInterval(refreshTimer);
      }
      startRefreshTimer();
      return;
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [sessionData]);

  const useActiveSession = (fn: (session:
SessionStatus) => void) => { useEffect(() => { if (session.isActive && !session.isStillLoading) { fn(session); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [session]); }; const useMissingSession = (fn: () => void) => { useEffect(() => { if (!session.isActive && !session.isStillLoading) { fn(); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [session]); }; const useSwitchedSession = (fn: (session: SessionStatus) => void) => { useEffect(() => { if (session.isActive && !session.isStillLoading && session.lastEvent === 'switched') { fn(session); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [session]); }; return ( <Session.Provider value={{ session, newSession, endSession, useActiveSession, useMissingSession, useSwitchedSession }}> {props.children} </Session.Provider> ); }; export const useSession: (() => SessionContextType) = () => useContext(Session); export default SessionProvider;
A couple ticked off at having a Latina server in a Virginia diner wrote a racist zero-dollar tip that is now all over the internet. The duo ate at Jess’ Lunch in Harrisonburg on Monday and left server Sadie Elledge a note on their $26.11 bill that read “We only tip citizens.” Clearly not referring to President Wilson’s “citizens of the world” idea, their remark was confusing for Elledge, who is actually American. She tells the local news she knew something was up the moment the couple sat down. “They wouldn’t talk to me,” she says. “They would just nod their heads.” At the end of their meal, security-camera footage shows the woman casually signing their receipt, then leaving. Owner Angeliki Floros isn’t keen on having them back, explaining that they’re banned until they apologize. “‘Coward’ is the word to describe what they did. I have worse words to use,” she adds. “But let’s go with ‘coward.’”
/** * Generates a result of a unit attacking. * Takes care to only call the pseudo-random source *once*. * * @param random A pseudo-random number source. * @param attacker The attacker. * @param defender The defender. * @return The results of the combat. */ @Override public List<CombatResult> generateAttackResult(Random random, FreeColGameObject attacker, FreeColGameObject defender) { LogBuilder lb = new LogBuilder(256); lb.add("Combat"); ArrayList<CombatResult> crs = new ArrayList<>(); CombatOdds odds = calculateCombatOdds(attacker, defender, lb); double r = randomDouble(logger, "AttackResult", random); lb.add(" random(1.0)=", r); boolean great = false; String action; if (combatIsAttack(attacker, defender)) { Unit attackerUnit = (Unit) attacker; Unit defenderUnit = (Unit) defender; action = "Attack"; if (r < odds.win || defenderUnit.isBeached()) { great = r < 0.1 * odds.win; crs.add(CombatResult.WIN); resolveAttack(attackerUnit, defenderUnit, great, r / (0.1 * odds.win), crs); } else if (r < 0.8 * odds.win + 0.2 && defenderUnit.hasAbility(Ability.EVADE_ATTACK)) { crs.add(CombatResult.NO_RESULT); crs.add(CombatResult.EVADE_ATTACK); } else { great = r >= 0.1 * odds.win + 0.9; crs.add(CombatResult.LOSE); resolveAttack(defenderUnit, attackerUnit, great, (1.25 * r - 0.25 - odds.win)/(1.0 - odds.win), crs); } } else if (combatIsBombard(attacker, defender)) { Unit defenderUnit = (Unit) defender; if (!defenderUnit.isNaval()) { throw new IllegalStateException("Bombard of non-naval"); } action = "Bombard"; if (r <= odds.win) { crs.add(CombatResult.WIN); double offencePower = getOffencePower(attacker, defender); double defencePower = getDefencePower(attacker, defender); double diff = Math.max(3.0, defencePower * 2.0 - offencePower); great = r < odds.win / diff; if (great || defenderUnit.getRepairLocation() == null) { crs.add(CombatResult.SINK_SHIP_BOMBARD); } else { crs.add(CombatResult.DAMAGE_SHIP_BOMBARD); } } else { crs.add(CombatResult.NO_RESULT); crs.add(CombatResult.EVADE_BOMBARD); } } else { throw new IllegalStateException("Bogus combat"); } lb.add(" great=", great, " ", action); for (CombatResult cr : crs) lb.add(" ", cr); lb.log(logger, Level.INFO); return crs; }
def previous_block_id(self):
    """Return the ID of the preceding block, as recorded in this block's header."""
    return self.header.previous_block_id
package org.jabref.logic.importer;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.Optional;

import org.jabref.logic.importer.fetcher.TrustLevel;
import org.jabref.model.entry.BibEntry;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class FulltextFetchersTest {

    private BibEntry entry;

    @BeforeEach
    public void setUp() {
        entry = new BibEntry();
    }

    @AfterEach
    public void tearDown() {
        entry = null;
    }

    @Test
    public void acceptPdfUrls() throws MalformedURLException {
        URL pdfUrl = new URL("http://docs.oasis-open.org/wsbpel/2.0/OS/wsbpel-v2.0-OS.pdf");
        FulltextFetcher finder = (e) -> Optional.of(pdfUrl);
        FulltextFetchers fetcher = new FulltextFetchers(Arrays.asList(finder));

        assertEquals(Optional.of(pdfUrl), fetcher.findFullTextPDF(entry));
    }

    @Test
    public void rejectNonPdfUrls() throws MalformedURLException {
        URL pdfUrl = new URL("https://github.com/JabRef/jabref/blob/master/README.md");
        FulltextFetcher finder = (e) -> Optional.of(pdfUrl);
        FulltextFetchers fetcher = new FulltextFetchers(Arrays.asList(finder));

        assertEquals(Optional.empty(), fetcher.findFullTextPDF(entry));
    }

    @Test
    public void noTrustLevel() throws MalformedURLException {
        URL pdfUrl = new URL("http://docs.oasis-open.org/wsbpel/2.0/OS/wsbpel-v2.0-OS.pdf");
        FulltextFetcher finder = (e) -> Optional.of(pdfUrl);
        FulltextFetchers fetcher = new FulltextFetchers(Arrays.asList(finder));

        assertEquals(Optional.of(pdfUrl), fetcher.findFullTextPDF(entry));
    }

    @Test
    public void higherTrustLevelWins() throws MalformedURLException, IOException, FetcherException {
        final URL lowUrl = new URL("http://docs.oasis-open.org/opencsa/sca-bpel/sca-bpel-1.1-spec-cd-01.pdf");
        final URL highUrl = new URL("http://docs.oasis-open.org/wsbpel/2.0/OS/wsbpel-v2.0-OS.pdf");

        FulltextFetcher finderHigh = mock(FulltextFetcher.class);
        FulltextFetcher finderLow = mock(FulltextFetcher.class);
        when(finderHigh.getTrustLevel()).thenReturn(TrustLevel.SOURCE);
        when(finderLow.getTrustLevel()).thenReturn(TrustLevel.UNKNOWN);
        when(finderHigh.findFullText(entry)).thenReturn(Optional.of(highUrl));
        when(finderLow.findFullText(entry)).thenReturn(Optional.of(lowUrl));

        FulltextFetchers fetcher = new FulltextFetchers(Arrays.asList(finderLow, finderHigh));

        assertEquals(Optional.of(highUrl), fetcher.findFullTextPDF(entry));
    }
}
/*
 * Primary routine used to startup/initialize the sensors in the system.
 */
iOpResult_e TWB_Sensors_Init(void) {
	assert_succeed(TWB_SE_ReadU8(SENSOR_CFG_EEPROM_OFFSET, &SensorConfig->ControlLoopUpdateRateHz));

	__initSensorData();

	/* Walk the sensor list, initializing each sensor and recording its status. */
	Sensor_t *snsr = SensorsList;
	while (snsr != NULL) {
		snsr->SensorFault = snsr->Init();
		snsr->DataReady = DataNotReady;

		if (snsr->SensorFault == OK)
			snsr->Status = SensorStatus_Ready;
		else {
			snsr->Status = SensorStatus_Failure;
			TWB_Debug_Print("\r\n!!!!Failed to initialize: ");
			TWB_Debug_Print(snsr->Name);
			TWB_Debug_Print("!!!!\r\n");
			return FAIL;
		}

		snsr = snsr->Next;
	}

	assert_succeed(__snsr_Configure_Incoming_IRQs());
	assert_succeed(__snsr_Configure_Timer());

	TWB_LEDS_SetState(LED_Snsr_Online, LED_FastBlink);

	SystemStatus->SnsrState = SensorAppState_Ready;

	I2CZeroJob = pm_malloc(sizeof(I2CJob_t));
	I2CZeroJob->OutBuffer = pm_malloc(12);
	I2CZeroJob->InBuffer = pm_malloc(12);

	TWB_Debug_Print("\r\nSensors: OK\r\n");

	return OK;
}
/**
 * HpcNotificationServiceImpl.java
 *
 * <p>
 * Copyright SVG, Inc. Copyright Leidos Biomedical Research, Inc
 *
 * <p>
 * Distributed under the OSI-approved BSD 3-Clause License. See
 * http://ncip.github.com/HPC/LICENSE.txt for details.
 */
package gov.nih.nci.hpc.service.impl;

import static gov.nih.nci.hpc.service.impl.HpcDomainValidator.isValidNotificationSubscription;
import static gov.nih.nci.hpc.service.impl.HpcEventServiceImpl.COLLECTION_PATH_PAYLOAD_ATTRIBUTE;
import static gov.nih.nci.hpc.service.impl.HpcEventServiceImpl.DATA_OBJECT_PATH_PAYLOAD_ATTRIBUTE;

import gov.nih.nci.hpc.dao.HpcNotificationDAO;
import gov.nih.nci.hpc.domain.error.HpcErrorType;
import gov.nih.nci.hpc.domain.error.HpcRequestRejectReason;
import gov.nih.nci.hpc.domain.model.HpcRequestInvoker;
import gov.nih.nci.hpc.domain.notification.HpcEventPayloadEntry;
import gov.nih.nci.hpc.domain.notification.HpcEventType;
import gov.nih.nci.hpc.domain.notification.HpcNotificationDeliveryMethod;
import gov.nih.nci.hpc.domain.notification.HpcNotificationDeliveryReceipt;
import gov.nih.nci.hpc.domain.notification.HpcNotificationSubscription;
import gov.nih.nci.hpc.domain.notification.HpcNotificationTrigger;
import gov.nih.nci.hpc.domain.notification.HpcSystemAdminNotificationType;
import gov.nih.nci.hpc.domain.user.HpcUserRole;
import gov.nih.nci.hpc.exception.HpcException;
import gov.nih.nci.hpc.integration.HpcDataManagementProxy;
import gov.nih.nci.hpc.service.HpcNotificationService;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;

/**
 * HPC Notification Application Service Implementation.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public class HpcNotificationServiceImpl implements HpcNotificationService {
  // ---------------------------------------------------------------------//
  // Instance members
  // ---------------------------------------------------------------------//

  // Map notification delivery method to its notification sender impl.
  private Map<HpcNotificationDeliveryMethod, HpcNotificationSender> notificationSenders =
      new EnumMap<>(HpcNotificationDeliveryMethod.class);

  // The Notification DAO instance.
  @Autowired
  private HpcNotificationDAO notificationDAO = null;

  // The Data Management Proxy instance.
  @Autowired
  private HpcDataManagementProxy dataManagementProxy = null;

  // The Data Management Authenticator.
  @Autowired
  private HpcDataManagementAuthenticator dataManagementAuthenticator = null;

  // Pagination support.
  @Autowired
  @Qualifier("hpcNotificationPagination")
  private HpcPagination pagination = null;

  // The system administrator NCI user ID.
  @Value("${hpc.service.notification.systemAdministratorUserId}")
  private String systemAdministratorUserId = null;

  // The storage administrators' NCI user IDs.
  @Value("${hpc.service.notification.storageAdministratorUserIds}")
  private String storageAdministratorUserIds = null;

  // The logger instance.
  private final Logger logger = LoggerFactory.getLogger(this.getClass().getName());

  // ---------------------------------------------------------------------//
  // Constructors
  // ---------------------------------------------------------------------//

  /**
   * Default constructor disabled.
   *
   * @throws HpcException Constructor disabled.
   */
  private HpcNotificationServiceImpl() throws HpcException {
    throw new HpcException("Constructor Disabled", HpcErrorType.SPRING_CONFIGURATION_ERROR);
  }

  /**
   * Constructor for Spring Dependency Injection.
   *
   * @param notificationSenders The notification senders.
   */
  private HpcNotificationServiceImpl(
      Map<HpcNotificationDeliveryMethod, HpcNotificationSender> notificationSenders) {
    this.notificationSenders.putAll(notificationSenders);
  }

  // ---------------------------------------------------------------------//
  // Methods
  // ---------------------------------------------------------------------//

  // ---------------------------------------------------------------------//
  // HpcNotificationService Interface Implementation
  // ---------------------------------------------------------------------//

  @Override
  public void addUpdateNotificationSubscription(
      HpcNotificationSubscription notificationSubscription) throws HpcException {
    // Input validation.
    if (!isValidNotificationSubscription(notificationSubscription)) {
      throw new HpcException("Invalid add/update notification subscription request",
          HpcErrorType.INVALID_REQUEST_INPUT);
    }

    // Get the service invoker.
    HpcRequestInvoker invoker = HpcRequestContext.getRequestInvoker();
    if (invoker == null) {
      throw new HpcException("Unknown service invoker", HpcErrorType.UNEXPECTED_ERROR);
    }

    // Validate subscription for usage summary report is allowed for system admin only.
    if (!invoker.getUserRole().equals(HpcUserRole.SYSTEM_ADMIN)) {
      if (notificationSubscription.getEventType().equals(HpcEventType.USAGE_SUMMARY_REPORT)
          || notificationSubscription.getEventType()
              .equals(HpcEventType.USAGE_SUMMARY_BY_WEEKLY_REPORT)
          || notificationSubscription.getEventType()
              .equals(HpcEventType.USAGE_SUMMARY_BY_DOC_REPORT)
          || notificationSubscription.getEventType()
              .equals(HpcEventType.USAGE_SUMMARY_BY_DOC_BY_WEEKLY_REPORT)
          || notificationSubscription.getEventType()
              .equals(HpcEventType.USAGE_SUMMARY_BY_USER_REPORT)
          || notificationSubscription.getEventType()
              .equals(HpcEventType.USAGE_SUMMARY_BY_USER_BY_WEEKLY_REPORT)) {
        throw new HpcException(
            "Not authorized to subscribe to the report. Please contact system administrator",
            HpcRequestRejectReason.NOT_AUTHORIZED);
      }
    }

    // Validate the notification triggers.
    validateNotificationTriggers(notificationSubscription.getNotificationTriggers());

    // Upsert to DB.
    notificationDAO.upsertSubscription(invoker.getNciAccount().getUserId(),
        notificationSubscription);
  }

  @Override
  public void deleteNotificationSubscription(String userId, HpcEventType eventType)
      throws HpcException {
    // Input validation.
    if (eventType == null) {
      throw new HpcException("Null event type", HpcErrorType.INVALID_REQUEST_INPUT);
    }

    // Delete from DB.
    notificationDAO.deleteSubscription(userId, eventType);
  }

  @Override
  public List<HpcNotificationSubscription> getNotificationSubscriptions(String userId)
      throws HpcException {
    return notificationDAO.getSubscriptions(userId);
  }

  @Override
  public List<String> getNotificationSubscribedUsers(HpcEventType eventType) throws HpcException {
    return notificationDAO.getSubscribedUsers(eventType);
  }

  @Override
  public HpcNotificationSubscription getNotificationSubscription(String userId,
      HpcEventType eventType) throws HpcException {
    // Input validation.
    if (userId == null || eventType == null) {
      throw new HpcException("Invalid user ID or event type", HpcErrorType.INVALID_REQUEST_INPUT);
    }

    // Query the DB.
return notificationDAO.getSubscription(userId, eventType); } @Override public boolean sendNotification(String userId, HpcEventType eventType, List<HpcEventPayloadEntry> payloadEntries, HpcNotificationDeliveryMethod deliveryMethod) { // Input validation. if (userId == null || eventType == null || deliveryMethod == null) { return false; } // Locate the notification sender for this delivery method. HpcNotificationSender notificationSender = notificationSenders.get(deliveryMethod); if (notificationSender == null) { logger.error("Could not locate notification sender for: " + deliveryMethod); return false; } // Send the notification. try { notificationSender.sendNotification(userId, eventType, payloadEntries); } catch (HpcException e) { logger.error("failed to send user notification", e); return false; } return true; } @Override public boolean sendNotification(String userId, HpcSystemAdminNotificationType notificationType, List<HpcEventPayloadEntry> payloadEntries, HpcNotificationDeliveryMethod deliveryMethod) { // Input validation. if (userId == null || notificationType == null || deliveryMethod == null) { return false; } // Locate the notification sender for this delivery method. HpcNotificationSender notificationSender = notificationSenders.get(deliveryMethod); if (notificationSender == null) { logger.error("Could not locate notification sender for: " + deliveryMethod); return false; } // Send the notification. try { notificationSender.sendNotification(userId, notificationType, payloadEntries); } catch (HpcException e) { logger.error("failed to send system admin notification", e); return false; } return true; } @Override public void createNotificationDeliveryReceipt(String userId, int eventId, HpcNotificationDeliveryMethod deliveryMethod, boolean deliveryStatus) { if (userId == null || deliveryMethod == null) { return; } HpcNotificationDeliveryReceipt deliveryReceipt = new HpcNotificationDeliveryReceipt(); deliveryReceipt.setUserId(userId); deliveryReceipt.setEventId(eventId); deliveryReceipt.setNotificationDeliveryMethod(deliveryMethod); deliveryReceipt.setDeliveryStatus(deliveryStatus); deliveryReceipt.setDelivered(Calendar.getInstance()); try { notificationDAO.upsertDeliveryReceipt(deliveryReceipt); } catch (HpcException e) { logger.error("Failed to create a delivery receipt", e); } } @Override public List<HpcNotificationDeliveryReceipt> getNotificationDeliveryReceipts(int page) throws HpcException { // Get the service invoker. HpcRequestInvoker invoker = HpcRequestContext.getRequestInvoker(); if (invoker == null) { throw new HpcException("Unknown service invoker", HpcErrorType.UNEXPECTED_ERROR); } return notificationDAO.getDeliveryReceipts(invoker.getNciAccount().getUserId(), pagination.getOffset(page), pagination.getPageSize()); } @Override public HpcNotificationDeliveryReceipt getNotificationDeliveryReceipt(int eventId) throws HpcException { // Get the service invoker. HpcRequestInvoker invoker = HpcRequestContext.getRequestInvoker(); if (invoker == null) { throw new HpcException("Unknown service invoker", HpcErrorType.UNEXPECTED_ERROR); } return notificationDAO.getDeliveryReceipt(invoker.getNciAccount().getUserId(), eventId); } @Override public int getNotificationDeliveryReceiptsPageSize() { return pagination.getPageSize(); } @Override public int getNotificationDeliveryReceiptsCount() throws HpcException { // Get the service invoker. 
    HpcRequestInvoker invoker = HpcRequestContext.getRequestInvoker();
    if (invoker == null) {
      throw new HpcException("Unknown service invoker", HpcErrorType.UNEXPECTED_ERROR);
    }

    return notificationDAO.getDeliveryReceiptsCount(invoker.getNciAccount().getUserId());
  }

  public void sendNotification(HpcException exception) {
    sendNotification(exception, false);
  }

  @Override
  public void sendNotification(HpcException exception, boolean notifyStorageAdmins) {
    if (exception.getIntegratedSystem() != null) {
      logger.info("Sending a notification to system admin: {}", exception.getMessage());

      // Create a payload containing the exception data.
      List<HpcEventPayloadEntry> payloadEntries = new ArrayList<>();
      HpcEventPayloadEntry integratedSystemPayloadEntry = new HpcEventPayloadEntry();
      integratedSystemPayloadEntry.setAttribute("INTEGRATED_SYSTEM");
      integratedSystemPayloadEntry.setValue(exception.getIntegratedSystem().value());
      payloadEntries.add(integratedSystemPayloadEntry);

      HpcEventPayloadEntry errorMessage = new HpcEventPayloadEntry();
      errorMessage.setAttribute("ERROR_MESSAGE");
      errorMessage.setValue(exception.getMessage());
      payloadEntries.add(errorMessage);

      HpcEventPayloadEntry stackTrace = new HpcEventPayloadEntry();
      stackTrace.setAttribute("STACK_TRACE");
      stackTrace.setValue(exception.getStackTraceString());
      payloadEntries.add(stackTrace);

      // Send the notification.
      this.sendNotification(systemAdministratorUserId,
          HpcSystemAdminNotificationType.INTEGRATED_SYSTEM_ERROR, payloadEntries,
          HpcNotificationDeliveryMethod.EMAIL);

      if (notifyStorageAdmins && storageAdministratorUserIds != null) {
        for (String userId : storageAdministratorUserIds.split(",")) {
          this.sendNotification(userId, HpcSystemAdminNotificationType.INTEGRATED_SYSTEM_ERROR,
              payloadEntries, HpcNotificationDeliveryMethod.EMAIL);
        }
      }
    }
  }

  // ---------------------------------------------------------------------//
  // Helper Methods
  // ---------------------------------------------------------------------//

  /**
   * Validate that notification triggers reference collections/data-objects that exist. In
   * addition, event payloads for collections/data-objects reference the relative path of the
   * collection/data-object. For this reason, we make sure the triggers reference relative paths
   * as well.
   *
   * @param notificationTriggers The notification triggers to validate.
   * @throws HpcException if an invalid notification trigger is found.
*/ private void validateNotificationTriggers(List<HpcNotificationTrigger> notificationTriggers) throws HpcException { for (HpcNotificationTrigger notificationTrigger : notificationTriggers) { for (HpcEventPayloadEntry payloadEntry : notificationTrigger.getPayloadEntries()) { if (payloadEntry.getAttribute().equals(COLLECTION_PATH_PAYLOAD_ATTRIBUTE)) { String collectionPath = payloadEntry.getValue(); if (!dataManagementProxy .getPathAttributes(dataManagementAuthenticator.getAuthenticatedToken(), collectionPath) .getIsDirectory()) { throw new HpcException("Collection doesn't exist: " + collectionPath, HpcErrorType.INVALID_REQUEST_INPUT); } payloadEntry.setValue(dataManagementProxy.getRelativePath(collectionPath)); break; } if (payloadEntry.getAttribute().equals(DATA_OBJECT_PATH_PAYLOAD_ATTRIBUTE)) { String dataObjectPath = payloadEntry.getValue(); if (!dataManagementProxy.getPathAttributes( dataManagementAuthenticator.getAuthenticatedToken(), dataObjectPath).getIsFile()) { throw new HpcException("Data object doesn't exist: " + dataObjectPath, HpcErrorType.INVALID_REQUEST_INPUT); } payloadEntry.setValue(dataManagementProxy.getRelativePath(dataObjectPath)); } } } } }
#pragma once

//
// Matroska element IDs and values
// http://matroska.org/technical/specs/index.html
//

#define MKV_EBML 0x1A45DFA3
#define MKV_DocType 0x4282

#define MKV_Segment 0x18538067

#define MKV_Info 0x1549A966
#define MKV_TimecodeScale 0x2AD7B1
#define MKV_Duration 0x4489
#define MKV_MuxingApp 0x4D80
#define MKV_WritingApp 0x5741

#define MKV_Tracks 0x1654AE6B
#define MKV_TrackEntry 0xAE
#define MKV_TrackNumber 0xD7
#define MKV_TrackUID 0x73C5
#define MKV_TrackType 0x83
#define MKV_TrackType_Video 0x01
#define MKV_TrackType_Audio 0x02
#define MKV_TrackType_Subtitle 0x12
#define MKV_FlagEnabled 0xB9
#define MKV_FlagDefault 0x88
#define MKV_FlagForced 0x55AA
#define MKV_FlagLacing 0x9C
#define MKV_Name 0x536E
#define MKV_Language 0x22B59C
#define MKV_CodecID 0x86
#define MKV_ColourSpace 0x2EB524

#define MKV_Video 0xE0
#define MKV_PixelWidth 0xB0
#define MKV_PixelHeight 0xBA
#define MKV_DisplayWidth 0x54B0
#define MKV_DisplayHeight 0x54BA
#define MKV_DisplayUnit 0x54B2
#define MKV_DisplayUnit_Pixel 0
#define MKV_DisplayUnit_Centimeter 1
#define MKV_DisplayUnit_Inch 2
#define MKV_DisplayUnit_DisplayAspectRatio 3

#define MKV_Audio 0xE1
#define MKV_SamplingFrequency 0xB5
#define MKV_Channels 0x9F
#define MKV_BitDepth 0x6264

#define MKV_Cluster 0x1F43B675
#define MKV_Timecode 0xE7
#define MKV_SimpleBlock 0xA3
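The header above only names Matroska's element IDs; for context, here is a hedged Python sketch of how EBML frames those elements. Each element starts with a variable-length ID (marker bits kept, so the first top-level ID reads back as 0x1A45DFA3, i.e. MKV_EBML) followed by a variable-length size (marker bit cleared). The file name "sample.mkv" is an illustrative assumption, not part of the header.

def read_vint(f, keep_marker=False):
    """Read one EBML variable-length integer from binary file object f.

    The number of leading zero bits in the first byte (before the first set
    bit) determines how many extra bytes follow. Element IDs keep the marker
    bit (keep_marker=True); element sizes clear it.
    """
    head = f.read(1)
    if not head:
        return None  # clean EOF
    first = head[0]
    mask, extra = 0x80, 0
    while mask and not (first & mask):
        mask >>= 1
        extra += 1
    if not mask:
        raise ValueError("invalid EBML varint")
    value = first if keep_marker else first & (mask - 1)
    for b in f.read(extra):
        value = (value << 8) | b
    return value

MKV_EBML = 0x1A45DFA3  # mirrors the #define above

# Illustrative usage: the first element of any Matroska file is the EBML header.
with open("sample.mkv", "rb") as f:
    elem_id = read_vint(f, keep_marker=True)
    size = read_vint(f)
    print(hex(elem_id), size, elem_id == MKV_EBML)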
Lee A. Saunders, the new president of the AFSCME, has made it clear the union representing 1.6 million public service workers will support President Barack Obama in 2012. “We must work our hearts out to re-elect President Obama,” he said during AFSCME’s 40th International Convention in Los Angeles this week. Saunders is the union’s first African American president. He was sworn in on Friday. Despite endorsing Obama’s re-election, Saunders quickly insisted that the union was non-partisan. “And we must hold politicians of all political stripes accountable for what they say and what they do,” he said. “Because when it comes to what affects our members and our rights, it’s not about left versus right; it’s about right versus wrong.” “And let me be perfectly clear about this: Our union does not work for any political party; we work for justice and fairness in the workplace. If someone takes us on, it does not matter whether they are a Democrat or a Republican, conservative or liberal. We are taking them on. And they are going down.” Republicans have sought to weaken or eliminate unions for government workers. At a Republican Governors Association conference in 2010, then-Minnesota Gov. Tim Pawlenty described public workers as “over-benefited and overpaid.” “Frankly,” he said, “the public employee unions would stick a shiv in all of us if they could.”
/** * Unit tests for {@link Signal}. */ @RunWith(AndroidJUnit4.class) @Config(sdk = {VERSION_CODES.P}, manifest = Config.NONE) public class SignalTest { @Test public void testFromList() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1,2,3,4)); CollectAll<Integer> c = new CollectAll<>(); // Act sig.observe(c); // Assert assertEquals(4, c.xs.size()); assertEquals(Arrays.asList(1, 2, 3, 4), c.xs); } @Test public void testMapList() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1,2,3,4)); Signal<String> sigAsString = sig.map(new Fn<Integer,String>() { @Override public String apply(Integer integer) { return String.valueOf(integer); } }); CollectAll<String> c = new CollectAll<>(); // Act sigAsString.observe(c); // Assert assertEquals(4, c.xs.size()); assertEquals(Arrays.asList("1", "2", "3", "4"), c.xs); } @Test public void testCombineMaps() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1,2,3,4)); Signal<Integer> sigPlus2 = sig.map(new Fn<Integer, Integer>() { @Override public Integer apply(Integer integer) { return integer + 2; } }); Signal<String> sigAsString = sigPlus2.map(new Fn<Integer, String>() { @Override public String apply(Integer integer) { return String.valueOf(integer); } }); CollectAll<String> c = new CollectAll<>(); // Act sigAsString.observe(c); // Assert assertEquals(4, c.xs.size()); assertEquals(Arrays.asList("3", "4", "5", "6"), c.xs); } @Test public void testMappedUnsubscribe() { // Assign Signal<Integer> sig = Signal.create(); Signal<String> sigString = sig.map(new Fn<Integer, String>() { @Override public String apply(Integer integer) { return integer.toString(); } }); // Act Signal.Subscription sub = sigString.observe(new Signal.ObservesNext<String>() { @Override public void onNext(@NonNull String s) { } }); // Assert assertTrue("should have an observer", sig.hasObservers()); sub.unsubscribe(); assertFalse("should have no observers", sig.hasObservers()); } @Test public void testTimeout() { // Assign Signal<?> s = Signal.create().timeout(5000); s.observe(new CollectAll<Object>()); // Act Robolectric.getForegroundThreadScheduler().advanceBy(4999, MILLISECONDS); // Assert assertThrows(RuntimeException.class, () -> Robolectric.getForegroundThreadScheduler().advanceBy(1, MILLISECONDS) ); } @Test public void testFlatMap() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1,2,3,4)); Signal<Integer> sigPlus2 = sig.flatMap(new Fn<Integer, Signal<Integer>>() { @Override public Signal<Integer> apply(Integer integer) { return Signal.from(Arrays.asList(integer, integer + 2)); } }); Signal<String> sigAsString = sigPlus2.map(new Fn<Integer, String>() { @Override public String apply(Integer integer) { return String.valueOf(integer); } }); CollectAll<String> c = new CollectAll<>(); // Act sigAsString.observe(c); // Assert assertTrue("must be completed", c.completed); assertEquals(8, c.xs.size()); assertEquals(Arrays.asList("1", "3", "2", "4", "3", "5", "4", "6"), c.xs); } @Test public void testSwitchMap() { // Assign CollectAll<Integer> c = new CollectAll<>(); int[] expected = {0}; Signal<Integer> sig = Signal.from(Arrays.asList(1,2,3,4)); Signal<Integer> sig2 = Signal.create(signal -> { signal.next(1); return new Subscription() { @Override protected void onUnsubscribe() { expected[0] = expected[0] + 1; } }; }); // Act sig.switchMap((Fn<Integer, Signal<Integer>>) integer -> sig2).observe(c); // Assert assertEquals(3, expected[0]); assertEquals(4, c.xs.size()); } @Test public void testFilter() { // Assign Signal<Integer> sig = 
Signal.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); Signal<Integer> odds = sig.filter(new Pred<Integer>() { @Override public boolean apply(Integer integer) { return integer % 2 == 1; } }); CollectAll<Integer> c = new CollectAll<>(); // Act odds.observe(c); // Assert assertEquals(Arrays.asList(1, 3, 5, 7, 9), c.xs); } @Test public void testFilterWillCatchExceptions() { // Assign final boolean[] error = {false}; // Act Signal.just(1) .filter( integer -> { throw new RuntimeException(); }) .onError(throwable -> error[0] = true); // Assert assertTrue("should emit an error", error[0]); } @Test public void testTakeWhile() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); Signal<Integer> odds = sig.takeWhile(new Pred<Integer>() { @Override public boolean apply(Integer integer) { return integer < 6; } }); CollectAll<Integer> c = new CollectAll<>(); // Act odds.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 3, 4, 5), c.xs); } @Test public void testTake() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); Signal<Integer> odds = sig.take(5); CollectAll<Integer> c = new CollectAll<>(); // Act odds.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 3, 4, 5), c.xs); assertTrue("must be completed", c.completed); } @Test public void testDrop() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); Signal<Integer> odds = sig.drop(5); CollectAll<Integer> c = new CollectAll<>(); // Act odds.observe(c); // Assert assertEquals(Arrays.asList(6, 7, 8, 9, 10), c.xs); } @Test public void testDropWhile() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); Signal<Integer> odds = sig.dropWhile(new Pred<Integer>() { @Override public boolean apply(Integer integer) { return integer < 6; } }); CollectAll<Integer> c = new CollectAll<>(); // Act odds.observe(c); // Assert assertEquals(Arrays.asList(6, 7, 8, 9, 10), c.xs); } @Test public void testSingletonSignal() { // Assign Signal<String> sig = Signal.from("Hi!"); CollectAll<String> c = new CollectAll<>(); // Act sig.observe(c); // Assert assertEquals("Hi!", c.xs.get(0)); assertTrue("completed", c.completed); assertFalse("observers removed on complete", sig.hasObservers()); } @Test public void testCannedSignal() { // Assign Signal<Integer> sig = Signal.from(Arrays.asList(1, 2, 3, 4)); CollectAll<Integer> c = new CollectAll<>(); // Act sig.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 3, 4), c.xs); c.xs.clear(); // cannot re-subscribed when already finished sig.observe(c); assertEquals(0, c.xs.size()); } @Test public void testCannedMerge() { // Assign Signal<Integer> s1 = Signal.from(Arrays.asList(1, 2)); Signal<Integer> s2 = Signal.from(Arrays.asList(3, 4)); Signal<Integer> merged = Signal.merge(s1, s2); CollectAll<Integer> c = new CollectAll<>(); // Act merged.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 3, 4), c.xs); assertFalse("observers removed on complete", s1.hasObservers()); assertFalse("merged observers removed on complete", merged.hasObservers()); } @Test public void tapErrorHasSubscribeHasSideEffects() { // Assign final boolean[] sideEffect = {false}; final boolean[] tappedError = {false}; Signal<Integer> xs = Signal.<Integer>create( signal -> { sideEffect[0] = true; signal.complete(); return new Signal.Subscription(); }) .tapError( t -> { tappedError[0] = true; }); // Act xs.consume(); // Assert assertTrue("consumed has side-effect", sideEffect[0]); 
assertFalse("consumed has no error", tappedError[0]); } @Test public void tapErrorHasNoSubscribeNoError() { // Assign final boolean[] sideEffect = {false}; final boolean[] tappedError = {false}; Signal<Integer> xs = Signal.<Integer>create( signal -> { sideEffect[0] = true; signal.complete(); return new Signal.Subscription(); }) .tapError( t -> { tappedError[0] = true; }); // Act xs.consume(); // Assert assertFalse("unconsumed has no error", tappedError[0]); } @Test public void tapErrorHasNoSubscribeHasError() { // Assign final boolean[] sideEffect = {false}; final boolean[] tappedError = {false}; Signal<Integer> xs = Signal.<Integer>create( signal -> { sideEffect[0] = true; signal.complete(); return new Signal.Subscription(); }) .tapError( t -> { tappedError[0] = true; }); xs.consume(); Signal<Integer> ys = Signal.<Integer>create( signal -> { sideEffect[0] = true; signal.next(1); return new Signal.Subscription(); }) .map( x -> { if (true) { throw new IllegalStateException(); // intentional } else { return 1; } }) .tapError(t -> tappedError[0] = true); // Act ys.consume(); // Assert assertTrue("consumed has error", tappedError[0]); } @Test public void testMergeMultiple() { // Assign Signal<Integer> merged = Signal.merge(Arrays.asList( Signal.from(Arrays.asList(1, 2, 3, 4)), Signal.from(Arrays.asList(5,6,7,8)), Signal.from(Arrays.asList(9,0,1,2)))); CollectAll<Integer> c = new CollectAll<>(); // Act merged.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2), c.xs); } @Test public void testDistinct() { // Assign Signal<Integer> s1 = Signal.from(Arrays.asList(1, 2, 2, 1, 5, 9, 4, 5, 8)).distinct(); CollectAll<Integer> c = new CollectAll<>(); // Act s1.observe(c); // Assert assertEquals(Arrays.asList(1, 2, 5, 9, 4, 8), c.xs); assertFalse("observers removed on complete", s1.hasObservers()); } @Test public void testScanAndLast() { // Assign Signal<Integer> s1 = Signal.from(Arrays.asList(1, 2, 2, 1, 5, 9, 4, 5, 8)); CollectAll<ArrayList<Integer>> c = new CollectAll<>(); // Act s1.scan(new ArrayList<Integer>(), new Fn2<ArrayList<Integer>, Integer, ArrayList<Integer>>() { @Override public ArrayList<Integer> apply(ArrayList<Integer> integers, Integer integer) { integers.add(integer); return integers; } }).last().observe(c); // Assert assertEquals(Arrays.asList(1, 2, 2, 1, 5, 9, 4, 5, 8), c.xs.get(0)); assertTrue("must complete", c.completed); assertEquals(1, c.count); } @Test public void testSticky() { // Assign Signal<Integer> s1 = Signal.create(); Signal<Integer> sticky = s1.sticky(); s1.next(1); s1.next(2); s1.next(3); s1.next(4); CollectAll<Integer> c = new CollectAll<>(); // Act sticky.observe(c); // Assert assertEquals(4, (int)c.xs.get(0)); assertEquals(1, c.count); } @Test public void sharedWillMultiCast() { // Assign final AtomicInteger effectCount = new AtomicInteger(); Signal<Integer> s = Signal.create(); Signal<Integer> withSideEffect = s.map(x -> { effectCount.incrementAndGet(); return x + 0; }); Signal<Integer> shared = withSideEffect.shared(); CollectAll<Integer> ob1 = new CollectAll<>(); CollectAll<Integer> ob2 = new CollectAll<>(); shared.observe(ob1); shared.observe(ob2); // Act s.next(1); // Assert assertEquals(1, ob1.xs.size()); assertEquals(1, ob2.xs.size()); assertEquals(1, effectCount.get()); // Act s.next(2); // Assert assertEquals(2, ob1.xs.size()); assertEquals(2, ob2.xs.size()); assertEquals(2, effectCount.get()); assertEquals(Arrays.asList(1, 2), ob1.xs); assertEquals(Arrays.asList(1, 2), ob2.xs); } @Test public void delayWithNoCompletion() 
{ // Assign CollectAll<Integer> ob = new CollectAll<>(); // Act Signal.from(1).delay(1000).observe(ob); // Assert assertFalse("not completed", ob.completed); } @Test public void delayWithCompletion() { // Assign CollectAll<Integer> ob = new CollectAll<>(); Signal.from(1).delay(1000).observe(ob); // Act Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // Assert assertTrue("must be completed", ob.completed); } @Test public void delayWithUnsubscribe() { // Assign CollectAll<Integer> ob = new CollectAll<>(); Signal.Subscription s = Signal.from(1).delay(1000).observe(ob); s.unsubscribe(); // Act Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // Assert assertFalse("must not be completed", ob.completed); } @Test public void cannedIntoAsync() { // Assign CollectAll<Integer> ob = new CollectAll<>(); Signal.from(Arrays.asList(1, 2, 3)).flatMap(x -> Signal.from(x).delay(1000)).observe(ob); // Act Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // the sequential nature of async flatMap means there is a need to advance several times in // testing once for each item in the original list/async op Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // Assert assertTrue("must be completed", ob.completed); assertEquals(Arrays.asList(1, 2, 3), ob.xs); } @Test public void cannedIntoAsyncOrdering() { // Assign CollectAll<Integer> ob = new CollectAll<>(); Signal.from(Arrays.asList(3, 2, 1)) .flatMap(x -> Signal.from(Arrays.asList(x, x + 10, x + 20)).delay(x * 1000)).observe(ob); // Act Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // async advances from sequential flatMap Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); Robolectric.getForegroundThreadScheduler().advanceToLastPostedRunnable(); // Assert assertTrue("must be completed", ob.completed); assertEquals(Arrays.asList(3, 13, 23, 2, 12, 22, 1, 11, 21), ob.xs); } @Test public void observeOnMainExecutor() throws InterruptedException { // Assign CollectAll<Boolean> ob = new CollectAll<>(); CountDownLatch countDownLatch = new CountDownLatch(1); Signal<Boolean> backgroundSignal = Signal.create( signal -> { new Thread( () -> { signal.next(true); signal.complete(); countDownLatch.countDown(); }) .start(); return new Subscription(); }); backgroundSignal .tap(value -> assertNotSame(getMainLooper().getThread(), Thread.currentThread())) .observeOn(Executors.mainThreadExecutor()) .tap(value -> assertSame(getMainLooper().getThread(), Thread.currentThread())) .observe(ob); countDownLatch.await(); // Act shadowOf(getMainLooper()).idle(); // Assert assertTrue(ob.xs.get(0)); } @Test public void observeOnMainExecutorUnsubscribed() throws InterruptedException { // Assign CountDownLatch countDownLatch = new CountDownLatch(1); CollectAll<Boolean> ob = new CollectAll<>(); Signal<Boolean> backgroundSignal = Signal.create( signal -> { new Thread( () -> { signal.next(true); signal.complete(); countDownLatch.countDown(); }) .start(); return new Subscription(); }); Subscription subscription = backgroundSignal.observeOn(Executors.mainThreadExecutor()).observe(ob); subscription.unsubscribe(); // Act countDownLatch.await(); // Assert assertTrue(ob.xs.isEmpty()); } static class CollectAll<T> implements Signal.Observer<T> { final List<T> xs = new ArrayList<>(); private boolean completed; private int count = 0; @Override public void onNext(@NonNull T t) { 
xs.add(t); count++; } @Override public void onError(@NonNull Throwable t) { throw new RuntimeException(t); } @Override public void onComplete() { completed = true; } } static class Noop<T> implements Signal.Observer<T> { static <T> Noop<T> create() { return new Noop<>(); } @Override public void onNext(@NonNull T t) { } @Override public void onError(@NonNull Throwable t) { } @Override public void onComplete() { } } @Test public void recoverWithNoOp() { // Assign CollectAll<Integer> is = new CollectAll<>(); // Act Signal.from(1).recoverWith(x -> Signal.empty()).observe(is); // Assert assertEquals(1, is.xs.size()); assertEquals(Collections.singletonList(1), is.xs); } @Test public void recoverWithFromEmpty() { // Assign CollectAll<Integer> is = new CollectAll<>(); // Act Signal.<Integer>empty(new RuntimeException("should not be surfaced")).recoverWith( x -> Signal.from(2)).observe(is); // Assert assertEquals(1, is.xs.size()); assertEquals(Collections.singletonList(2), is.xs); } @Test public void recursionStackSafety() throws StackOverflowError{ // Assign Signal<Integer> signal = recurse(5000); // Act assertThrows(StackOverflowError.class, signal::consume); } @Test public void distinctUntilChanged() { // Assign CollectAll<Integer> ob = new CollectAll<>(); List<Integer> expected = Arrays.asList(1, 2, 3, 2); Signal<Integer> signal = Signal.create( signal1 -> { signal1.next(1); signal1.next(1); signal1.next(2); signal1.next(2); signal1.next(3); signal1.next(2); return new Subscription(); }); // Act signal.distinctUntilChanged().observe(ob); // Assert assertEquals(expected, ob.xs); } private static Signal<Integer> recurse(int count) { return Signal.from(count).flatMap(c -> recurse(count - 1)); } }
// Swap `target` out of the DOM for `node`, returning the inserted node.
export default (node: any, target: { replaceWith: (arg0: any) => void }) => {
  target.replaceWith(node)
  return node
}
/**
 * Creates a stream from the URL possibly referencing KML/KMZ. If this is
 * a KMZ, it will be interrogated for the enclosed KML, then that KML will be
 * reopened.
 * @param xPath XPath
 * @param url KML/KMZ url
 * @return KML stream, or <code>null</code> if the server provides a response code indicating an error
 * @throws IOException if creating stream failed
 * @throws SAXException if error parsing document
 * @throws ParserConfigurationException if error obtaining parser
 * @throws XPathExpressionException if invalid XPath expression
 */
private static InputStream openKmlStream(XPath xPath, String url)
        throws IOException, ParserConfigurationException, SAXException,
        XPathExpressionException {
    URL queryUrl = new URL(url);
    HttpURLConnection httpCon = (HttpURLConnection) queryUrl.openConnection();
    httpCon.setDoInput(true);
    httpCon.setRequestMethod("GET");
    // Check the response code before opening the body; getInputStream()
    // throws for HTTP error statuses.
    if (httpCon.getResponseCode() != HttpURLConnection.HTTP_OK) {
        return null;
    }
    InputStream responseStream = httpCon.getInputStream();
    String ct = httpCon.getContentType();
    boolean kmz = (ct != null)
            && ct.toLowerCase().contains("application/vnd.google-earth.kmz");
    InputStream wrappedStream;
    if (kmz) {
        wrappedStream = KmlUtil.extractKmlStream(xPath, responseStream);
    } else {
        wrappedStream = new BufferedInputStream(responseStream);
    }
    return wrappedStream;
}
Time-integrated CP violation in beauty at LHCb

Precision measurements of time-integrated CP violation in beauty decays permit a better understanding of the different mechanisms underlying CP violation. They make it possible to constrain the Standard Model more tightly and to probe for new physics. A selection of recent LHCb results that highlight different aspects of CP violation in b-hadron decays is presented here.
async def echo(self, remote: DcmNode) -> bool:
    loop = asyncio.get_event_loop()
    try:
        assoc = await self._associate(remote, VerificationPresentationContexts)
    except FailedAssociationError:
        log.warning("Failed to associate for 'echo'")
        return False
    try:
        status = await loop.run_in_executor(self._thread_pool, assoc.send_c_echo)
    finally:
        assoc.release()
    if status and status.Status == 0x0:
        return True
    return False
// FracInWords returns 'frac' expressed in 'fraction's in Russian words.
// If the result is 0 and 'showZero' is false, an empty string is returned.
func FracInWords(frac float64, fraction Fraction, showZero bool) string {
	if fraction == NoFraction {
		return IntInWords(int64(frac), showZero, Masculine)
	}
	absFrac := math.Abs(frac)
	var numberCase [3]string
	switch fraction {
	case Tenth:
		absFrac *= 10
		numberCase = tenthNumberCase
	case Hundredth:
		absFrac *= 100
		numberCase = hundredthNumberCase
	case Thousandth:
		absFrac *= 1000
		numberCase = thousandthNumberCase
	case Tenthousandth:
		absFrac *= 10000
		numberCase = tenthousandthNumberCase
	case Hundredthousandth:
		absFrac *= 100000
		numberCase = hundredthousandthNumberCase
	case Millionth:
		absFrac *= 1000000
		numberCase = millionthNumberCase
	case Tenmillionth:
		absFrac *= 10000000
		numberCase = tenmillionthNumberCase
	case Hundredmillionth:
		absFrac *= 100000000
		numberCase = hundredmillionthNumberCase
	case Milliardth:
		absFrac *= 1000000000
		numberCase = milliardthNumberCase
	case Tenmilliardth:
		absFrac *= 10000000000
		numberCase = tenmilliardthNumberCase
	default:
		absFrac *= 100000000000
		numberCase = hundredmilliardthNumberCase
	}
	fint, _ := math.Modf(absFrac)
	ifint := int64(fint)
	if ifint == 0 && !showZero {
		return ""
	}
	r := IntInWords(ifint, false, Feminine) + " " + numberCase[getNumeralNumberCase(ifint)]
	if frac < 0 {
		return "минус " + r
	}
	return r
}
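// A quick usage sketch (hypothetical call sites; the exact Russian wording
// depends on the case tables defined elsewhere in this package):
//
//	FracInWords(0.25, Hundredth, false) // likely "двадцать пять сотых"
//	FracInWords(-0.3, Tenth, false)     // negative values gain the "минус " prefix
//	FracInWords(0.0, Tenth, false)      // "" because showZero is false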
/* * Tencent is pleased to support the open source community by making Blueking Container Service available. * Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved. * Licensed under the MIT License (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions and * limitations under the License. * */ package driver import ( "bk-bcs/bcs-common/common" "bk-bcs/bcs-common/common/codec" btypes "bk-bcs/bcs-common/common/types" "bk-bcs/bcs-services/bcs-metricservice/pkg/types" "fmt" "io/ioutil" "os" "path/filepath" simplejson "github.com/bitly/go-simplejson" ) const ( jsonCustomFormat = "%s_custom_%s.json" jsonDefaultFormat = "%s_default.json" IPPortGap = "+" ) func GetFilePath(tempDir, namespace string, ct types.ClusterType) (target string, err error) { // custom json with namespace target = filepath.Join(tempDir, fmt.Sprintf(jsonCustomFormat, ct, namespace)) if _, err = os.Stat(target); !os.IsNotExist(err) { return } // default json target = filepath.Join(tempDir, fmt.Sprintf(jsonDefaultFormat, ct.String())) if _, err = os.Stat(target); !os.IsNotExist(err) { return } return "", fmt.Errorf("%s: %s", common.BcsErrMetricResourceFileNotExistStr, tempDir) } func LoadResourceJson(tempDir, namespace string, ct types.ClusterType) (*simplejson.Json, error) { target, err := GetFilePath(tempDir, namespace, ct) if err != nil { return nil, err } template, err := ioutil.ReadFile(target) if err != nil { return nil, err } return simplejson.NewJson(template) } type StorageTaskGroupIf struct { Namespace string `json:"namespace"` Data btypes.BcsPodStatus `json:"data"` } type StoragePodIf struct { Namespace string `json:"namespace"` Data struct { Metadata btypes.ObjectMeta `json:"metadata"` Status struct { HostIP string `json:"hostIP"` PodIP string `json:"podIP"` Phase string `json:"phase"` } `json:"status"` } `json:"data"` } func GetIPMetaFromDynamic(raw []byte, metric *types.Metric) (ipMeta map[string]btypes.ObjectMeta, err error) { switch types.GetClusterType(metric.ClusterType) { case types.ClusterMesos: return GetMesosIPMeta(raw, metric) case types.ClusterK8S: return GetK8SIPMeta(raw, metric) default: err = fmt.Errorf("unknown cluster type: %s", metric.ClusterType) return } } func GetMesosIPMeta(raw []byte, metric *types.Metric) (ipMeta map[string]btypes.ObjectMeta, err error) { var data []StorageTaskGroupIf if err = codec.DecJson(raw, &data); err != nil { return } ipMeta = make(map[string]btypes.ObjectMeta) for _, item := range data { if item.Namespace != metric.Namespace { continue } if item.Data.Status != btypes.Pod_Running { continue } if len(item.Data.ContainerStatuses) == 0 { continue } taskInfo := item.Data.ContainerStatuses[0] match := true for selectKey, selectVal := range metric.Selector { if val, ok := taskInfo.Labels[selectKey]; !ok || val != selectVal { match = false break } } if !match { continue } key := "" // No-Bridge mode if taskInfo.Network != "BRIDGE" { if taskInfo.Network == "HOST" { if item.Data.HostIP != "" { key = fmt.Sprintf("%s%s%d", item.Data.HostIP, IPPortGap, metric.Port) } } else { if item.Data.PodIP != "" { key = fmt.Sprintf("%s%s%d", item.Data.PodIP, IPPortGap, metric.Port) } } } // 
// Bridge mode
if taskInfo.Network == "BRIDGE" && item.Data.HostIP != "" { key = findMesosNetworkIPKey(item.Data.ContainerStatuses, item.Data.HostIP, int(metric.Port)) } if key != "" { ipMeta[key] = item.Data.ObjectMeta } } return } func findMesosNetworkIPKey(containerStatuses []*btypes.BcsContainerStatus, hostIP string, metricPort int) string { for _, cStatus := range containerStatuses { for _, pStatus := range cStatus.Ports { if pStatus.ContainerPort == metricPort && pStatus.HostPort > 0 { return fmt.Sprintf("%s%s%d", hostIP, IPPortGap, pStatus.HostPort) } } } return "" } func GetK8SIPMeta(raw []byte, metric *types.Metric) (ipMeta map[string]btypes.ObjectMeta, err error) { var data []StoragePodIf if err = codec.DecJson(raw, &data); err != nil { return } ipMeta = make(map[string]btypes.ObjectMeta) for _, item := range data { if item.Namespace != metric.Namespace { continue } if item.Data.Status.Phase != "Running" { continue } match := true for selectKey, selectVal := range metric.Selector { if val, ok := item.Data.Metadata.Labels[selectKey]; !ok || val != selectVal { match = false break } } if !match { continue } if item.Data.Status.PodIP != "" { ipMeta[item.Data.Status.PodIP] = item.Data.Metadata continue } if item.Data.Status.HostIP != "" { ipMeta[item.Data.Status.HostIP] = item.Data.Metadata continue } } return } func GetApplicationName(metric *types.Metric) string { return fmt.Sprintf("bcs-collector-%s", metric.Namespace) }
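// A minimal, hypothetical driver for the dispatcher above (the metric literal
// is invented for illustration; the field names follow their uses in this file):
//
//	metric := &types.Metric{
//		Namespace:   "demo",
//		ClusterType: "k8s",
//		Port:        8080,
//		Selector:    map[string]string{"app": "collector"},
//	}
//	ipMeta, err := GetIPMetaFromDynamic(rawJSON, metric) // rawJSON: []byte from storage
//	if err != nil {
//		// decode failure or unknown cluster type
//	}
//	for ipPort, meta := range ipMeta {
//		fmt.Println(ipPort, meta.Name) // assumes ObjectMeta exposes Name
//	}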
package main

import (
	"fmt"
	"sync"
)

// You can think of a WaitGroup as a concurrency-safe counter: calls to Add
// increment the counter by the integer passed in, and calls to Done decrement
// it by one. Calls to Wait block until the counter reaches zero.
// It's customary to couple calls to Add as closely as possible to the
// goroutines they're helping to track.
func main() {
	var wg sync.WaitGroup

	// Buggy version (with pre-Go 1.22 semantics): every goroutine closes over
	// the same loop variable, so by the time they run they are all likely to
	// print the final value.
	for _, salutation := range []string{"hello", "greetings", "good day"} {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(salutation)
		}()
	}
	wg.Wait()

	// Correct version: pass the loop variable as an argument so each
	// goroutine gets its own copy of the string.
	for _, salutation := range []string{"hello", "greetings", "good day"} {
		wg.Add(1)
		go func(salutation string) {
			defer wg.Done()
			fmt.Println(salutation)
		}(salutation)
	}
	wg.Wait()
}
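// An equivalent remedy (a sketch, not part of the file above): shadow the
// loop variable so each iteration's goroutine captures its own copy. From
// Go 1.22 onward loop variables are per-iteration, making this unnecessary.
//
//	for _, salutation := range []string{"hello", "greetings", "good day"} {
//		salutation := salutation // shadow: fresh variable per iteration
//		wg.Add(1)
//		go func() {
//			defer wg.Done()
//			fmt.Println(salutation)
//		}()
//	}
//	wg.Wait()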
def _get_lui(self, citekey) -> str: lui = citekey.accession if self.namespace.get("namespaceEmbeddedInLui", False): lui = f"{self.namespace['curiePrefix']}:{lui}" return lui
class GraphTools: """Initializes the graph with nodes corresponding to the number of ROIS Parameters ---------- rois : ndarray ROIs as array tracks : list Streamlines for analysis affine : ndarray a 2-D array with ones on the diagonal and zeros elsewhere (DOESN'T APPEAR TO BE Used) outdir : Path location of output directory connectome_path : str Path for the output connectome file (.csv file) attr : int, optional Path to atlas before registration. By default None sens : str, optional, DEPRACATED type of MRI scan being analyzed (can be 'dwi' or 'func'), by default "dwi" n_cpus : int, optional Number of cpus to use when computing the edges Raises ------ ValueError graph saved with unsupported igraph modality ValueError graph saved not using edgelist, gpickle, or graphml """ def __init__( self, rois, tracks, affine, outdir, connectome_path, attr=None, sens="dwi", n_cpus=1, ): self.edge_dict = defaultdict(int) #self.roi_file = rois #self.roi_img = nib.load(self.roi_file) self.rois = nib.load(rois) self.rois = self.rois.get_data().astype("int") # self.n_ids = self.rois[self.rois > 0] # self.N = len(self.n_ids) self.modal = sens self.tracks = tracks self.affine = affine self.outdir = outdir self.connectome_path = os.path.dirname(connectome_path) self.attr = attr self.n_cpus = int(n_cpus) @timer def make_graph_old(self): """ Takes streamlines and produces a graph **Positional Arguments:** streamlines: - Fiber streamlines either file or array in a dipy EuDX or compatible format. """ print("Building connectivity matrix...") self.g = nx.Graph( name="Generated by NeuroData's MRI Graphs (m2g)", date=time.asctime(time.localtime()), source="http://m2g.io", region="brain", sensor=self.modal, ecount=0, vcount=len(self.n_ids), ) print(self.g.graph) [self.g.add_node(ids) for ids in self.n_ids] nlines = np.shape(self.tracks)[0] print("# of Streamlines: " + str(nlines)) for idx, streamline in enumerate(self.tracks): if (idx % int(nlines * 0.05)) == 0: print(idx) points = np.round(streamline).astype(int) p = set() for point in points: try: loc = self.rois[point[0], point[1], point[2]] except IndexError: loc = "" if loc: p.add(loc) edges = combinations(p, 2) for edge in edges: lst = tuple([int(node) for node in edge]) self.edge_dict[tuple(sorted(lst))] += 1 edge_list = [(k[0], k[1], v) for k, v in list(self.edge_dict.items())] self.g.add_weighted_edges_from(edge_list) return self.g, self.edge_dict @timer def make_graph(self, error_margin=2, overlap_thr=1, voxel_size=2): """Takes streamlines and produces a graph using Numpy functions Parameters ---------- error_margin : int, optional Number of mm around roi's to use (i.e. 
if 2, then any voxel within 2 mm of roi is considered part of roi), by default 2 overlap_thr : int, optional The amount of overlap between an roi and streamline to be considered a connection, by default 1 voxel_size : int, optional Voxel size for roi/streamlines, by default 2 Returns ------- Graph networkx Graph object containing the connectome matrix """ print("Building connectivity matrix...") # Instantiate empty networkX graph object & dictionary # Create voxel-affine mapping lin_T, offset = _mapping_to_voxel( np.eye(4) ) # TODO : voxel_size was removed in dipy 1.0.0, make sure that didn't break anything when voxel size is not 2mm self.attr = nib.load(self.attr) self.attr = self.attr.get_data().astype("int") mx = len(np.unique(self.attr.astype(np.int64))) # node_dict = dict( # zip(np.unique(self.rois).astype("int16") + 1, np.arange(mx) + 1) # ) node_dict = dict(zip(np.unique(self.attr).astype("int16"), np.arange(mx))) lost_rois = [] # Track lost rois for un in np.unique(self.attr.astype(np.int64)): if un not in self.rois: lost_rois.append(un) if len(lost_rois) > 0: with open(f"{self.connectome_path}/lost_roi.csv", mode="w") as lost_file: lost_writer = csv.writer(lost_file, delimiter=",") lost_writer.writerow(lost_rois) nlines = len(self.tracks) print("# of Streamlines: " + str(nlines)) def worker(tracks): g = nx.Graph(ecount=0, vcount=mx) # Add empty vertices nodelist = list(range(0, mx)) for node in nodelist: # (1, mx + 1): g.add_node(node) edge_dict = defaultdict(int) for s in tracks: # Map the streamlines coordinates to voxel coordinates and get labels for label_volume # i, j, k = np.vstack(np.array([get_sphere(coord, error_margin, # (voxel_size, voxel_size, voxel_size), # self.roi_img.shape) for coord in # _to_voxel_coordinates(s, lin_T, offset)])).T # Map the streamlines coordinates to voxel coordinates points = _to_voxel_coordinates(s, lin_T, offset) # get labels for label_volume i, j, k = points.T lab_arr = self.rois[i, j, k] endlabels = [] for lab in np.unique(lab_arr).astype("int16"): if (lab > 0) and (np.sum(lab_arr == lab) >= overlap_thr): endlabels.append(node_dict[lab]) endlabels = sorted(endlabels) edges = combinations(endlabels, 2) for edge in edges: edge_dict[edge] += 1 edge_list = [(k[0], k[1], v) for k, v in edge_dict.items()] g.add_weighted_edges_from(edge_list) A = nx.to_numpy_array(g, nodelist=nodelist) return A res = Parallel(n_jobs=self.n_cpus)( delayed(worker)(self.tracks[start :: self.n_cpus]) for start in range(self.n_cpus) ) conn_matrix = reduce(np.add, res) # These steps are not needed. 
# conn_matrix = np.array(nx.to_numpy_matrix(self.g)) # conn_matrix[np.isnan(conn_matrix)] = 0 # conn_matrix[np.isinf(conn_matrix)] = 0 # conn_matrix = np.asmatrix(np.maximum(conn_matrix, conn_matrix.transpose())) g = nx.from_numpy_matrix(conn_matrix) return g def save_graph(self, graphname, fmt="igraph"): """Saves the graph to disk Parameters ---------- graphname : str Filename for the graph fmt : str, optional Format you want the graph saved as [edgelist, gpickle, graphml, txt, npy, igraph], by default "igraph" Raises ------ ValueError Unsupported modality (not dwi or func) for saving the graph in igraph format ValueError Unsupported format """ self.g.graph["ecount"] = nx.number_of_edges(self.g) self.g = nx.convert_node_labels_to_integers(self.g, first_label=0) print(self.g.graph) if fmt == "edgelist": nx.write_weighted_edgelist(self.g, graphname, encoding="utf-8") elif fmt == "gpickle": nx.write_gpickle(self.g, graphname) elif fmt == "graphml": nx.write_graphml(self.g, graphname) elif fmt == "txt": np.savetxt(graphname, nx.to_numpy_matrix(self.g)) elif fmt == "npy": np.save(graphname, nx.to_numpy_matrix(self.g)) elif fmt == "igraph": nx.write_weighted_edgelist( self.g, graphname, delimiter=" ", encoding="utf-8" ) else: raise ValueError( "Only edgelist, gpickle, graphml, txt, and npy are currently supported" ) if not os.path.isfile(graphname): raise FileNotFoundError(f"File {graphname} not created.") print(f"Graph saved. Output location here: {graphname}") def save_graph_png(self, qa_dir, graphname): """Saves adjacency graph, made using graspy's heatmap function, as a png. This will be saved in the qa/graphs_plotting/ directory Parameters ---------- graphname : str name of the generated graph (do not include '.png') """ conn_matrix = np.array(nx.to_numpy_matrix(self.g)) conn_matrix = ptr.pass_to_ranks(conn_matrix) heatmap(conn_matrix) outpath = str(qa_dir / f"{Path(graphname).stem}.png") plt.savefig(outpath) plt.close() def summary(self): """ User friendly wrapping and display of graph properties """ print("\nGraph Summary:") print(nx.info(self.g))
class TextRank: """ Python impl of TextRank by Milhacea, et al., as a spaCy extension, used to extract the top-ranked phrases from a text document """ _EDGE_WEIGHT = 1 _POS_KEPT = ["ADJ", "NOUN", "PROPN", "VERB"] # _POS_KEPT = [ "NOUN", "ADJ","PROPN"] _TOKEN_LOOKBACK = 3 # : 79.61 # R: 81.19 # F1: 80.39 def __init__ ( self, edge_weight=_EDGE_WEIGHT, logger=None, pos_kept=_POS_KEPT, scrubber=default_scrubber, token_lookback=_TOKEN_LOOKBACK ): self.edge_weight = edge_weight self.logger = logger self.pos_kept = pos_kept self.scrubber = scrubber self.stopwords = defaultdict(list) self.token_lookback = token_lookback self.doc = None self.reset() def reset (self): """ initialize the data structures needed for extracting phrases removing any state """ self.elapsed_time = 0.0 self.lemma_graph = nx.Graph() self.phrases = defaultdict(list) self.ranks = {} self.seen_lemma = OrderedDict() def load_stopwords (self, path="stop.json"): """ load a list of "stop words" that get ignored when constructing the lemma graph -- NB: be cautious when using this feature """ stop_path = None # check if the path is fully qualified, or if the file is in # the current working directory if os.path.isfile(path): stop_path = path else: cwd = os.getcwd() stop_path = os.path.join(cwd, path) if not os.path.isfile(stop_path): loc = os.path.realpath(os.path.join(cwd, os.path.dirname(__file__))) stop_path = os.path.join(loc, path) try: with open(stop_path, "r") as f: data = json.load(f) for lemma, pos_list in data.items(): self.stopwords[lemma] = pos_list except FileNotFoundError: pass def increment_edge (self, node0, node1): """ increment the weight for an edge between the two given nodes, creating the edge first if needed """ if self.logger: self.logger.debug("link {} {}".format(node0, node1)) if self.lemma_graph.has_edge(node0, node1): self.lemma_graph[node0][node1]["weight"] += self.edge_weight else: self.lemma_graph.add_edge(node0, node1, weight=self.edge_weight) def link_sentence (self, sent): """ link nodes and edges into the lemma graph for one parsed sentence """ visited_tokens = [] visited_nodes = [] for i in range(sent.start, sent.end): token = self.doc[i] if token.pos_ in self.pos_kept: # skip any stop words... 
lemma = token.lemma_.lower().strip() if lemma in self.stopwords and token.pos_ in self.stopwords[lemma]: continue # ...otherwise proceed key = (token.lemma_, token.pos_) if key not in self.seen_lemma: self.seen_lemma[key] = set([token.i]) else: self.seen_lemma[key].add(token.i) node_id = list(self.seen_lemma.keys()).index(key) if not node_id in self.lemma_graph: self.lemma_graph.add_node(node_id) if self.logger: self.logger.debug("visit {} {}".format( visited_tokens, visited_nodes )) self.logger.debug("range {}".format( list(range(len(visited_tokens) - 1, -1, -1)) )) for prev_token in range(len(visited_tokens) - 1, -1, -1): if self.logger: self.logger.debug("prev_tok {} {}".format( prev_token, (token.i - visited_tokens[prev_token]) )) if (token.i - visited_tokens[prev_token]) <= self.token_lookback: self.increment_edge(node_id, visited_nodes[prev_token]) else: break if self.logger: self.logger.debug(" -- {} {} {} {} {} {}".format( token.i, token.text, token.lemma_, token.pos_, visited_tokens, visited_nodes )) visited_tokens.append(token.i) visited_nodes.append(node_id) def collect_phrases (self, chunk): """ collect instances of phrases from the lemma graph based on the given chunk """ phrase = CollectedPhrase(chunk, self.scrubber) compound_key = set([]) for i in phrase.range(): token = self.doc[i] key = (token.lemma_, token.pos_) if key in self.seen_lemma: node_id = list(self.seen_lemma.keys()).index(key) rank = self.ranks[node_id] phrase.sq_sum_rank += rank compound_key.add(key) if self.logger: self.logger.debug(" {} {} {} {}".format( token.lemma_, token.pos_, node_id, rank )) else: phrase.non_lemma += 1 phrase.set_key(compound_key) phrase.calc_rank() self.phrases[phrase.key].append(phrase) if self.logger: self.logger.debug(phrase) def calc_textrank (self): """ iterate through each sentence in the doc, constructing a lemma graph then returning the top-ranked phrases """ self.reset() t0 = time.time() for sent in self.doc.sents: self.link_sentence(sent) if self.logger: self.logger.debug(self.seen_lemma) # to run the algorithm, we use PageRank – i.e., approximating # eigenvalue centrality – to calculate ranks for each of the # nodes in the lemma graph self.ranks = nx.pagerank(self.lemma_graph) # collect the top-ranked phrases based on both the noun chunks # and the named entities for chunk in self.doc.noun_chunks: self.collect_phrases(chunk) for ent in self.doc.ents: self.collect_phrases(ent) # since noun chunks can be expressed in different ways (e.g., may # have articles or prepositions), we need to find a minimum span # for each phrase based on combinations of lemmas min_phrases = {} for phrase_key, phrase_list in self.phrases.items(): phrase_list.sort(key=lambda p: p.rank, reverse=True) best_phrase = phrase_list[0] min_phrases[best_phrase.text] = (best_phrase.rank, len(phrase_list), phrase_key) # yield results results = sorted(min_phrases.items(), key=lambda x: x[1][0], reverse=True) phrase_list = [ Phrase(p, r, c, self.phrases[k]) for p, (r, c, k) in results ] t1 = time.time() self.elapsed_time = (t1 - t0) * 1000.0 return phrase_list def write_dot (self, path="graph.dot"): """ output the lemma graph in Dot file format """ keys = list(self.seen_lemma.keys()) dot = graphviz.Digraph() for node_id in self.lemma_graph.nodes(): text = keys[node_id][0].lower() rank = self.ranks[node_id] label = "{} ({:.4f})".format(text, rank) dot.node(str(node_id), label) for edge in self.lemma_graph.edges(): dot.edge(str(edge[0]), str(edge[1]), constraint="false") with open(path, "w") as f: 
f.write(dot.source) def summary (self, limit_phrases=10, limit_sentences=4): """ run extractive summarization, based on vector distance per sentence from the top-ranked phrases """ unit_vector = [] # construct a list of sentence boundaries with a phrase set # for each (initialized to empty) sent_bounds = [ [s.start, s.end, set([])] for s in self.doc.sents ] # iterate through the top-ranked phrases, added them to the # phrase vector for each sentence phrase_id = 0 for p in self.doc._.phrases: unit_vector.append(p.rank) if self.logger: self.logger.debug( "{} {} {}".format(phrase_id, p.text, p.rank) ) for chunk in p.chunks: for sent_start, sent_end, sent_vector in sent_bounds: if chunk.start >= sent_start and chunk.start <= sent_end: sent_vector.add(phrase_id) if self.logger: self.logger.debug( " {} {} {} {}".format(sent_start, chunk.start, chunk.end, sent_end) ) break phrase_id += 1 if phrase_id == limit_phrases: break # construct a unit_vector for the top-ranked phrases, up to # the requested limit sum_ranks = sum(unit_vector) unit_vector = [ rank/sum_ranks for rank in unit_vector ] # iterate through each sentence, calculating its euclidean # distance from the unit vector sent_rank = {} sent_id = 0 for sent_start, sent_end, sent_vector in sent_bounds: sum_sq = 0.0 for phrase_id in range(len(unit_vector)): if phrase_id not in sent_vector: sum_sq += unit_vector[phrase_id]**2.0 sent_rank[sent_id] = sqrt(sum_sq) sent_id += 1 # extract the sentences with the lowest distance sent_text = {} sent_id = 0 for sent in self.doc.sents: sent_text[sent_id] = sent sent_id += 1 # yield results, up to the limit requested num_sent = 0 for sent_id, rank in sorted(sent_rank.items(), key=itemgetter(1)): yield sent_text[sent_id] num_sent += 1 if num_sent == limit_sentences: break def PipelineComponent (self, doc): """ define a custom pipeline component for spaCy and extend the Doc class to add TextRank """ self.doc = doc Doc.set_extension("phrases", force=True, default=[]) Doc.set_extension("textrank", force=True, default=self) doc._.phrases = self.calc_textrank() return doc
Reply to "A Critical Review of Proximal Fibular Osteotomy for Knee Osteoarthritis". Proximal fibular osteotomy is a surgical procedure that has evoked significant interest and controversy in the recent past. Vaishya et al have made a significant effort in compiling the available evidence on the topic. However, we would like to make some significant suggestions and additions to the findings in their manuscript.
def send_email(min_data): message = emails.html(subject=T(email_subject), html=T(email_contents), mail_from=(config.email["from"]["name"], config.email["from"]["email"])) message.attach(filename='temps_plot.png', data=open('temps_plot.png', 'rb')) message.attachments['temps_plot.png'].is_inline = True message.transformer.synchronize_inline_images() message.transformer.save() render_data = { "temperature": min_data[0], "temp_time": min_data[1].strftime("%-I:%M %p on %A %b %-d"), "now": datetime.now().strftime("%-I:%M %p %A %b %-d %Y") } return message.send(to=(config.email["to"]["name"], config.email["to"]["email"]), render=render_data, smtp=config.email["smtp"])
// Accept implements the NetworkListener interface. func (ln *inmemListener) Accept() (net.Conn, error) { deadline := ln.deadline if !deadline.IsZero() { select { case conn := <-ln.pendingConns: return conn, nil case <-time.After(time.Until(deadline)): return nil, deadlineError("deadline") case <-ln.stopCh: return nil, errors.New("listener shut down") } } select { case conn := <-ln.pendingConns: return conn, nil case <-ln.stopCh: return nil, errors.New("listener shut down") } }
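// The Accept path above returns deadlineError, which is not shown in this
// fragment; a minimal sketch, assuming it should satisfy net.Error so callers
// can detect timeouts generically:
type deadlineError string

func (e deadlineError) Error() string   { return "inmem listener: " + string(e) }
func (e deadlineError) Timeout() bool   { return true } // deadline expiry is a timeout
func (e deadlineError) Temporary() bool { return true } // retrying later may succeed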
Toward a Moral Hyperrealism This chapter discusses what it calls the “moral hyperrealism” of advertising and the ways in which advertising affects our ethical sensibility as well as the transformation of our moral universe within which our happiness and moral perfection are determined by our relationship to things and things qua commodities. It first considers the exponential growth of reflexive kinds of knowledge as a distinctive feature of late modernity before explaining why advertising is a particular form of morality. It then examines the claim by sociologists and philosophers that advertising feeds excesses and illusions and creates pathologies. It suggests that advertising has captured the moral sensibility of contemporary man better than other discourses or theories and that it has the status and the dignity of a public moral discourse whose focus is on things—viewed as the primordial incarnation of the good.
package com.foxinmy.weixin4j.server.mp.payment.v2;

import java.util.HashMap;
import java.util.Map;

import com.alibaba.fastjson.JSON;
import com.foxinmy.weixin4j.exception.PayException;
import com.foxinmy.weixin4j.model.WeixinPayAccount;
import com.foxinmy.weixin4j.type.SignType;
import com.foxinmy.weixin4j.util.Weixin4jConfigUtil;
import com.foxinmy.weixin4j.util.DateUtil;
import com.foxinmy.weixin4j.util.DigestUtil;
import com.foxinmy.weixin4j.util.MapUtil;
import com.foxinmy.weixin4j.util.RandomUtil;
import com.foxinmy.weixin4j.util.StringUtil;
import com.foxinmy.weixin4j.xml.XmlStream;

/**
 * V2 payment utilities (JSAPI, NATIVE)
 *
 * @className PayUtil2
 * @author jy
 * @date 2014-10-28
 * @since JDK 1.7
 * @see
 */
public class PayUtil2 {
	/**
	 * Generates a V2.x JSAPI payment request string
	 *
	 * @param payPackage
	 *            order information
	 * @param weixinAccount
	 *            merchant account information
	 * @return payment JSON string
	 */
	public static String createPayJsRequestJsonV2(PayPackageV2 payPackage,
			WeixinPayAccount weixinAccount) {
		if (StringUtil.isBlank(payPackage.getPartner())) {
			payPackage.setPartner(weixinAccount.getPartnerId());
		}
		JsPayRequestV2 jsPayRequest = new JsPayRequestV2(weixinAccount, payPackage);
		jsPayRequest.setPaySign(paysignSha(jsPayRequest, weixinAccount.getPaySignKey()));
		jsPayRequest.setSignType(SignType.SHA1);
		return JSON.toJSONString(jsPayRequest);
	}

	/**
	 * Generates a V2.x JSAPI payment request string
	 *
	 * @param body
	 *            payment description
	 * @param orderNo
	 *            order number
	 * @param orderFee
	 *            order total; pass the actual amount in yuan, the constructor
	 *            converts it to fen
	 * @param notify_url
	 *            payment notification URL
	 * @param ip
	 *            client IP
	 * @param weixinAccount
	 *            merchant account information
	 * @return payment JSON string
	 */
	public static String createPayJsRequestJsonV2(String body, String orderNo,
			double orderFee, String notify_url, String ip,
			WeixinPayAccount weixinAccount) {
		PayPackageV2 payPackage = new PayPackageV2(body, orderNo, null,
				orderFee, notify_url, ip);
		payPackage.setPartner(weixinAccount.getPartnerId());
		return createPayJsRequestJsonV2(payPackage, weixinAccount);
	}

	/**
	 * SHA signature (generally used for V2.x payment interfaces)
	 *
	 * @param obj
	 *            object to sign
	 * @return
	 */
	public static String paysignSha(Object obj) {
		return DigestUtil.SHA1(MapUtil.toJoinString(obj, false, true, null));
	}

	/**
	 * SHA signature (generally used for V2.x payment interfaces)
	 *
	 * @param obj
	 *            object to sign
	 * @param paySignKey
	 *            payment API secret; <font color="red">note that it goes into
	 *            the sorted map as put("appKey", paySignKey)</font>
	 * @return
	 */
	public static String paysignSha(Object obj, String paySignKey) {
		Map<String, String> extra = new HashMap<String, String>();
		extra.put("appKey", paySignKey);
		return DigestUtil.SHA1(MapUtil.toJoinString(obj, false, true, extra));
	}

	/**
	 * Creates a V2.x NativePay payment link
	 *
	 * @param weixinAccount
	 *            merchant account information
	 * @param productId
	 *            equivalent to the order ID
	 * @return payment link
	 */
	public static String createNativePayRequestURLV2(
			WeixinPayAccount weixinAccount, String productId) {
		Map<String, String> map = new HashMap<String, String>();
		String timestamp = DateUtil.timestamp2string();
		String noncestr = RandomUtil.generateString(16);
		map.put("appid", weixinAccount.getId());
		map.put("timestamp", timestamp);
		map.put("noncestr", noncestr);
		map.put("productid", productId);
		map.put("appkey", weixinAccount.getPaySignKey());
		String sign = paysignSha(map);
		return String
				.format("weixin://wxpay/bizpayurl?sign=%s&appid=%s&productid=%s&timestamp=%s&noncestr=%s",
						sign, weixinAccount.getId(), productId, timestamp, noncestr);
	}

	/**
	 * Creates the response string for a V2.x NATIVE callback
	 *
	 * @param weixinAccount
	 *            merchant account information
	 * @param payPackage
	 *            order information
	 * @return
	 */
	public static String createNativePayResponseV2(
			WeixinPayAccount weixinAccount, PayPackageV2 payPackage) {
		NativePayResponseV2 payRequest = new NativePayResponseV2(weixinAccount, payPackage);
		Map<String, String> map = new HashMap<String, String>();
		String timestamp = DateUtil.timestamp2string();
		String noncestr = RandomUtil.generateString(16);
		map.put("appid", weixinAccount.getId());
		map.put("appkey", weixinAccount.getPaySignKey());
		map.put("timestamp", timestamp);
		map.put("noncestr", noncestr);
		map.put("package", payRequest.getPackageInfo());
		map.put("retcode", payRequest.getRetCode());
		map.put("reterrmsg", payRequest.getRetMsg());
		payRequest.setPaySign(paysignSha(map));
		return XmlStream.toXML(payRequest);
	}

	private static String JSAPIV2() {
		WeixinPayAccount weixinAccount = JSON.parseObject(
				Weixin4jConfigUtil.getValue("account"), WeixinPayAccount.class);
		// Argument order follows the method signature:
		// (body, orderNo, orderFee, notify_url, ip, account)
		return createPayJsRequestJsonV2("支付测试", "JSAPI01", 0.01d,
				"http://127.0.0.1/jsapi/notify", "127.0.0.0", weixinAccount);
	}

	private static String NATIVEV2() {
		WeixinPayAccount weixinAccount = JSON.parseObject(
				Weixin4jConfigUtil.getValue("account"), WeixinPayAccount.class);
		return createNativePayRequestURLV2(weixinAccount, "P1");
	}

	public static void main(String[] args) throws PayException {
		// JSAPI payment under V2
		System.out.println(JSAPIV2());
		// Native payment under V2
		System.out.println(NATIVEV2());
	}
}
<reponame>gyohuangxin/cluster-ingress-operator /* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package markers import ( "bytes" "errors" "fmt" "reflect" "strconv" "strings" "sync" sc "text/scanner" "unicode" "sigs.k8s.io/controller-tools/pkg/loader" ) // expect checks that the next token of the scanner is the given token, adding an error // to the scanner if not. It returns whether the token was as expected. func expect(scanner *sc.Scanner, expected rune, errDesc string) bool { tok := scanner.Scan() if tok != expected { scanner.Error(scanner, fmt.Sprintf("expected %s, got %q", errDesc, scanner.TokenText())) return false } return true } var ( // interfaceType is a pre-computed reflect.Type representing the empty interface. interfaceType = reflect.TypeOf((*interface{})(nil)).Elem() rawArgsType = reflect.TypeOf((*RawArguments)(nil)).Elem() ) // lowerCamelCase converts PascalCase string to // a camelCase string (by lowering the first rune). func lowerCamelCase(in string) string { isFirst := true return strings.Map(func(inRune rune) rune { if isFirst { isFirst = false return unicode.ToLower(inRune) } return inRune }, in) } // RawArguments is a special type that can be used for a marker // to receive *all* raw, underparsed argument data for a marker. // You probably want to use `interface{}` to match any type instead. // Use *only* for legacy markers that don't follow Definition's normal // parsing logic. It should *not* be used as a field in a marker struct. type RawArguments []byte // ArgumentType is the kind of a marker argument type. // It's roughly analogous to a subset of reflect.Kind, with // an extra "AnyType" to represent the empty interface. type ArgumentType int const ( // Invalid represents a type that can't be parsed, and should never be used. InvalidType ArgumentType = iota // IntType is an int IntType // StringType is a string StringType // BoolType is a bool BoolType // AnyType is the empty interface, and matches the rest of the content AnyType // SliceType is any slice constructed of the ArgumentTypes SliceType // RawType represents content that gets passed directly to the marker // without any parsing. It should *only* be used with anonymous markers. RawType ) // Argument is the type of a marker argument. type Argument struct { Type ArgumentType Optional bool Pointer bool ItemType *Argument } // typeString contains the internals of TypeString. func (a Argument) typeString(out *strings.Builder) { if a.Pointer { out.WriteRune('*') } switch a.Type { case InvalidType: out.WriteString("<invalid>") case IntType: out.WriteString("int") case StringType: out.WriteString("string") case BoolType: out.WriteString("bool") case AnyType: out.WriteString("<any>") case SliceType: out.WriteString("[]") // arguments can't be non-pointer optional, so just call into typeString again. a.ItemType.typeString(out) case RawType: out.WriteString("<raw>") } } // TypeString returns a string roughly equivalent // (but not identical) to the underlying Go type that // this argument would parse to. 
It's mainly useful // for user-friendly formatting of this argument (e.g. // help strings). func (a Argument) TypeString() string { out := &strings.Builder{} a.typeString(out) return out.String() } func (a Argument) String() string { if a.Optional { return fmt.Sprintf("<optional arg %s>", a.TypeString()) } return fmt.Sprintf("<arg %s>", a.TypeString()) } // castAndSet casts val to out's type if needed, // then sets out to val. func castAndSet(out, val reflect.Value) { outType := out.Type() if outType != val.Type() { val = val.Convert(outType) } out.Set(val) } // makeSliceType makes a reflect.Type for a slice of the given type. // Useful for constructing the out value for when AnyType's guess returns a slice. func makeSliceType(itemType Argument) (reflect.Type, error) { var itemReflectedType reflect.Type switch itemType.Type { case IntType: itemReflectedType = reflect.TypeOf(int(0)) case StringType: itemReflectedType = reflect.TypeOf("") case BoolType: itemReflectedType = reflect.TypeOf(false) case SliceType: subItemType, err := makeSliceType(*itemType.ItemType) if err != nil { return nil, err } itemReflectedType = subItemType default: return nil, fmt.Errorf("invalid type when constructing guessed slice out: %v", itemType.Type) } if itemType.Pointer { itemReflectedType = reflect.PtrTo(itemReflectedType) } return reflect.SliceOf(itemReflectedType), nil } // guessType takes an educated guess about the type of the next field. If allowSlice // is false, it will not guess slices. It's less efficient than parsing with actual // type information, since we need to allocate to peek ahead full tokens, and the scanner // only allows peeking ahead one character. func guessType(scanner *sc.Scanner, raw string, allowSlice bool) *Argument { if allowSlice { maybeItem := guessType(scanner, raw, false) subRaw := raw[scanner.Pos().Offset:] subScanner := parserScanner(subRaw, scanner.Error) var tok rune for tok = subScanner.Scan(); tok != ',' && tok != sc.EOF && tok != ';'; tok = subScanner.Scan() { // wait till we get something interesting } // semicolon means it's a legacy slice if tok == ';' { return &Argument{ Type: SliceType, ItemType: maybeItem, } } return maybeItem } // first, try the easy case -- quoted strings strings hint := scanner.Peek() switch hint { case '"', '\'', '`': return &Argument{Type: StringType} } // everything else needs a duplicate scanner to scan properly // (so we don't consume our scanner tokens until we actually // go to use this -- Go doesn't like scanners that can be rewound). subRaw := raw[scanner.Pos().Offset:] subScanner := parserScanner(subRaw, scanner.Error) // next, check for slices if hint == '{' { subScanner.Scan() return &Argument{ Type: SliceType, ItemType: guessType(subScanner, subRaw, false), } } // then, bools... probablyString := false if hint == 't' || hint == 'f' { // maybe a bool if nextTok := subScanner.Scan(); nextTok == sc.Ident { switch subScanner.TokenText() { case "true", "false": // definitely a bool return &Argument{Type: BoolType} } // probably a string probablyString = true } else { // we shouldn't ever get here scanner.Error(scanner, fmt.Sprintf("got a token (%q) that looked like an ident, but was not", scanner.TokenText())) return &Argument{Type: InvalidType} } } if !probablyString { if nextTok := subScanner.Scan(); nextTok == sc.Int { return &Argument{Type: IntType} } } // otherwise assume bare strings return &Argument{Type: StringType} } // parseString parses either of the two accepted string forms (quoted, or bare tokens). 
func (a *Argument) parseString(scanner *sc.Scanner, raw string, out reflect.Value) { // strings are a bit weird -- the "easy" case is quoted strings (tokenized as strings), // the "hard" case (present for backwards compat) is a bare sequence of tokens that aren't // a comma. tok := scanner.Scan() if tok == sc.String || tok == sc.RawString { // the easy case val, err := strconv.Unquote(scanner.TokenText()) if err != nil { scanner.Error(scanner, fmt.Sprintf("unable to parse string: %v", err)) return } castAndSet(out, reflect.ValueOf(val)) return } // the "hard" case -- bare tokens not including ',' (the argument // separator), ';' (the slice separator), or '}' (delimitted slice // ender) startPos := scanner.Position.Offset for hint := scanner.Peek(); hint != ',' && hint != ';' && hint != '}' && hint != sc.EOF; hint = scanner.Peek() { // skip this token scanner.Scan() } endPos := scanner.Position.Offset + len(scanner.TokenText()) castAndSet(out, reflect.ValueOf(raw[startPos:endPos])) } // parseSlice parses either of the two slice forms (curly-brace-delimitted and semicolon-separated). func (a *Argument) parseSlice(scanner *sc.Scanner, raw string, out reflect.Value) { // slices have two supported formats, like string: // - `{val, val, val}` (preferred) // - `val;val;val` (legacy) resSlice := reflect.Zero(out.Type()) elem := reflect.Indirect(reflect.New(out.Type().Elem())) // preferred case if scanner.Peek() == '{' { // NB(directxman12): supporting delimitted slices in bare slices // would require an extra look-ahead here :-/ scanner.Scan() // skip '{' for hint := scanner.Peek(); hint != '}' && hint != sc.EOF; hint = scanner.Peek() { a.ItemType.parse(scanner, raw, elem, true) resSlice = reflect.Append(resSlice, elem) tok := scanner.Peek() if tok == '}' { break } if !expect(scanner, ',', "comma") { return } } if !expect(scanner, '}', "close curly brace") { return } castAndSet(out, resSlice) return } // legacy case for hint := scanner.Peek(); hint != ',' && hint != '}' && hint != sc.EOF; hint = scanner.Peek() { a.ItemType.parse(scanner, raw, elem, true) resSlice = reflect.Append(resSlice, elem) tok := scanner.Peek() if tok == ',' || tok == '}' || tok == sc.EOF { break } scanner.Scan() if tok != ';' { scanner.Error(scanner, fmt.Sprintf("expected comma, got %q", scanner.TokenText())) return } } castAndSet(out, resSlice) } // parse functions like Parse, except that it allows passing down whether or not we're // already in a slice, to avoid duplicate legacy slice detection for AnyType func (a *Argument) parse(scanner *sc.Scanner, raw string, out reflect.Value, inSlice bool) { // nolint:gocyclo if a.Type == InvalidType { scanner.Error(scanner, fmt.Sprintf("cannot parse invalid type")) return } if a.Pointer { out.Set(reflect.New(out.Type().Elem())) out = reflect.Indirect(out) } switch a.Type { case RawType: // raw consumes everything else castAndSet(out, reflect.ValueOf(raw[scanner.Pos().Offset:])) // consume everything else for tok := scanner.Scan(); tok != sc.EOF; tok = scanner.Scan() { } case IntType: if !expect(scanner, sc.Int, "integer") { return } // TODO(directxman12): respect the size when parsing val, err := strconv.Atoi(scanner.TokenText()) if err != nil { scanner.Error(scanner, fmt.Sprintf("unable to parse integer: %v", err)) return } castAndSet(out, reflect.ValueOf(val)) case StringType: // strings are a bit weird -- the "easy" case is quoted strings (tokenized as strings), // the "hard" case (present for backwards compat) is a bare sequence of tokens that aren't // a comma. 
a.parseString(scanner, raw, out) case BoolType: if !expect(scanner, sc.Ident, "true or false") { return } switch scanner.TokenText() { case "true": castAndSet(out, reflect.ValueOf(true)) case "false": castAndSet(out, reflect.ValueOf(false)) default: scanner.Error(scanner, fmt.Sprintf("expected true or false, got %q", scanner.TokenText())) return } case AnyType: guessedType := guessType(scanner, raw, !inSlice) newOut := out if guessedType.Type == SliceType { // we need to be able to construct the right element types, below // in parse, so construct a concretely-typed value to use as "out" newType, err := makeSliceType(*guessedType.ItemType) if err != nil { scanner.Error(scanner, err.Error()) return } newOut = reflect.Indirect(reflect.New(newType)) } if !newOut.CanSet() { panic("at the disco") } guessedType.Parse(scanner, raw, newOut) castAndSet(out, newOut) case SliceType: // slices have two supported formats, like string: // - `{val, val, val}` (preferred) // - `val;val;val` (legacy) a.parseSlice(scanner, raw, out) } } // Parse attempts to consume the argument from the given scanner (based on the given // raw input as well for collecting ranges of content), and places the output value // in the given reflect.Value. Errors are reported via the given scanner. func (a *Argument) Parse(scanner *sc.Scanner, raw string, out reflect.Value) { a.parse(scanner, raw, out, false) } // ArgumentFromType constructs an Argument by examining the given // raw reflect.Type. It can construct arguments from the Go types // corresponding to any of the types listed in ArgumentType. func ArgumentFromType(rawType reflect.Type) (Argument, error) { if rawType == rawArgsType { return Argument{ Type: RawType, }, nil } if rawType == interfaceType { return Argument{ Type: AnyType, }, nil } arg := Argument{} if rawType.Kind() == reflect.Ptr { rawType = rawType.Elem() arg.Pointer = true arg.Optional = true } switch rawType.Kind() { case reflect.String: arg.Type = StringType case reflect.Int, reflect.Int32: // NB(directxman12): all ints in kubernetes are int32, so explicitly support that arg.Type = IntType case reflect.Bool: arg.Type = BoolType case reflect.Slice: arg.Type = SliceType itemType, err := ArgumentFromType(rawType.Elem()) if err != nil { return Argument{}, fmt.Errorf("bad slice item type: %v", err) } arg.ItemType = &itemType default: return Argument{}, fmt.Errorf("type has unsupported kind %s", rawType.Kind()) } return arg, nil } // TargetType describes which kind of node a given marker is associated with. type TargetType int const ( // DescribesPackage indicates that a marker is associated with a package. DescribesPackage TargetType = iota // DescribesType indicates that a marker is associated with a type declaration. DescribesType // DescribesField indicates that a marker is associated with a struct field. DescribesField ) // Definition is a parsed definition of a marker. type Definition struct { // Output is the deserialized Go type of the marker. Output reflect.Type // Name is the marker's name. Name string // Target indicates which kind of node this marker can be associated with. Target TargetType // Fields lists out the types of each field that this marker has, by // argument name as used in the marker (if the output type isn't a struct, // it'll have a single, blank field name). This only lists exported fields, // (as per reflection rules). Fields map[string]Argument // FieldNames maps argument names (as used in the marker) to struct field name // in the output type. 
FieldNames map[string]string // Strict indicates that this definition should error out when parsing if // not all non-optional fields were seen. Strict bool } // AnonymousField indicates that the definition has one field, // (actually the original object), and thus the field // doesn't get named as part of the name. func (d *Definition) AnonymousField() bool { if len(d.Fields) != 1 { return false } _, hasAnonField := d.Fields[""] return hasAnonField } // Empty indicates that this definition has no fields. func (d *Definition) Empty() bool { return len(d.Fields) == 0 } // loadFields uses reflection to populate argument information from the Output type. func (d *Definition) loadFields() error { if d.Fields == nil { d.Fields = make(map[string]Argument) d.FieldNames = make(map[string]string) } if d.Output.Kind() != reflect.Struct { // anonymous field type argType, err := ArgumentFromType(d.Output) if err != nil { return err } d.Fields[""] = argType d.FieldNames[""] = "" return nil } for i := 0; i < d.Output.NumField(); i++ { field := d.Output.Field(i) if field.PkgPath != "" { // as per the reflect package docs, pkgpath is empty for exported fields, // so non-empty package path means a private field, which we should skip continue } argName := lowerCamelCase(field.Name) markerTag, tagSpecified := field.Tag.Lookup("marker") markerTagParts := strings.Split(markerTag, ",") if tagSpecified && markerTagParts[0] != "" { // allow overriding to support legacy cases where we don't follow camelCase conventions argName = markerTagParts[0] } argType, err := ArgumentFromType(field.Type) if err != nil { return fmt.Errorf("unable to extract type information for field %q: %v", field.Name, err) } if argType.Type == RawType { return fmt.Errorf("RawArguments must be the direct type of a marker, and not a field") } for _, tagOption := range markerTagParts[1:] { switch tagOption { case "optional": argType.Optional = true } } d.Fields[argName] = argType d.FieldNames[argName] = field.Name } return nil } // parserScanner makes a new scanner appropriate for use in parsing definitions and arguments. func parserScanner(raw string, err func(*sc.Scanner, string)) *sc.Scanner { scanner := &sc.Scanner{} scanner.Init(bytes.NewBufferString(raw)) scanner.Mode = sc.ScanIdents | sc.ScanInts | sc.ScanStrings | sc.ScanRawStrings | sc.SkipComments scanner.Error = err return scanner } // Parse uses the type information in this Definition to parse the given // raw marker in the form `+a:b:c=arg,d=arg` into an output object of the // type specified in the definition. func (d *Definition) Parse(rawMarker string) (interface{}, error) { name, anonName, fields := splitMarker(rawMarker) out := reflect.Indirect(reflect.New(d.Output)) // if we're a not a struct or have no arguments, treat the full `a:b:c` as the name, // otherwise, treat `c` as a field name, and `a:b` as the marker name. 
if !d.AnonymousField() && !d.Empty() && len(anonName) >= len(name)+1 { fields = anonName[len(name)+1:] + "=" + fields } var errs []error scanner := parserScanner(fields, func(_ *sc.Scanner, msg string) { errs = append(errs, errors.New(msg)) }) // TODO(directxman12): strict parsing where we error out if certain fields aren't optional seen := make(map[string]struct{}, len(d.Fields)) if d.AnonymousField() && scanner.Peek() != sc.EOF { // no need for trying to parse field names if we're not a struct field := d.Fields[""] field.Parse(scanner, fields, out) seen[""] = struct{}{} // mark as seen for strict definitions } else if !d.Empty() && scanner.Peek() != sc.EOF { // if we expect *and* actually have arguments passed for { // parse the argument name if !expect(scanner, sc.Ident, "argument name") { break } argName := scanner.TokenText() if !expect(scanner, '=', "equals") { break } // make sure we know the field fieldName, known := d.FieldNames[argName] if !known { scanner.Error(scanner, fmt.Sprintf("unknown argument %q", argName)) break } fieldType, known := d.Fields[argName] if !known { scanner.Error(scanner, fmt.Sprintf("unknown argument %q", argName)) break } seen[argName] = struct{}{} // mark as seen for strict definitions // parse the field value fieldVal := out.FieldByName(fieldName) if !fieldVal.CanSet() { scanner.Error(scanner, fmt.Sprintf("cannot set field %q (might not exist)", fieldName)) break } fieldType.Parse(scanner, fields, fieldVal) if len(errs) > 0 { break } if scanner.Peek() == sc.EOF { break } if !expect(scanner, ',', "comma") { break } } } if tok := scanner.Scan(); tok != sc.EOF { scanner.Error(scanner, fmt.Sprintf("extra arguments provided: %q", fields[scanner.Position.Offset:])) } if d.Strict { for argName, arg := range d.Fields { if _, wasSeen := seen[argName]; !wasSeen && !arg.Optional { scanner.Error(scanner, fmt.Sprintf("missing argument %q", argName)) } } } return out.Interface(), loader.MaybeErrList(errs) } // MakeDefinition constructs a definition from a name, type, and the output type. // All such definitions are strict by default. If a struct is passed as the output // type, its public fields will automatically be populated into Fields (and similar // fields in Definition). Other values will have a single, empty-string-named Fields // entry. func MakeDefinition(name string, target TargetType, output interface{}) (*Definition, error) { def := &Definition{ Name: name, Target: target, Output: reflect.TypeOf(output), Strict: true, } if err := def.loadFields(); err != nil { return nil, err } return def, nil } // splitMarker takes a marker in the form of `+a:b:c=arg,d=arg` and splits it // into the name (`a:b`), the name if it's not a struct (`a:b:c`), and the parts // that are definitely fields (`arg,d=arg`). func splitMarker(raw string) (name string, anonymousName string, restFields string) { raw = raw[1:] // get rid of the leading '+' nameFieldParts := strings.SplitN(raw, "=", 2) if len(nameFieldParts) == 1 { return nameFieldParts[0], nameFieldParts[0], "" } anonymousName = nameFieldParts[0] name = anonymousName restFields = nameFieldParts[1] nameParts := strings.Split(name, ":") if len(nameParts) > 1 { name = strings.Join(nameParts[:len(nameParts)-1], ":") } return name, anonymousName, restFields } // Registry keeps track of registered definitions, and allows for easy lookup. // It's thread-safe, and the zero-value can be safely used. 
type Registry struct { forPkg map[string]*Definition forType map[string]*Definition forField map[string]*Definition mu sync.RWMutex initOnce sync.Once } func (r *Registry) init() { r.initOnce.Do(func() { if r.forPkg == nil { r.forPkg = make(map[string]*Definition) } if r.forType == nil { r.forType = make(map[string]*Definition) } if r.forField == nil { r.forField = make(map[string]*Definition) } }) } // Define defines a new marker with the given name, target, and output type. // It's a shortcut around // r.Register(MakeDefinition(name, target, obj)) func (r *Registry) Define(name string, target TargetType, obj interface{}) error { def, err := MakeDefinition(name, target, obj) if err != nil { return err } return r.Register(def) } // Register registers the given marker definition with this registry for later lookup. func (r *Registry) Register(def *Definition) error { r.init() r.mu.Lock() defer r.mu.Unlock() switch def.Target { case DescribesPackage: r.forPkg[def.Name] = def case DescribesType: r.forType[def.Name] = def case DescribesField: r.forField[def.Name] = def default: return fmt.Errorf("unknown target type %v", def.Target) } return nil } // Lookup fetches the definition corresponding to the given name and target type. func (r *Registry) Lookup(name string, target TargetType) *Definition { r.init() r.mu.RLock() defer r.mu.RUnlock() switch target { case DescribesPackage: return tryAnonLookup(name, r.forPkg) case DescribesType: return tryAnonLookup(name, r.forType) case DescribesField: return tryAnonLookup(name, r.forField) default: return nil } } // AllDefinitions returns all marker definitions known to this registry. func (r *Registry) AllDefinitions() []*Definition { res := make([]*Definition, 0, len(r.forPkg)+len(r.forType)+len(r.forField)) for _, def := range r.forPkg { res = append(res, def) } for _, def := range r.forType { res = append(res, def) } for _, def := range r.forField { res = append(res, def) } return res } // tryAnonLookup tries looking up the given marker as both an struct-based // marker and an anonymous marker, returning whichever format matches first, // preferring the longer (anonymous) name in case of conflicts. func tryAnonLookup(name string, defs map[string]*Definition) *Definition { // NB(directxman12): we look up anonymous names first to work with // legacy style marker definitions that have a namespaced approach // (e.g. deepcopy-gen, which uses `+k8s:deepcopy-gen=foo,bar` *and* // `+k8s.io:deepcopy-gen:interfaces=foo`). name, anonName, _ := splitMarker(name) if def, exists := defs[anonName]; exists { return def } return defs[name] } // Must panics on errors creating definitions. func Must(def *Definition, err error) *Definition { if err != nil { panic(err) } return def }
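// A minimal, hypothetical usage sketch for this package (the marker name and
// output struct are invented for illustration; only the Define/Lookup/Parse
// API shown above is assumed):
//
//	type exampleMarker struct {
//		Interfaces string `marker:"interfaces,optional"`
//		Register   bool
//	}
//
//	reg := &Registry{}
//	if err := reg.Define("example:gen", DescribesType, exampleMarker{}); err != nil {
//		panic(err)
//	}
//	raw := "+example:gen:interfaces=foo,register=true"
//	if def := reg.Lookup(raw, DescribesType); def != nil {
//		val, err := def.Parse(raw) // on success, val.(exampleMarker) holds the fields
//		_, _ = val, err
//	}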
40 of the Best Street Photos of India by Indian Photographers

Halla_photo_contests · Oct 23, 2017

Indian streets are perfect for street photography: on the street you can see the colorful and unique life of India. So, for our STREETS OF INDIA photo contest, we asked photographers to submit one real, unstaged image capturing the moments, people, and faces you see on the streets. So far we have received more than 100 entries. We've put together a collection of 40 of our favorite street photographs. The list covers amazing amateur photographers from all over India. Through these photos, the photographers represent their beautiful hometowns or share their impressions while traveling. It's always inspiring for us to see such a versatile album of fantastic photographs, and we hope you're inspired by this collection as well.

1. Mystery About Street…Photo by Sumeet Rajak
You never know what you can get on the streets, as there is a subject everywhere and you only need the vision to capture it.
2. Festival time street…Photo by Harshali Jain
3. GAZE…Photo by Zulfikhar Ahmed
Shri Vitthal Birdev Annual Yatra 2017.
4. Streets of India…Photo by Suraj Maurya
Those leading lines.
5. Bubble girl…Photo by Komal Dhayagonde
I was chasing bubbles and ended up with this shot in Colaba, Mumbai.
6. The lone spectator…Photo by Kingshuk Mukherjee
A homeless man observes the daily monotonous life on the streets.
7. The Bubbleman…Photo by Rajat Kumar Singh
8. Musafir…Photo by Jayesh Pawar
9. Streets of India…Photo by Deepesh Arya
This picture was clicked on the streets of Kota city; it's a candid moment that I captured.
10. HOUSEWIFE…Photo by Jackie Prajapati
Ladies bring dual water to their families.
11. Graffiti…Photo by SANCHARI MITRA
Photo taken at the ghats of Benaras, India.
12. FEEDING HOUR…Photo by Chinmoy Biswas
A man is feeding the pigeons by the road.
13. Life with life…Photo by Ankush Jangid
14. Joy of rhythm…Photo by Riddhi Trivedi
A picture that does justice to the adrenaline rush of people during Ganesh Chaturthi in Maharashtra, especially in Pune.
15. Family affairs…Photo by Partha Pratim Saha
16. City of Joy…Photo by Debashis Mukherjee
Chitpur Road, a historical road of old Kolkata.
17. Attraction…Photo by Ghanshyam Kahar
Glamour attracts everyone now.
18. Street photography…Photo by Chirag Kabira
19. Joyride…Photo by Omesh Meena
I captured this moment from the roof of my home as these three children enjoyed their fun ride. It reflects their joy of togetherness and the true spirit of childhood, where every moment is shared with friends. It also shows the need for public recreation space, because riding on the road poses the threat of accidents.
20. Don't watch the clock; do what it does. KEEP GOING…Photo by Dhrumil Pandya
Make a lifetime commitment to someone who respects your relationship with GOD.
21. Rath Yatra…Photo by Arjit Chowdhury
Children are ready to blow the Rath.
22. Mother…Photo by Kausik Paul
This photograph was taken at Kumartuli, Kolkata. An artist was making a Durga idol before the puja when an old lady passed by; I captured the contrasting theme of two mothers.
23. Streets of Indore…Photo by Er Avi Nitin Jain
24. Streets of India…Photo by Avijit Chakraborty
Reflections on the streets of India.
25. Everyone is busy…Photo by Abhishek Verma
The people in the frame are daily-wage laborers who live on the roadside.
26. Basic needs… Photo by Harshal Barot
Basic Indian needs: some people go without this kind of food and we must help them, some are happy with it, and many simply waste it. :(

27. STREET OF CALCUTTA… Photo by Subhadip Sarkar

28. Dadar, the heart of Mumbai… Photo by Ravi Teja
One of the busiest streets of Mumbai.

29. Streets of Kolkata… Photo by Pranay Pariyar
A slow-shutter shot taken in the streets of Kumartuli in Kolkata.

30. Gateway of India… Photo by Vaibhav Sutar
A monsoon click, Mumbai.

31. Sudden Attack… Photo by Sudipta Das
A man passing in front of the lion during Durga Puja makes a perfect juxtaposition on the streets of Kolkata.

32. See new, click new… Photo by Bhagyesh Patel

33. UNWANTED TROUBLE… Photo by Arghya Bhakta

34. READY TO MOVE… Photo by Ahilan Chinnadurai
Autorickshaws waiting for passengers on a hot summer afternoon near Kothandaramaswamy Temple in Ramanathapuram district, Tamil Nadu.

35. Streets of Mumbai. We live our dreams here… Photo by Vrinda Agrawal

36. God Art vs Human Art… Photo by Sachin Chauhan
I clicked this photograph in the streets of Chandni Chowk.

37. A tale of twin cities… Photo by Sayan Raha
Clicked from the rooftop of a building; the twin cities are Kolkata and Howrah.

38. Virtual Photographer… Photo by Swapnil Trivedi
It happens only on the streets of India: this little child was capturing me with his virtual camera.

39. Mumbai rain or pain… Photo by Pratik Y Kerkar
It is raining even after the season has ended.

40. Beauty And The Beast… Photo by Ayush
I took this on a recent photowalk. I was amazed by the beauty of the sunset but deeply saddened by the litter all around; people were busy living their lives, utterly indifferent to the surroundings.
If any NBA players want to follow in Colin Kaepernick's footsteps and kneel during the national anthem this season, they could be subject to discipline from the league. Citing the league's rule stating players must stand for the anthem, NBA commissioner Adam Silver recently said, "My expectation is that our players will continue to stand for the anthem." Now, the NBA has sent a memo (the existence of which CBS Sports can confirm via league sources) to teams reinforcing the league's expectation that players stand for the national anthem. Specifically, the league wanted to clarify to teams that they are not able to waive the rule for any of their players, and the league will handle any discipline should a player not stand for the anthem. Via ESPN: The NBA league office late Friday sent a memo to teams reinforcing the rule that players and coaches must stand for the national anthem, and suggesting other ways in which teams might address the recent protest movement sweeping across the NFL and other sports, according to a copy of the memo obtained by ESPN. The memo, distributed by deputy commissioner Mark Tatum, instructs teams that "the league office will determine how to deal with any possible instance in which a player, coach, or trainer does not stand for the anthem." The memo states that individual teams "do not have the discretion to waive" the rule that players, coaches, and staff must stand for the anthem. The league has the discretion to discipline players who violate the rule. In addition, the memo outlined alternative measures, such as pre-game videos, PSAs, or an address to the crowd by a player or coach that teams could use to promote unity. In the memo, Tatum suggests teams might address the current political climate by having players and coaches give a joint pregame address. "This could include a message of unity and how the team is committed to bringing the community together this season," the memo states. The memo also suggests teams might prepare a video tribute or public service announcement featuring "team leadership speaking about the issues they care about." Throughout the past weeks, NBA players, including stars such as LeBron James, Stephen Curry and Chris Paul, have grown more and more comfortable in criticizing both President Donald Trump and the current societal and political state of the country. Should a player or team decide to protest during the anthem, it will be quite interesting to see how the NBA handles it, and what punishment it decides to hand out.
def clock_angle(h, m):
    """Return the smaller angle (in degrees) between the hour and minute hands.

    The minute hand moves 6 degrees per minute (360/60); the hour hand moves
    0.5 degrees per minute (360 degrees / 12 hours / 60 minutes).
    """
    h_angle = 0.5 * (60 * (h % 12) + m)  # hour hand, measured from 12 o'clock
    m_angle = 6 * m                      # minute hand, measured from 12 o'clock
    angle = abs(h_angle - m_angle)
    return min(angle, 360 - angle)       # report the smaller of the two arcs


print('angle between hour and minute hands for 5 hour 24 minute =',
      clock_angle(5, 24))
# expected output:
# angle between hour and minute hands for 5 hour 24 minute = 18.0
Ottawa bystanders attempt to render first aid to shooting victim

President Obama says he's "shaken" by this week's violent attacks on three soldiers in Ottawa by an Islamic jihadist. He immediately phoned Canadian Prime Minister Stephen Harper to offer support and "solidarity." He vowed to "remain vigilant." Too bad Obama didn't show the same resolve after multiple attacks and plots against our troops by Muslim terrorists on our soil. And I'm not just talking about the "workplace violence" of jihadist Nidal Hasan, whose Koran-inspired Fort Hood rampage took the lives of 13 American servicemen and servicewomen and one unborn baby. An entire parade of infidel-hating fanatics targeted U.S. soldiers long before Islamic State barbarians issued threats against our military personnel and their families this fall. What happened in Canada—what ISIS wants worldwide—has been happening here for years under Barry-come-lately's watch.

Where was President Obama when Muslim convert Muhammad Hussain was arrested in Maryland in 2010 after scheming to blow up an Army recruitment office to avenge his "Muslim brothers and sisters"? Hussain's message: "Whoever joins the military, they will be killed." He planned to "blow one recruiting center up ... then we hit another ... and just keep it movin' ... Insha' Allah. ... Insha' Allah. ... Do it for jihad." Next, Hussain told informants he would take on Andrews Air Force Base, blow it up and then take over the homes of military personnel.

Where was President Obama when Ethiopian-born Muslim Marine reservist Jonathan Melaku—shouting "Allahu Akbar"—fired shots at the Marine Corps Museum in D.C., the Pentagon, a vacant Marine Corps recruiting station in Chantilly, Va., and a Coast Guard recruiting station in Woodbridge, Va., during a months-long jihad campaign in 2010-2011?

Where was President Obama when Muslim Pvt. Naser Jason Abdo, who went AWOL from Fort Campbell, Ky., was arrested in 2011 with explosives, a gun and jihadi propaganda? Abdo, who shouted, "Nidal Hasan Fort Hood 2009!" after his arrest, planned to attack soldiers at a restaurant near the base.

Where was President Obama when a gang of Islamic thugs in Newburgh, N.Y., was arrested plotting to "bring death to Jews" at nearby synagogues and "commit jihad" by shooting down planes at the local Air National Guard base with Stinger surface-to-air guided missiles?

Where was President Obama in June 2011 when Seattle jihadists Abu Khalid Abdul-Latif, also known as Joseph Anthony Davis, and Walli Mujahidh, also known as Frederick Domingue Jr., were busted after plotting to attack the Military Entrance Processing Station in Seattle in a shooting and grenade spree? "Imagine how many young Muslims, if we're successful, will try to hit these kinds of centers," Abdul-Latif (an admirer of Fort Hood killer Nidal Hasan) exulted. "Imagine how fearful America will be, and they'll know they can't push the Muslims around." His bloodthirsty agenda: "Hopefully there will be more soldiers who come out of the woodwork to serve Allah."

Where was President Obama when Muslim convert Muhammad Yusuf (a.k.a. Jose Pimentel) was caught by New York City police in 2011 building a pipe bomb he planned to use to kill police and U.S. soldiers returning from war in Iraq and Afghanistan? Yusuf declared that he would wage holy war in the Big Apple to show that "there (were) mujahideen in the city ready to fight jihad here."
Where was President Obama when Rezwan Ferdaus was arrested in Framingham, Mass., in 2011 while planning an attack on the Pentagon and the U.S. Capitol with explosives-packed model airplanes, automatic weapons and grenades? Ferdaus declared his intent to "disable their military center," "cut off the military" and then "take care of the politicians." Where was President Obama after an Islamic terrorist ring in Raleigh, N.C., got busted in 2009 plotting to bomb military installations and die "as martyrs in furtherance of violent jihad"? They received overseas training, conducted surveillance of the Quantico, Va., Marine base and conspired to kidnap, maim and kill American targets abroad, as well. And where was President Obama in June 2009 when Abdul Hakim Mujahid Muhammad walked into an Arkansas Army recruiting center, murdered 24-year-old Pvt. William Long and gravely wounded 18-year-old Pvt. Quinton Ezeagwula? The Little Rock jihadist had planned on killing many more "infidel forces" in the name of Allah. "The U.S. has to pay for the rape, murder, bloodshed, blasphemy it has done and still doing to the Muslims and Islam," Muhammad warned. "So consider this a small retaliation the best is to come Allah willing. This is not the first attack and won't be the last." It took three days for the White House to issue a pathetic politically correct statement expressing "sadness" over the attack, which Obama opaquely described as a "senseless act of violence" (instead of the intentional, systematic act of Islamic terrorism that it was). In the same week, the Obama administration issued an immediate condemnation and statement of "outrage" over the shooting death of late-term abortionist George Tiller. Where was Obama? Sabotaging our borders, restricting our gun rights, working to free Gitmo jihadists, decrying Islamophobia, demonizing conservatives, welcoming jihad sympathizers to the White House and putting politics over national security. Now "shaken" over the death of a Canadian soldier killed by a jihadist, our president has barely stirred in response to the homegrown Islamic terror campaign against our men and women in uniform under his own aloof nose.
package com.ac.reserve.web.api.service.impl; import com.ac.reserve.web.api.po.Bill; import com.ac.reserve.web.api.mapper.BillMapper; import com.ac.reserve.web.api.service.BillService; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import org.springframework.stereotype.Service; import org.springframework.beans.factory.annotation.Autowired; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Service public class BillServiceImpl extends ServiceImpl<BillMapper, Bill> implements BillService { private static final Logger logger = LoggerFactory.getLogger(BillServiceImpl.class); @Autowired private BillMapper billMapper; }
import unittest
from collections import deque

from .convert_sorted_array_to_binary_search_tree import Solution, TreeNode


class Test(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        cls.sol = Solution()

    def get_list(self, root: TreeNode):
        stack, val = deque([root]), []
        while len(stack) != 0:
            node = stack.popleft()
            val.append(node.val)
            if node.left:
                stack.append(node.left)
            if node.right:
                stack.append(node.right)
        return val

    def test_example1(self):
        nums = [-10, -3, 0, 5, 9]
        tree = self.sol.sortedArrayToBST(nums)
        self.assertEqual(self.get_list(tree), [0, -10, 5, -3, 9])
import { getVShapes } from './getVShapes'; function normalizeSpid(spid: string): string { const [, , id] = spid.split('_'); return id; } export function getVShapeSpid(document: Document, element: Element): string | null { if (element.tagName === 'IMG') { const vShapeId = element.getAttribute('v:shapes'); const vShapes = getVShapes(document); if (!vShapeId) { return null; } const vShapeSpid = vShapes[vShapeId]; if (vShapeSpid) { return normalizeSpid(vShapeSpid); } if ( element.parentElement && element.parentElement.parentElement && element.parentElement.parentElement.innerHTML.indexOf('msEquation') >= 0 ) { return null; } return normalizeSpid(vShapeId); } if (!element.parentElement) { return null; } const spid = element.parentElement.getAttribute('o:spid'); if (spid) { return normalizeSpid(spid); } return spid; }
# Module-level imports required by this method:
from typing import List

from nltk.corpus import stopwords


def remove_stop_words(self, text: List[str]) -> List[str]:
    """Filter English stop words and domain-specific noise words out of a token list."""
    stop_words = stopwords.words('english')
    # number words
    stop_words += ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven',
                   'eight', 'nine', 'ten', 'hundred', 'thousand', 'and']
    # domain-specific noise words
    stop_words += ['network', 'install', 'run', 'file', 'use', 'result', 'paper',
                   'python', 'using', 'code', 'model', 'train', 'implementation', 'use']
    stop_words += ['data', 'dataset', 'example', 'build', 'learn', 'download', 'obj']
    return [word for word in text if word not in stop_words]
/* * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import * as React from 'react'; import PipelineList from './PipelineList'; import TestUtils from '../TestUtils'; import { ApiPipeline } from '../apis/pipeline'; import { Apis } from '../lib/Apis'; import { PageProps } from './Page'; import { RoutePage, RouteParams } from '../components/Router'; import { shallow, ReactWrapper, ShallowWrapper } from 'enzyme'; import { range } from 'lodash'; import { ImportMethod } from '../components/UploadPipelineDialog'; describe('PipelineList', () => { let tree: ReactWrapper | ShallowWrapper; const updateBannerSpy = jest.fn(); const updateDialogSpy = jest.fn(); const updateSnackbarSpy = jest.fn(); const updateToolbarSpy = jest.fn(); const listPipelinesSpy = jest.spyOn(Apis.pipelineServiceApi, 'listPipelines'); const createPipelineSpy = jest.spyOn(Apis.pipelineServiceApi, 'createPipeline'); const deletePipelineSpy = jest.spyOn(Apis.pipelineServiceApi, 'deletePipeline'); const uploadPipelineSpy = jest.spyOn(Apis, 'uploadPipeline'); function generateProps(): PageProps { return { history: {} as any, location: '' as any, match: '' as any, toolbarProps: PipelineList.prototype.getInitialToolbarState(), updateBanner: updateBannerSpy, updateDialog: updateDialogSpy, updateSnackbar: updateSnackbarSpy, updateToolbar: updateToolbarSpy, }; } async function mountWithNPipelines(n: number): Promise<ReactWrapper> { listPipelinesSpy.mockImplementationOnce(() => ({ pipelines: range(n).map(i => ({ id: 'test-pipeline-id' + i, name: 'test pipeline name' + i })), })); tree = TestUtils.mountWithRouter(<PipelineList {...generateProps()} />); await listPipelinesSpy; await TestUtils.flushPromises(); tree.update(); // Make sure the tree is updated before returning it return tree; } beforeEach(() => { jest.clearAllMocks(); }); afterEach(async () => { // unmount() should be called before resetAllMocks() in case any part of the unmount life cycle // depends on mocks/spies await tree.unmount(); jest.resetAllMocks(); }); it('renders an empty list with empty state message', () => { tree = shallow(<PipelineList {...generateProps()} />); expect(tree).toMatchSnapshot(); }); it('renders a list of one pipeline', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ pipelines: [{ created_at: new Date(2018, 8, 22, 11, 5, 48), description: 'test pipeline description', name: 'pipeline1', parameters: [], } as ApiPipeline] }); await listPipelinesSpy; expect(tree).toMatchSnapshot(); }); it('renders a list of one pipeline with no description or created date', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ pipelines: [{ name: 'pipeline1', parameters: [], } as ApiPipeline] }); await listPipelinesSpy; expect(tree).toMatchSnapshot(); }); it('renders a list of one pipeline with error', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ pipelines: [{ created_at: new Date(2018, 8, 22, 11, 5, 48), description: 'test pipeline description', 
error: 'oops! could not load pipeline', name: 'pipeline1', parameters: [], } as ApiPipeline] }); await listPipelinesSpy; expect(tree).toMatchSnapshot(); }); it('calls Apis to list pipelines, sorted by creation time in descending order', async () => { listPipelinesSpy.mockImplementationOnce(() => ({ pipelines: [{ name: 'pipeline1' }] })); tree = TestUtils.mountWithRouter(<PipelineList {...generateProps()} />); await listPipelinesSpy; expect(listPipelinesSpy).toHaveBeenLastCalledWith('', 10, 'created_at desc', ''); expect(tree.state()).toHaveProperty('pipelines', [{ name: 'pipeline1' }]); }); it('has a Refresh button, clicking it refreshes the pipeline list', async () => { tree = await mountWithNPipelines(1); const instance = tree.instance() as PipelineList; expect(listPipelinesSpy.mock.calls.length).toBe(1); const refreshBtn = instance.getInitialToolbarState().actions.find(b => b.title === 'Refresh'); expect(refreshBtn).toBeDefined(); await refreshBtn!.action(); expect(listPipelinesSpy.mock.calls.length).toBe(2); expect(listPipelinesSpy).toHaveBeenLastCalledWith('', 10, 'created_at desc', ''); expect(updateBannerSpy).toHaveBeenLastCalledWith({}); }); it('shows error banner when listing pipelines fails', async () => { TestUtils.makeErrorResponseOnce(listPipelinesSpy, 'bad stuff happened'); tree = TestUtils.mountWithRouter(<PipelineList {...generateProps()} />); await listPipelinesSpy; await TestUtils.flushPromises(); expect(updateBannerSpy).toHaveBeenLastCalledWith(expect.objectContaining({ additionalInfo: 'bad stuff happened', message: 'Error: failed to retrieve list of pipelines. Click Details for more information.', mode: 'error', })); }); it('shows error banner when listing pipelines fails after refresh', async () => { tree = TestUtils.mountWithRouter(<PipelineList {...generateProps()} />); const instance = tree.instance() as PipelineList; const refreshBtn = instance.getInitialToolbarState().actions.find(b => b.title === 'Refresh'); expect(refreshBtn).toBeDefined(); TestUtils.makeErrorResponseOnce(listPipelinesSpy, 'bad stuff happened'); await refreshBtn!.action(); expect(listPipelinesSpy.mock.calls.length).toBe(2); expect(listPipelinesSpy).toHaveBeenLastCalledWith('', 10, 'created_at desc', ''); expect(updateBannerSpy).toHaveBeenLastCalledWith(expect.objectContaining({ additionalInfo: 'bad stuff happened', message: 'Error: failed to retrieve list of pipelines. Click Details for more information.', mode: 'error', })); }); it('hides error banner when listing pipelines fails then succeeds', async () => { TestUtils.makeErrorResponseOnce(listPipelinesSpy, 'bad stuff happened'); tree = TestUtils.mountWithRouter(<PipelineList {...generateProps()} />); const instance = tree.instance() as PipelineList; await listPipelinesSpy; await TestUtils.flushPromises(); expect(updateBannerSpy).toHaveBeenLastCalledWith(expect.objectContaining({ additionalInfo: 'bad stuff happened', message: 'Error: failed to retrieve list of pipelines. 
Click Details for more information.',
      mode: 'error',
    }));
    updateBannerSpy.mockReset();

    const refreshBtn = instance.getInitialToolbarState().actions.find(b => b.title === 'Refresh');
    listPipelinesSpy.mockImplementationOnce(() => ({ pipelines: [{ name: 'pipeline1' }] }));
    await refreshBtn!.action();
    expect(listPipelinesSpy.mock.calls.length).toBe(2);
    expect(updateBannerSpy).toHaveBeenLastCalledWith({});
  });

  it('renders pipeline names as links to their details pages', async () => {
    tree = await mountWithNPipelines(1);
    const link = tree.find('a[children="test pipeline name0"]');
    expect(link).toHaveLength(1);
    expect(link.prop('href')).toBe(RoutePage.PIPELINE_DETAILS.replace(
      ':' + RouteParams.pipelineId + '?', 'test-pipeline-id0'
    ));
  });

  it('always has upload pipeline button enabled', async () => {
    tree = await mountWithNPipelines(1);
    const calls = updateToolbarSpy.mock.calls[0];
    expect(calls[0].actions.find((b: any) => b.title === 'Upload pipeline')).not.toHaveProperty('disabled');
  });

  it('enables delete button when one pipeline is selected', async () => {
    tree = await mountWithNPipelines(1);
    tree.find('.tableRow').simulate('click');
    expect(updateToolbarSpy.mock.calls).toHaveLength(2); // Initial call, then selection update
    const calls = updateToolbarSpy.mock.calls[1];
    expect(calls[0].actions.find((b: any) => b.title === 'Delete')).toHaveProperty('disabled', false);
  });

  it('enables delete button when two pipelines are selected', async () => {
    tree = await mountWithNPipelines(2);
    tree.find('.tableRow').at(0).simulate('click');
    tree.find('.tableRow').at(1).simulate('click');
    expect(updateToolbarSpy.mock.calls).toHaveLength(3); // Initial call, then selection updates
    const calls = updateToolbarSpy.mock.calls[2];
    expect(calls[0].actions.find((b: any) => b.title === 'Delete')).toHaveProperty('disabled', false);
  });

  it('re-disables delete button when pipelines are unselected', async () => {
    tree = await mountWithNPipelines(1);
    tree.find('.tableRow').at(0).simulate('click');
    tree.find('.tableRow').at(0).simulate('click');
    expect(updateToolbarSpy.mock.calls).toHaveLength(3); // Initial call, then selection updates
    const calls = updateToolbarSpy.mock.calls[2];
    expect(calls[0].actions.find((b: any) => b.title === 'Delete')).toHaveProperty('disabled', true);
  });

  it('shows delete dialog when delete button is clicked', async () => {
    tree = await mountWithNPipelines(1);
    tree.find('.tableRow').at(0).simulate('click');
    const deleteBtn = (tree.instance() as PipelineList)
      .getInitialToolbarState().actions.find(b => b.title === 'Delete');
    await deleteBtn!.action();
    const call = updateDialogSpy.mock.calls[0][0];
    expect(call).toHaveProperty('title', 'Delete 1 pipeline?');
  });

  it('shows delete dialog when delete button is clicked, indicating several pipelines to delete', async () => {
    tree = await mountWithNPipelines(5);
    tree.find('.tableRow').at(0).simulate('click');
    tree.find('.tableRow').at(2).simulate('click');
    tree.find('.tableRow').at(3).simulate('click');
    const deleteBtn = (tree.instance() as PipelineList)
      .getInitialToolbarState().actions.find(b => b.title === 'Delete');
    await deleteBtn!.action();
    const call = updateDialogSpy.mock.calls[0][0];
    expect(call).toHaveProperty('title', 'Delete 3 pipelines?');
  });

  it('does not call delete API for selected pipeline when delete dialog is canceled', async () => {
    tree = await mountWithNPipelines(1);
    tree.find('.tableRow').at(0).simulate('click');
    const deleteBtn = (tree.instance() as PipelineList)
      .getInitialToolbarState().actions.find(b => b.title === 'Delete');
    await
deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const cancelBtn = call.buttons.find((b: any) => b.text === 'Cancel'); await cancelBtn.onClick(); expect(deletePipelineSpy).not.toHaveBeenCalled(); }); it('calls delete API for selected pipeline after delete dialog is confirmed', async () => { tree = await mountWithNPipelines(1); tree.find('.tableRow').at(0).simulate('click'); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(deletePipelineSpy).toHaveBeenLastCalledWith('test-pipeline-id0'); }); it('updates the selected indices after a pipeline is deleted', async () => { tree = await mountWithNPipelines(5); tree.find('.tableRow').at(0).simulate('click'); expect(tree.state()).toHaveProperty('selectedIds', ['test-pipeline-id0']); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(tree.state()).toHaveProperty('selectedIds', []); }); it('updates the selected indices after multiple pipelines are deleted', async () => { tree = await mountWithNPipelines(5); tree.find('.tableRow').at(0).simulate('click'); tree.find('.tableRow').at(3).simulate('click'); expect(tree.state()).toHaveProperty('selectedIds', ['test-pipeline-id0', 'test-pipeline-id3']); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(tree.state()).toHaveProperty('selectedIds', []); }); it('calls delete API for all selected pipelines after delete dialog is confirmed', async () => { tree = await mountWithNPipelines(5); tree.find('.tableRow').at(0).simulate('click'); tree.find('.tableRow').at(1).simulate('click'); tree.find('.tableRow').at(4).simulate('click'); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(deletePipelineSpy).toHaveBeenCalledTimes(3); expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0'); expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id1'); expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id4'); }); it('shows snackbar confirmation after pipeline is deleted', async () => { tree = await mountWithNPipelines(1); tree.find('.tableRow').at(0).simulate('click'); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(updateSnackbarSpy).toHaveBeenLastCalledWith({ message: 'Successfully deleted 1 pipeline!', open: true, }); }); it('shows error dialog when pipeline deletion fails', async () => { tree = await mountWithNPipelines(1); 
tree.find('.tableRow').at(0).simulate('click'); TestUtils.makeErrorResponseOnce(deletePipelineSpy, 'woops, failed'); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); const lastCall = updateDialogSpy.mock.calls[1][0]; expect(lastCall).toMatchObject({ content: 'Deleting pipeline: test pipeline name0 failed with error: "woops, failed"', title: 'Failed to delete 1 pipeline', }); }); it('shows error dialog when multiple pipeline deletions fail', async () => { tree = await mountWithNPipelines(5); tree.find('.tableRow').at(0).simulate('click'); tree.find('.tableRow').at(2).simulate('click'); tree.find('.tableRow').at(1).simulate('click'); tree.find('.tableRow').at(3).simulate('click'); deletePipelineSpy.mockImplementation(id => { if (id.indexOf(3) === -1 && id.indexOf(2) === -1) { throw { text: () => Promise.resolve('woops, failed!'), }; } }); const deleteBtn = (tree.instance() as PipelineList) .getInitialToolbarState().actions.find(b => b.title === 'Delete'); await deleteBtn!.action(); const call = updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); // Should show only one error dialog for both pipelines (plus once for confirmation) expect(updateDialogSpy).toHaveBeenCalledTimes(2); const lastCall = updateDialogSpy.mock.calls[1][0]; expect(lastCall).toMatchObject({ content: 'Deleting pipeline: test pipeline name0 failed with error: "woops, failed!"\n\n' + 'Deleting pipeline: test pipeline name1 failed with error: "woops, failed!"', title: 'Failed to delete 2 pipelines', }); // Should show snackbar for the one successful deletion expect(updateSnackbarSpy).toHaveBeenLastCalledWith({ message: 'Successfully deleted 2 pipelines!', open: true, }); }); it('shows upload dialog when upload button is clicked', async () => { tree = await mountWithNPipelines(0); const instance = tree.instance() as PipelineList; const uploadBtn = instance.getInitialToolbarState().actions.find(b => b.title === 'Upload pipeline'); expect(uploadBtn).toBeDefined(); await uploadBtn!.action(); expect(instance.state).toHaveProperty('uploadDialogOpen', true); }); it('dismisses the upload dialog', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', false); tree.update(); expect(tree.state()).toHaveProperty('uploadDialogOpen', false); }); it('does not try to upload if the upload dialog dismissed', async () => { tree = shallow(<PipelineList {...generateProps()} />); const handlerSpy = jest.spyOn(tree.instance() as any, '_uploadDialogClosed'); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', false); expect(handlerSpy).toHaveBeenLastCalledWith(false); expect(uploadPipelineSpy).not.toHaveBeenCalled(); }); it('does not try to upload if import method is local and no file is returned from upload dialog', async () => { tree = shallow(<PipelineList {...generateProps()} />); const handlerSpy = jest.spyOn(tree.instance() as any, '_uploadDialogClosed'); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', null, '', ImportMethod.LOCAL); expect(handlerSpy).toHaveBeenLastCalledWith(true, 'some name', null, '', 
ImportMethod.LOCAL); expect(uploadPipelineSpy).not.toHaveBeenCalled(); }); it('does not try to upload if import method is url and no url is returned from upload dialog', async () => { tree = shallow(<PipelineList {...generateProps()} />); const handlerSpy = jest.spyOn(tree.instance() as any, '_uploadDialogClosed'); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', null, '', ImportMethod.URL); expect(handlerSpy).toHaveBeenLastCalledWith(true, 'some name', null, '', ImportMethod.URL); expect(uploadPipelineSpy).not.toHaveBeenCalled(); }); it('tries to upload if import method is local and a file is returned from upload dialog', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', { body: 'something' }, '', ImportMethod.LOCAL); tree.update(); await createPipelineSpy; await uploadPipelineSpy; expect(uploadPipelineSpy).toHaveBeenLastCalledWith('some name', { body: 'something' }); expect(createPipelineSpy).not.toHaveBeenCalled(); // Check the dialog is closed expect(tree.state()).toHaveProperty('uploadDialogOpen', false); }); it('shows error dialog and does not dismiss upload dialog when upload fails', async () => { TestUtils.makeErrorResponseOnce(uploadPipelineSpy, 'woops, could not upload'); tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', { body: 'something' }, '', ImportMethod.LOCAL); tree.update(); await uploadPipelineSpy; await TestUtils.flushPromises(); expect(uploadPipelineSpy).toHaveBeenLastCalledWith('some name', { body: 'something' }); expect(updateDialogSpy).toHaveBeenLastCalledWith(expect.objectContaining({ content: 'woops, could not upload', title: 'Failed to upload pipeline', })); // Check the dialog is not closed expect(tree.state()).toHaveProperty('uploadDialogOpen', true); }); it('tries to create a pipeline if import method is url and a url is returned from upload dialog', async () => { tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', null, 'https://some.url.com', ImportMethod.URL); tree.update(); await createPipelineSpy; await uploadPipelineSpy; expect(createPipelineSpy).toHaveBeenLastCalledWith({ name: '<NAME>', url: { pipeline_url: 'https://some.url.com' } }); expect(uploadPipelineSpy).not.toHaveBeenCalled(); // Check the dialog is closed expect(tree.state()).toHaveProperty('uploadDialogOpen', false); }); it('shows error dialog and does not dismiss upload dialog when create fails', async () => { TestUtils.makeErrorResponseOnce(createPipelineSpy, 'woops, could not create'); tree = shallow(<PipelineList {...generateProps()} />); tree.setState({ uploadDialogOpen: true }); tree.find('UploadPipelineDialog').simulate('close', true, 'some name', null, 'https://some.url.com', ImportMethod.URL); tree.update(); await uploadPipelineSpy; await TestUtils.flushPromises(); expect(createPipelineSpy).toHaveBeenLastCalledWith({ name: '<NAME>', url: { pipeline_url: 'https://some.url.com' } }); expect(updateDialogSpy).toHaveBeenLastCalledWith(expect.objectContaining({ content: 'woops, could not create', title: 'Failed to upload pipeline', })); // Check the dialog is not closed expect(tree.state()).toHaveProperty('uploadDialogOpen', true); }); });
In dozens of towns across Ohio, rival sides have set up phone banks and door-knocking efforts. Unions and their allies have created We Are Ohio, a group that is leading the repeal effort, which has 10,000 volunteers and hopes a victory will discourage Republicans in other states from adopting anti-union legislation. Mr. Kasich’s allies have created Building a Better Ohio, financed by business and conservative donors, to block repeal.

In many ways Senate Bill 5 goes further than the antibargaining law that Wisconsin’s Republican-led Legislature enacted in March over the protests of tens of thousands of union supporters. Ohio’s law allows only limited bargaining: If management and union do not reach a settlement, then city councils and school boards can impose their side’s final contract offer unilaterally. The Ohio law bans binding arbitration and bargaining on health coverage, pensions or staffing levels. It also requires government workers to pay at least 15 percent of their health insurance costs and pay 10 percent of their salaries toward their pensions.

The Ohio Senate president, Thomas E. Niehaus, who is campaigning against repeal, said, “These are reasonable reforms asking our public sector employees to do what private sector employees have been doing for decades: paying more for their health care and their pension benefits.” He denied that the bill eviscerated collective bargaining. “We are reforming collective bargaining,” he said.

But one prominent Republican opponent of Senate Bill 5, State Senator Bill Seitz, said the bill all but erased collective bargaining by letting management decide which side’s final offer would prevail. He said it was like “going to divorce court and finding out your wife’s father is the judge.”

A Quinnipiac poll in late September found that Ohioans favor repeal by a 13 percentage point margin, 51 percent to 38 percent, down from a 24 percentage point margin in July. Opponents of repeal say they have significantly narrowed the gap since they began broadcasting advertisements after Labor Day.

As one of their most powerful arguments, opponents of Senate Bill 5 say the law will jeopardize public safety by no longer letting firefighters and police bargain over staffing levels. A union-backed television commercial shows 3-year-old Zoey Quinn slumped on a Cincinnati firefighter’s arm as he rescued her from her family’s burning house. The advertisement warns that firefighters might not be able to save Zoey next time, because the law bars bargaining over minimum staffing levels.

Zoey’s great-grandmother, Marlene Quinn, says in the advertisement, “I don’t want the politicians in Columbus making decisions for the firefighters, police, teachers, nurses or any organization that’s helping people. Fewer firefighters can mean the difference between life or death.”

Ms. Quinn is now at the center of a controversy because the side fighting the repeal has run a broadcast advertisement using her words, without her permission, and then goes on to say that without Senate Bill 5 there will be more layoffs of firemen. Repeal supporters say that is twisting her sentiments.

Jeff Berding, a former member of the Cincinnati City Council and one of the most outspoken Democratic supporters of Senate Bill 5, said the union-backed ads were misleading, arguing that government officials would never ignore safety concerns.
“You’re not going to have elected officials do things that are terribly unfair to firefighters and police and jeopardizing public safety,” he said. “Officials want the firefighters and police endorsing them in the next election.” He said he backed Senate Bill 5 because “I want some options besides raising taxes and laying off police and firefighters.” He said government officials often had little recourse but to increase taxes or dismiss public employees, thereby cutting public services, because unions, usually dominated by more senior workers, often refuse to make concessions. Union leaders recognize that concessions would hurt the more senior workers, while in a financial shortfall resulting from unions’ not granting concessions, those laid off would be the more junior workers. Senate Bill 5 allows government to lay off workers outside of seniority.

Tim Burga, the president of the Ohio State A.F.L.-C.I.O., said Senate Bill 5 was an assault on the state’s 360,000 public employees and was based on numerous misunderstandings. He said that many government employees already contributed heavily toward their health and pension costs, and that many had accepted wage freezes to help cut budget deficits. “This shows that collective bargaining works,” Mr. Burga said. “I’ve never seen an issue catch fire like this. You look at the 1.3 million signatures we collected and the outpouring of opposition because this was such a blatant politically motivated attack. The energy is there on our side.”

Mack D. Mariani, a professor of political science at Xavier University in Cincinnati, said an unrelated issue on the Ohio ballot would hurt the repeal effort. The Tea Party is backing a proposed state constitutional amendment that would let Ohioans opt out of the mandatory requirements of the health law that Congressional Democrats enacted last year. That should lead to higher-than-normal conservative turnout, undercutting labor’s repeal effort. “Right now, the unions are definitely winning the ground game in terms of getting people on the streets, knocking on doors,” Mr. Mariani said. “If the unions win, it will certainly be seen as a big blow for Kasich.”
// helper function: clone the data attached to this string bool wxStringBase::AllocCopy(wxString& dest, int nCopyLen, int nCopyIndex) const { if ( nCopyLen == 0 ) { dest.Init(); } else { if ( !dest.AllocBuffer(nCopyLen) ) { return false; } memcpy(dest.m_pchData, m_pchData + nCopyIndex, nCopyLen*sizeof(wxChar)); } return true; }
/**
 * This util is for generating record fields with the given schema properties.
 */
public static List<Node> addRecordFields(List<String> required, Set<Map.Entry<String, Schema>> fields)
        throws BallerinaOpenApiException {

    List<Node> recordFieldList = new ArrayList<>();
    for (Map.Entry<String, Schema> field : fields) {
        String fieldNameStr = escapeIdentifier(field.getKey().trim());
        List<Node> schemaDoc = getFieldApiDocs(field.getValue());
        NodeList<Node> schemaDocNodes = createNodeList(schemaDoc);
        IdentifierToken fieldName = AbstractNodeFactory.createIdentifierToken(fieldNameStr);
        TypeDescriptorNode fieldTypeName = getTypeGenerator(field.getValue(), fieldNameStr)
                .generateTypeDescriptorNode();
        MarkdownDocumentationNode documentationNode = createMarkdownDocumentationNode(schemaDocNodes);
        MetadataNode metadataNode = createMetadataNode(documentationNode, createEmptyNodeList());
        if (required != null) {
            if (!required.contains(field.getKey().trim())) {
                if (field.getValue().getDefault() != null) {
                    Token defaultValue;
                    if ((field.getValue().getType()).equals(STRING)) {
                        if (field.getValue().getDefault().toString().trim().equals("\"")) {
                            defaultValue = AbstractNodeFactory.createIdentifierToken("\"" + "\\" +
                                    field.getValue().getDefault().toString() + "\"");
                        } else {
                            defaultValue = AbstractNodeFactory.createIdentifierToken("\"" +
                                    field.getValue().getDefault().toString() + "\"");
                        }
                    } else {
                        defaultValue = AbstractNodeFactory.createIdentifierToken
                                (field.getValue().getDefault().toString());
                    }
                    ExpressionNode expressionNode = createRequiredExpressionNode(defaultValue);
                    RecordFieldWithDefaultValueNode defaultNode = NodeFactory.createRecordFieldWithDefaultValueNode
                            (metadataNode, null, fieldTypeName, fieldName, createToken(EQUAL_TOKEN),
                                    expressionNode, createToken(SEMICOLON_TOKEN));
                    recordFieldList.add(defaultNode);
                } else {
                    RecordFieldNode recordFieldNode = NodeFactory.createRecordFieldNode(metadataNode, null,
                            fieldTypeName, fieldName, createToken(QUESTION_MARK_TOKEN),
                            createToken(SEMICOLON_TOKEN));
                    recordFieldList.add(recordFieldNode);
                }
            } else {
                RecordFieldNode recordFieldNode = NodeFactory.createRecordFieldNode(metadataNode, null,
                        fieldTypeName, fieldName, null, createToken(SEMICOLON_TOKEN));
                recordFieldList.add(recordFieldNode);
            }
        } else {
            RecordFieldNode recordFieldNode = NodeFactory.createRecordFieldNode(metadataNode, null,
                    fieldTypeName, fieldName, createToken(QUESTION_MARK_TOKEN), createToken(SEMICOLON_TOKEN));
            recordFieldList.add(recordFieldNode);
        }
    }
    return recordFieldList;
}
import { httpError } from '@midwayjs/core';

export class MultipartInvalidFilenameError extends httpError.BadRequestError {
  constructor(filename: string) {
    super(`Invalid update file name ${filename}, please check it`);
  }
}
// Copyright 2019-present Facebook Inc. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package schema

import (
	"bytes"
	"database/sql/driver"
	"encoding/gob"
	"fmt"
	"math/big"
	"net"
	"net/http"
	"net/url"
	"strings"
	"time"

	"entgo.io/ent"
	"entgo.io/ent/dialect"
	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/entc/integration/ent/role"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
)

// FieldType holds the schema definition for the FieldType entity.
// used for testing field types.
type FieldType struct {
	ent.Schema
}

// Fields of the File.
func (FieldType) Fields() []ent.Field { //nolint:funlen
	return []ent.Field{
		// ----------------------------------------------------------------------------
		// Basic types

		field.Int("int"),
		field.Int8("int8"),
		field.Int16("int16"),
		field.Int32("int32"),
		field.Int64("int64").
			UpdateDefault(func() int64 { return 100 }),
		field.Int("optional_int").
			Optional(),
		field.Int8("optional_int8").
			Optional(),
		field.Int16("optional_int16").
			Optional(),
		field.Int32("optional_int32").
			Optional(),
		field.Int64("optional_int64").
			Optional(),
		field.Int("nillable_int").
			Optional().
			Nillable(),
		field.Int8("nillable_int8").
			Optional().
			Nillable(),
		field.Int16("nillable_int16").
			Optional().
			Nillable(),
		field.Int32("nillable_int32").
			Optional().
			Nillable(),
		field.Int64("nillable_int64").
			Optional().
			Nillable(),
		field.Int32("validate_optional_int32").
			Optional().
			Max(100),
		field.Uint("optional_uint").
			Optional(),
		field.Uint8("optional_uint8").
			Optional(),
		field.Uint16("optional_uint16").
			Optional(),
		field.Uint32("optional_uint32").
			Optional(),
		field.Uint64("optional_uint64").
			Optional(),
		field.Enum("state").
			Values("on", "off").
			Optional(),
		field.Float("optional_float").
			Optional(),
		field.Float32("optional_float32").
			Optional(),

		// ----------------------------------------------------------------------------
		// Dialect-specific types

		field.Text("text").
			Optional().
			SchemaType(map[string]string{
				dialect.MySQL: "mediumtext",
			}),
		field.Time("datetime").
			Optional().
			SchemaType(map[string]string{
				dialect.MySQL:    "datetime",
				dialect.Postgres: "date",
			}),
		field.Float("decimal").
			Optional().
			SchemaType(map[string]string{
				dialect.MySQL:    "decimal(6,2)",
				dialect.Postgres: "numeric",
			}),
		field.Other("link_other", &Link{}).
			SchemaType(map[string]string{
				dialect.Postgres: "varchar",
				dialect.MySQL:    "varchar(255)",
				dialect.SQLite:   "varchar(255)",
			}).
			Optional().
			Default(DefaultLink()),
		field.Other("link_other_func", &Link{}).
			SchemaType(map[string]string{
				dialect.Postgres: "varchar",
				dialect.MySQL:    "varchar(255)",
				dialect.SQLite:   "varchar(255)",
			}).
			Optional().
			Default(DefaultLink),
		field.String("mac").
			Optional().
			GoType(MAC{}).
			SchemaType(map[string]string{
				dialect.Postgres: "macaddr",
			}).
			Validate(func(s string) error {
				_, err := net.ParseMAC(s)
				return err
			}),
		field.Other("string_array", Strings{}).
			Optional().
			SchemaType(map[string]string{
				dialect.Postgres: "text[]",
				dialect.SQLite:   "json",
				dialect.MySQL:    "blob",
			}),
		field.String("password").
			Optional().
			Sensitive().
			SchemaType(map[string]string{
				dialect.MySQL: "char(32)",
			}),

		// ----------------------------------------------------------------------------
		// Custom Go types

		field.String("string_scanner").
			GoType(StringScanner("")).
			Nillable().
			Optional(),
		field.Int64("duration").
			GoType(time.Duration(0)).
			UpdateDefault(func() time.Duration { return time.Duration(100) }).
			Optional(),
		field.String("dir").
			GoType(http.Dir("dir")).
DefaultFunc(func() http.Dir { return "unknown" }), field.String("ndir"). Optional(). Nillable(). NotEmpty(). GoType(http.Dir("ndir")), field.String("str"). Optional(). GoType(sql.NullString{}). DefaultFunc(func() sql.NullString { return sql.NullString{String: "default", Valid: true} }), field.String("null_str"). Optional(). Nillable(). GoType(&sql.NullString{}). DefaultFunc(func() *sql.NullString { return &sql.NullString{String: "default", Valid: true} }), field.String("link"). Optional(). NotEmpty(). GoType(Link{}), field.String("null_link"). Optional(). Nillable(). GoType(&Link{}), field.Bool("active"). Optional(). GoType(Status(false)), field.Bool("null_active"). Optional(). Nillable(). GoType(Status(false)), field.Bool("deleted"). Optional(). Nillable(). GoType(&sql.NullBool{}), field.Time("deleted_at"). Optional(). GoType(&sql.NullTime{}). Default(func() *sql.NullTime { return &sql.NullTime{Time: time.Now(), Valid: true} }). UpdateDefault(func() *sql.NullTime { return &sql.NullTime{Time: time.Now(), Valid: true} }), field.Bytes("raw_data"). Optional(). MaxLen(20). MinLen(3), field.Bytes("sensitive"). Optional(). Sensitive(), field.Bytes("ip"). Optional(). GoType(net.IP("127.0.0.1")). DefaultFunc(func() net.IP { return net.IP("127.0.0.1") }). Validate(func(i []byte) error { if net.ParseIP(string(i)) == nil { return fmt.Errorf("ent/schema: invalid ip %q", string(i)) } return nil }), field.Int("null_int64"). Optional(). GoType(&sql.NullInt64{}), field.Int("schema_int"). Optional(). GoType(Int(0)), field.Int8("schema_int8"). Optional(). GoType(Int8(0)), field.Int64("schema_int64"). Optional(). GoType(Int64(0)), field.Float("schema_float"). Optional(). GoType(Float64(0)), field.Float32("schema_float32"). Optional(). GoType(Float32(0)), field.Float("null_float"). Optional(). GoType(&sql.NullFloat64{}), field.Enum("role"). Default(string(role.Read)). GoType(role.Role("role")), field.Enum("priority"). Optional(). GoType(role.Priority(0)), field.UUID("optional_uuid", uuid.UUID{}). Optional(), field.UUID("nillable_uuid", uuid.UUID{}). Optional(). Nillable(), field.Strings("strings"). Optional(), field.Bytes("pair"). GoType(Pair{}). DefaultFunc(func() Pair { return Pair{K: []byte("K"), V: []byte("V")} }), field.Bytes("nil_pair"). GoType(&Pair{}). Optional(). Nillable(), field.String("vstring"). GoType(VString("")). DefaultFunc(func() VString { return "value scanner string" }), field.String("triple"). GoType(Triple{}). DefaultFunc(func() Triple { return Triple{E: [3]string{"A", "B", "C"}} }), field.Int("big_int"). Optional(). GoType(BigInt{}), field.Other("password_other", Password("")). Optional(). Sensitive(). 
			SchemaType(map[string]string{
				dialect.MySQL:    "char(32)",
				dialect.SQLite:   "char(32)",
				dialect.Postgres: "varchar",
			}),
	}
}

type Password string

func (p Password) Value() (driver.Value, error) {
	return string(p), nil
}

func (p *Password) Scan(src interface{}) error {
	switch src := src.(type) {
	case nil:
		return nil
	case string:
		*p = Password(src)
		return nil
	case []byte:
		*p = Password(src)
		return nil
	default:
		return fmt.Errorf("scan: unable to scan type %T into string", src)
	}
}

type Strings []string

func (s *Strings) Scan(v interface{}) (err error) {
	switch v := v.(type) {
	case nil:
	case []byte:
		err = s.scan(string(v))
	case string:
		err = s.scan(v)
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

func (s *Strings) scan(v string) error {
	if v == "" {
		return nil
	}
	// Reject any value that is not fully wrapped in braces, e.g. "{a,b,c}".
	if l := len(v); l < 2 || v[0] != '{' || v[l-1] != '}' {
		return fmt.Errorf("unexpected array format %q", v)
	}
	*s = strings.Split(v[1:len(v)-1], ",")
	return nil
}

func (s Strings) Value() (driver.Value, error) {
	return "{" + strings.Join(s, ",") + "}", nil
}

type VString string

func (s *VString) Scan(v interface{}) (err error) {
	switch v := v.(type) {
	case nil:
	case string:
		*s = VString(v)
	case []byte:
		*s = VString(v)
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

func (s VString) Value() (driver.Value, error) {
	return string(s), nil
}

type Triple struct {
	E [3]string
}

// Value implements the driver Valuer interface.
func (t Triple) Value() (driver.Value, error) {
	return fmt.Sprintf("(%s,%s,%s)", t.E[0], t.E[1], t.E[2]), nil
}

// Scan implements the Scanner interface.
func (t *Triple) Scan(value interface{}) (err error) {
	switch v := value.(type) {
	case nil:
	case []byte:
		// Strip the surrounding parentheses (e.g. "(a,b,c)") before splitting.
		es := strings.Split(strings.Trim(string(v), "()"), ",")
		t.E[0], t.E[1], t.E[2] = es[0], es[1], es[2]
	case string:
		es := strings.Split(strings.Trim(v, "()"), ",")
		t.E[0], t.E[1], t.E[2] = es[0], es[1], es[2]
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

type Pair struct {
	K, V []byte
}

// Value implements the driver Valuer interface.
func (p Pair) Value() (driver.Value, error) {
	var b bytes.Buffer
	if err := gob.NewEncoder(&b).Encode(p); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// Scan implements the Scanner interface.
func (p *Pair) Scan(value interface{}) (err error) {
	switch v := value.(type) {
	case nil:
	case []byte:
		err = gob.NewDecoder(bytes.NewBuffer(v)).Decode(p)
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

type (
	Int     int
	Int8    int8
	Int64   int64
	Status  bool
	Float64 float64
	Float32 float32
)

type Link struct {
	*url.URL
}

func DefaultLink() *Link {
	u, _ := url.Parse("127.0.0.1")
	return &Link{URL: u}
}

// Scan implements the Scanner interface.
func (l *Link) Scan(value interface{}) (err error) {
	switch v := value.(type) {
	case nil:
	case []byte:
		l.URL, err = url.Parse(string(v))
	case string:
		l.URL, err = url.Parse(v)
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

// Value implements the driver Valuer interface.
func (l Link) Value() (driver.Value, error) {
	if l.URL == nil {
		return nil, nil
	}
	return l.String(), nil
}

type MAC struct {
	net.HardwareAddr
}

// Scan implements the Scanner interface.
func (m *MAC) Scan(value interface{}) (err error) {
	switch v := value.(type) {
	case nil:
	case []byte:
		m.HardwareAddr, err = net.ParseMAC(string(v))
	case string:
		m.HardwareAddr, err = net.ParseMAC(v)
	default:
		err = fmt.Errorf("unexpected type %T", v)
	}
	return
}

// Value implements the driver Valuer interface.
func (m MAC) Value() (driver.Value, error) { return m.HardwareAddr.String(), nil } type StringScanner string // Scan implements the Scanner interface. func (s *StringScanner) Scan(value interface{}) (err error) { switch v := value.(type) { case nil: case string: *s = StringScanner(v) default: err = fmt.Errorf("unexpected type %T", v) } return } // Value implements the driver Valuer interface. func (s StringScanner) Value() (driver.Value, error) { return string(s), nil } type BigInt struct { *big.Int } func NewBigInt(i int64) BigInt { return BigInt{Int: big.NewInt(i)} } func (b *BigInt) Scan(src interface{}) error { var i sql.NullString if err := i.Scan(src); err != nil { return err } if !i.Valid { return nil } if b.Int == nil { b.Int = big.NewInt(0) } // Value came in a floating point format. if strings.ContainsAny(i.String, ".+e") { f := big.NewFloat(0) if _, err := fmt.Sscan(i.String, f); err != nil { return err } b.Int, _ = f.Int(b.Int) } else if _, err := fmt.Sscan(i.String, b.Int); err != nil { return err } return nil } func (b BigInt) Value() (driver.Value, error) { return b.String(), nil } func (b BigInt) Add(c BigInt) BigInt { b.Int = b.Int.Add(b.Int, c.Int) return b }
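All of the custom types above follow the same database/sql contract: Value serializes the Go value for storage, and Scan rehydrates it from whatever the driver hands back. A minimal round-trip sketch, using only types defined in this file; the helper name roundTrip is illustrative:

func roundTrip() error {
	// Strings serializes to a brace-wrapped, comma-joined list.
	s := Strings{"a", "b", "c"}
	v, err := s.Value() // "{a,b,c}"
	if err != nil {
		return err
	}
	var out Strings
	if err := out.Scan(v); err != nil { // back to ["a" "b" "c"]
		return err
	}

	// BigInt stores its decimal string form; Scan also accepts
	// floating-point renderings such as "4.2e1".
	b := NewBigInt(42)
	bv, _ := b.Value() // "42"
	var b2 BigInt      // zero value: Scan allocates the inner *big.Int lazily
	return b2.Scan(bv)
}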
def eval(self, data_key: Union[Tuple[str], str] = 'img', accumulate_grad_in_iter=None,
         accumulate_grad=None, target_key: Union[Tuple[str], str] = 'target', cast_target=None):
    # Gradient accumulation is not used during evaluation, so these
    # arguments are deliberately ignored.
    accumulate_grad_in_iter = None
    accumulate_grad = None

    data_key = wrap_tuple(data_key)
    target_key = wrap_tuple(target_key)
    for ind, loader_name in enumerate(self.__val_loader_names):
        cur_loader_state = self.__data_provider.state_dict()[loader_name]
        n_iter = len(cur_loader_state["samples"])
        # Evaluate every batch in the loader exactly once.
        for i in range(n_iter):
            batch = cur_loader_state["samples"][i]
            input_data = self._parse_data(batch, data_key[ind])
            target = self._parse_data(batch, target_key[ind])
            for cb in self.__val_callbacks:
                cb.on_minibatch_begin(loader_name=loader_name,
                                      batches_count=self.__eval_batches_count,
                                      batch=batch,
                                      input=input_data,
                                      target=target,
                                      data_key=data_key[ind],
                                      target_key=target_key[ind],
                                      stepper=self.__stepper)
            loss, eval_result = self.__stepper.eval_step(input_data,
                                                         target,
                                                         return_out=True,
                                                         callbacks=self.__val_callbacks)
            self.__eval_batches_count += 1
            for cb in self.__val_callbacks:
                cb.on_minibatch_end(loader_name=loader_name,
                                    batches_count=self.__eval_batches_count,
                                    loss=loss,
                                    input=input_data,
                                    output=eval_result,
                                    target=target,
                                    data_key=data_key[ind],
                                    target_key=target_key[ind],
                                    stepper=self.__stepper)
use std::error::Error;

use futures_util::{future, TryStreamExt};
use kucoin::{client::SClient, spot::*};
use tokio::time;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let client = SClient::new();
    let ws_conn = client.ws().connect().await?;

    let topic = TopicKlines::new("BTC-USDT".parse()?, KlineInterval::OneMinute);
    let stream = ws_conn.subscribe(topic).await?;

    tokio::spawn(async move {
        time::sleep(time::Duration::from_secs(10)).await;
        println!("Closing the stream...");
        ws_conn.close().await;
    });

    println!("Printing 10 seconds of BTC klines...");
    stream
        .try_for_each(|kline| {
            println!("{}", kline);
            future::ok(())
        })
        .await?;

    println!("Done!");
    Ok(())
}
{-# LANGUAGE QuasiQuotes, PackageImports #-} module Plugin.DaumDic (plugin) where import CmdUtil (Util(Util, rawBody), Plugin(Plugin)) import Control.Monad import Data.Maybe (isJust) import "wreq" Network.Wreq (getWith, responseBody, defaults, param) import "aeson" Data.Aeson (decode, FromJSON(..), Value(Object), (.:)) import "lens" Control.Lens ((^.), (.~), (&)) import Text.Regex.PCRE.ByteString.Lazy (compile, compUTF8, execBlank, regexec) import "interpolatedstring-perl6" Text.InterpolatedString.Perl6 import qualified Data.ByteString.UTF8 as BSU (fromString) import qualified Data.ByteString.Lazy as BSLazy import qualified Data.ByteString.Lazy.UTF8 as BSLazy import Data.Text.Encoding (decodeUtf8, encodeUtf8) import Data.Text (Text) data DaumDic = DaumDic { definitions :: [Text] } instance FromJSON DaumDic where parseJSON (Object v) = DaumDic <$> v .: [q|items|] parseJSON _ = mzero bold :: BSLazy.ByteString bold = BSLazy.fromString "\STX" mth :: BSLazy.ByteString -> IO (Maybe (BSLazy.ByteString, BSLazy.ByteString)) mth str = compile compUTF8 execBlank (BSLazy.fromString "^[a-z]+\\|([^\\|]+)\\|(.+)$") >>= (let f (Left (ix, msg)) = error [qq|$msg at $ix|] f (Right regex) = return regex in f) >>= liftM (\m -> case m of Right (Just (_, _, _, [w, ex])) -> Just (w, ex) Right _ -> Nothing Left (_, s) -> error s) . flip regexec str parse :: Util -> IO [BSLazy.ByteString] parse (Util { rawBody = str }) = getWith (defaults & param [q|mod|] .~ [[q|json|]] & param [q|code|] .~ [[q|utf_in_out|]] & param [q|enc|] .~ [[q|utf|]] & param [q|cate|] .~ [[q|eng|]] & param [q|q|] .~ [decodeUtf8 str]) [q|http://suggest.dic.daum.net/dic_all_ctsuggest|] >>= (\response -> case (decode (response ^. responseBody) :: Maybe DaumDic) of Nothing -> return [] Just (DaumDic { definitions = xs }) -> do ys <- mapM (mth . BSLazy.fromStrict . encodeUtf8) xs let Just ps = sequence . filter isJust $ ys print str if null ps then return [] else return . (:[]) . (\(a, b) -> BSLazy.append [qq|$bold$a$bold |] b) $ if not (any ((== BSLazy.fromStrict str) . fst) ps) then head ps else head (filter ((== BSLazy.fromStrict str) . fst) ps) ) plugin :: Plugin plugin = Plugin (map BSU.fromString ["!dic", "!dict", ":dic", ":dict", "dic", "!사전", ":사전", "!d", ":d", "ㅅ"]) parse
Copper(II) Acylhydrazinates: Their Synthesis and Characterization

Acylhydrazine-derived furanyl and thienyl Schiff bases and their Cu(II) complexes have been prepared and characterized on the basis of their physical, spectral and analytical data. The preferred enolic form of the Schiff base functions as a tetradentate ligand during coordination to the metal ion, yielding a square planar complex. The Schiff bases and their complexes with different anions were tested for their antibacterial activity against bacterial species such as Escherichia coli, Staphylococcus aureus, Pseudomonas aeruginosa and Klebsiella pneumoniae.

INTRODUCTION

Many studies [1] have indicated the interesting and varied ligational behavior of hydrazines and hydrazones towards transition metal ions. Their bacteriostatic properties have been well studied by many researchers [1-12]. Some hydrazones have also been found to act as potent inhibitors of DNA synthesis in a variety of cultured human cells; in particular, their Cu(II) complexes were shown to produce significant inhibition of tumor growth when administered to mice bearing a transplanted fibrosarcoma [13,14]. Because of such promising results, the antibacterial metallo-organic chemistry of such ligands is yet to be explored. We have previously reported [15-18] some antibacterial acylhydrazine-derived Schiff bases and their various transition metal complexes. The present study was undertaken in order to prepare and study the metallo-organic/coordination behavior of Cu(II) with the Schiff base ligands L1 and L2.

[Structure drawing of the ligands appeared here.]

An ethanol solution (20 mL) of the respective aldehyde was added to a stirred hot ethanol solution (30 mL) of oxaloyldihydrazide (1.2 g, 0.01 mol). Then 2-3 drops of conc. H2SO4 were added and the mixture was refluxed for 8 h. The reaction mixture was then cooled and left for 24 h at room temperature. During this period, a light yellow solid formed, which was recrystallized from hot ethanol to give the desired product (L1) (1.8 g). L2 was prepared according to the same reported method [17] as described for L1.

Preparation of the Metal Complexes

An ethanol solution (20 mL) of the appropriate Cu(II) salt (0.001 mol) was added to a well-stirred hot ethanol solution (20 mL) of the respective Schiff base (0.001 mol). The mixture was refluxed for 8 h. On cooling to room temperature, a precipitated solid product formed. The product thus obtained was filtered, washed with ethanol, then with ether, and dried. Crystallization from aqueous ethanol (50 %) gave the desired metal complexes.

Physical Properties

The physical properties of the Schiff bases (L1 and L2) and their complexes are summarized in Table I. Model studies of these Schiff bases (Fig. 2) show that in no case can these Schiff bases exhibit tridentate behavior. They are only capable of exhibiting tetradentate (Fig. 2) behavior. However, they have a tendency to exist as different tautomers: the diketone, the dienol and the ketoenol forms. In the dienol form, the ligands may coordinate metal ions through the X (X = O or S) donor sites of the furanyl or thienyl rings and through the two azomethine nitrogens (HC=N). The diketone and ketoenol forms can also behave similarly, as tetradentate ligands, coordinating through the same coordination sites as the dienol form.

Infrared Spectra

The IR spectra of the Schiff bases and their Cu(II) complexes were recorded in KBr and are reported in the Tables with some tentative assignments of their important characteristic bands.
All these Schiff bases showed the absence of the bands at ~3420 cm-1 and 1730 cm-1 due to the characteristic v(NH) and v(C=O) stretching vibrations of the respective hydrazinoamine and aldehyde. Instead, a new band at 1635 cm-1, assigned to the azomethine (HC=N) linkage, appeared in the spectra of all of the proposed Schiff base ligands. Also a band at 1020 cm-1 due to the v(N-N) vibration appeared in the spectra of L1 and L2. These observations suggested that the hydrazinoamine and aldehyde moieties of the starting reagents are no longer present and that condensation to the respective Schiff bases has taken place.

[Fig. 2: Tautomeric forms of the investigated ligands.]

A comparison of the infrared spectra of the Schiff bases and their metal complexes indicated [21,22] that the Schiff bases are tetradentately coordinated to the metal ions. The band due to v(C=O) was absent in the spectra of the complexes, suggesting [23] enolization of the Schiff bases during complexation. This is supported by the evidence that the band due to v(OH) in the spectra of these complexes was observed at ~3315 cm-1. These facts suggested that the Schiff bases L1 and L2 remained in the keto form in the solid state as uncomplexed ligands, but in solution the keto and enol forms were in equilibrium [24], as shown in Fig. 2B. The amide-II band was split, displaced to higher frequency and reduced in intensity. A shift (5-10 cm-1) to higher frequency of the v(N-N) band at 1025 cm-1 and its splitting indicated coordination of the azomethine nitrogen. Moreover, a low-frequency shift (10-15 cm-1) of the band due to the azomethine (HC=N) linkage at 1635 cm-1 indicated involvement of the azomethine nitrogen in coordination. The appearance of weak, low-frequency new bands at ~360 and ~455 cm-1 was assigned [25] to the metal-sulfur v(M-S) vibration in the thienyl ligands and the metal-oxygen v(M-O) vibration in the furanyl ligands. These bands were only observable in the spectra of the metal complexes and not in the spectra of their Schiff bases, which in turn confirmed the participation of the heteroatoms X (S or O) in the coordination [22,26]. These observations, in turn, suggested a square planar geometry for the Cu(II) complexes (Fig. 3).

NMR Spectra

The NMR spectra of the free ligands and some of their metal complexes have been recorded in DMSO-d6. The features of the free ligands have already been reported elsewhere [27]; the NMR spectra of the free ligands support the conclusions derived from the IR spectra, as expected. In the spectra of the Cu(II) complexes (Table III), these proton signals appeared much more downfield, as expected, due to increased conjugation during coordination [28,29].

[Table II: IR and 13C NMR (DMSO-d6, δ ppm) data appeared here.]

Magnetic Moment and Electronic Spectra

The UV-visible spectral bands of the Cu(II) complexes are recorded in Table I. The copper(II) complexes exhibited a magnetic moment of 1.55-1.65 B.M. at room temperature. These values are quite close to the spin-allowed values expected for an S = 1/2 system, and the complexes attain a square planar geometry around the copper(II) ion (a spin-only check is sketched at the end of this article). These copper(II) complexes (Table II) display a broad band at ~14680 cm-1 due to the 2B1g → 2Eg transition, and two bands at ~16395 and ~27320 cm-1 assigned to a d-d transition and a charge-transfer band [31,32], confirming their square planar environment [33] (Fig. 3).

Antibacterial Properties

Antibacterial properties of the ligands and their metal complexes were studied against the bacterial species Escherichia coli, Pseudomonas aeruginosa, Staphylococcus aureus and Klebsiella pneumoniae.
These were tested at a concentration of 30 µg/0.01 mL in DMF solution using a paper disc diffusion method devised and reported earlier [34-37]. The results of these studies, reproduced in Table IV, indicated that both the Schiff-base ligands and their metal complexes individually exhibited varying degrees of inhibitory effects on the growth of the tested bacterial species. The antibacterial results evidently show that the activity of the unchelated compounds became more pronounced and prominent when chelated with the metal ion. When the same metal chelate bearing different anions was individually screened, the degree of bactericidal activity/potency also varied.

[Fig. 3: Proposed structure of the Cu(II) complexes. L1: X = O; L2: X = S; Y = NO3, SO4, CH3COO, C2O4; n = 1 or 2.]

From the obtained data, it was generally observed that the order of potency, in comparison to the metal chelate bearing a chloride anion evaluated and reported earlier [38] and to the results of the present studies against the same tested bacterial species under the same conditions, was found to follow the order NO3 > C2O4 > CH3CO2 > Cl > SO4. On the basis of these results, it is therefore strongly claimed [39,40] that different anions dominantly affect the biological behavior of the metal chelates. It is, however, expected that factors such as solubility, conductivity, dipole moment and cell permeability mechanisms are certainly influenced by the presence of these anions in the chelate and may cause an increase in this activity. These studies provide useful information about the biological activity of compounds as influenced by the anions which stay outside the coordination sphere of the chelated complex.
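As a quick consistency check on the reported moments (not from the paper's data; this is the standard spin-only formula with n = 1 unpaired electron for a d9 Cu(II) ion):

\mu_{s} = \sqrt{n(n+2)}\ \mathrm{B.M.}, \qquad n = 1 \;\Rightarrow\; \mu_{s} = \sqrt{3} \approx 1.73\ \mathrm{B.M.}

The measured 1.55-1.65 B.M. values lie near this one-unpaired-electron estimate, consistent with the S = 1/2, square planar assignment made above.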
// Distributed under the MIT License (See // accompanying file "LICENSE" or the website // http://www.opensource.org/licenses/mit-license.php) #include "LightUniforms.h" #include "StandardLightScene.h" #include "StandardLightOperators.h" #include "../../Math/Transformations.h" namespace RenderCore { namespace LightingEngine { namespace Internal { CB_Ambient MakeAmbientUniforms(const EnvironmentalLightingDesc& desc) { return CB_Ambient { desc._ambientLight, desc._skyReflectionScale, desc._skyReflectionBlurriness, {0,0,0}, }; } CB_RangeFog MakeRangeFogUniforms(const EnvironmentalLightingDesc& desc) { if (desc._doRangeFog) return CB_RangeFog { desc._rangeFogInscatter, desc._rangeFogThickness }; return CB_RangeFog { Float3(0.f, 0.f, 0.f), 0 }; } static unsigned AsLightShapeId(LightSourceShape shape) { return unsigned(shape); } CB_Light MakeLightUniforms(const StandardPositionalLight& light, const LightSourceOperatorDesc& operatorDesc) { return CB_Light { light._position, light._cutoffRange, light._brightness, light._radii[0], ExtractRight(light._orientation), light._radii[1], ExtractForward(light._orientation), AsLightShapeId(operatorDesc._shape), ExtractUp(light._orientation), 0 }; } CB_Light MakeBlankLightDesc() { return CB_Light { Float3(0.f, 0.f, 0.f), 0.f, Float3(0.f, 0.f, 0.f), 0.f, Float3(0.f, 0.f, 0.f), 0.f, Float3(0.f, 0.f, 0.f), 0, Float3(0.f, 0.f, 0.f), 0 }; } CB_VolumeFog MakeBlankVolumeFogDesc() { return CB_VolumeFog { 0.f, 0.f, 0.f, 0, Float3(0.f, 0.f, 0.f), 0, Float3(0.f, 0.f, 0.f), 0 }; } }}}
It’s been 6 months since we released the revamped trigger policy. On the whole things have gone well, since the fundamentals – if you miss triggers they may not happen, your opponents don’t have to point them out – have proved to be popular. We avoided making any changes to the rules in July, so as to give us a bunch of time to see where things were working and where they weren’t. After studying it for a while, we concluded that there were a few problems. They included:

- It was hard to learn.
- It was hard to learn.
- It was hard to learn.
- It was too fiddly.
- Hard to learn, too much it was.

To the Policymobile!

We assembled a team of expert policymakers who’d been voicing opinions on how we might improve things and started brainstorming. What assumptions had we made that could be challenged? Would applying what we’d learned to the current policy make things better? Were there creative approaches we could take? We laid all the parameters down and rolled up our sleeves.

We did produce an evolution of the Lapsing Ability policy. It was solid, and would have closed up many loopholes. It got as far as a full workup as though it were written policy, but that’s where its weakness really showed. A comprehensive Lapsing Ability policy weighed in at the same length as the JAR. For just Missed Triggers. It was still too hard to learn. Surely we could do better.

And We Did

We set out some goals, drawn from what we’d learned over the last six months, in approximate order of importance:

- It had to be super-easy for players to learn, ideally matching how players would play naturally.
- Players moving from Regular REL to Competitive wouldn’t get in trouble for playing like they were still at Regular REL.
- Players still had to be responsible for triggers; they couldn’t choose to ignore them.
- Players shouldn’t be upset by having a judge around.
- Players shouldn’t have any influence on the penalty.
- Judges should have a clear idea when they should be intervening.
- Opponents shouldn’t be able to sit on missed triggers for too long to create weird or advantageous situations.

We’d stumble across other small issues as we worked through possibilities, but the parameters above were enough to make things challenging. Feel free to take those and try to come up with a solution. It’s remarkably tough, and for a while, we kept running aground – JAR-length solutions incoming!

The irony of the final solution is that we had 95% of it way back in January. It had been rejected because the various versions let the opponent game out the penalty – whether they would call it out would depend on how valuable they thought the penalty would be for them. Players should never be thinking about penalties, let alone trying to angle-shoot them, but taking that ability away required either giving judges long lists, or letting them intervene on something as vague as “if you think you should”. That makes it almost impossible for a judge to notice something as they wander by, and requires them to have the same understanding of the game as the players. Bleah.

The breakthrough came when we threw away one of the fundamental principles in the IPG – the idea that the penalty and the remedy were related. That sounds simple, but it’s a principle that has been an inherent part of all infractions up to this point. In the early days of the IPG we had a separate section for each infraction called “Penalty”. It got removed because it never had anything in it aside from the penalty, and for the first time ever, we wanted it back.
Having totally different rules for the two let us split responsibility in a way that makes sense, and, as we’ll see below, let us make the rules substantially shorter. Shorter? Oh yes. Let’s go!

Definition

A triggered ability triggers, but the player controlling the ability doesn’t demonstrate awareness of the trigger’s existence and/or forgets to announce its effect. If a triggered ability has been partially or incorrectly resolved, instead treat it as a Game Play Error — Game Rule Violation.

The language is tightened up a bit here. Demonstrating awareness of the trigger is a central tenet, and allows for use of things like Out-Of-Order Sequencing in concert with missed triggers. For example, playing an Attended Knight and saying “Go” while reaching into your box for a Soldier token is a clear demonstration of awareness, even if the sequence is used to streamline game play.

Demonstrated awareness is also important because there are now no triggers that just resolve. The concept of No Visual Effect used to be important, but it quickly became clear that it was incompatible with opponents not having to announce triggers. If I played a creature, then attacked with a Kruin Striker, was it safe to block with a 2/3? Knowing the answer to that, at best, required the player to have a more complex understanding of policies outside the game rules, and at worst created an uncomfortable disagreement. Now the state of the game will be the same to both players based on the statements they make.

A trigger is considered missed once the controller of the trigger has taken an action after the point at which a trigger should have resolved or, in the case of a trigger controlled by the non-active player, after that player has taken an action that indicates they have actively passed priority. Players may not cause triggered abilities to be missed by taking game actions or otherwise prematurely advancing the game. For example, if a player draws a card during his or her draw step without allowing the controller of a triggered ability that would trigger during that turn’s upkeep to resolve it, place that trigger on the stack at this point and issue no penalty.

Still the same as before, with a little more language tightening. It’s important to recognize when the trigger actually should have resolved, versus when it logically might have. For example, if a trigger happens, and then the players play instants, once those have all resolved, the player still needs to take another action, since they haven’t clearly passed in a situation where the trigger would have resolved.

Philosophy

Triggered abilities are common and invisible, so players should not be harshly penalized when forgetting about one. Players are expected to remember their own triggers; intentionally ignoring one is considered Cheating — Fraud. However, remembering triggers that benefit you is a skill. Therefore, players are not required to point out missed triggers that they do not control, though they may do so if they wish.

The basic philosophy now has actual rules support in the MTR. Technically, there hadn’t been any rule that allowed the opponents to overlook triggers, except that they wouldn’t be penalized for it. However, that would imply that the judges should step in on every trigger, even if they weren’t going to hand out a penalty. That’s been fixed now and the opponent’s responsibility is properly codified.
This paragraph is the magical key to everything, and is worth breaking down:

The controller of the missed trigger only receives a Warning if the triggered ability is generally considered detrimental for the controlling player. The current game state is not a factor in determining this.

What is “generally considered detrimental”? There’s a couple of guidelines that can be useful to figure this out. For one, if the trigger didn’t exist, would the card be played? Without its trigger, Dark Confidant is a 2/1 for 1B. That’s hardly going to make the cut, so I think we can safely say that Dark Confidant does not have a detrimental trigger. Detrimental triggers tend to be there to either make the card cheaper, or offset some other abusable advantage.

Yes, sometimes a detrimental trigger can be beneficial, and sometimes decks are built to take advantage of this. However, we don’t want judges having to figure this out. Not only is it difficult, but it goes back to having to figure out the game state. That means if you notice one of these triggers out of the corner of your eye, how do you know if you should step in? Even if you’re watching, are you confident in evaluating if Dark Confidant is detrimental when the controller is at 1? At 2?

It also means that you can have a pretty good idea of all the triggers in the format that you need to watch out for before the tournament even starts. For example, let’s say you’re judging an M13 draft. How many triggers do you need to watch out for? Well: [the list of M13 triggers appeared here.] That’s it! If you’re the kind of judge who likes to prepare in advance, this should make you comfortable. If you prefer to wing it, you should still be comfortable. And if you think it’s just a small list because it’s a core set, the super-complex Avacyn Restored has all of… nine.

Whether a Warning is issued or not does not affect how the trigger is handled, and Failure to Maintain Game State penalties are never issued to players who did not control the ability.

This is important to be clear on. When you are called to a table to handle a missed trigger, you decide whether to assess a Warning. Whatever you decide, you then follow the instructions in the Additional Remedy section. They will always work, no matter what the nature of the trigger.

This also means that the damage the judge can do to the game is minimized. If they make a mistake and step in, the worst that happens is an incorrect Warning is handed out and the player might remember the trigger the next time. That’s a pretty acceptable outcome for a worst-case scenario. It means that the judges assess the penalty, but, as we’ll see below, the players determine the impact on the game state. That feels intuitively correct.

Judges should not intervene in a missed trigger situation unless they intend to issue a Warning or have reason to suspect that the controller is intentionally missing his or her triggers.

Players will obviously call a judge over to handle situations, but this guideline ensures you only need to intervene when a detrimental trigger is missed. Of course, you may see a suspicious situation, where you think a player is intentionally missing one of their triggers. It’s perfectly reasonable to pull them aside and investigate what’s going on.

Additional Remedy

As before, you start at the beginning and follow instructions until you find an answer. Once you do, you can stop reading.
If the trigger specifies a default action associated with a choice made by the controller of the trigger (usually “If you don’t …” or “… unless”), resolve the default action immediately without using the stack. If there are unresolved spells or abilities that are no longer legal as a result of this action, rewind the game to remove all such spells or abilities. Resulting triggers generated by the action still trigger and resolve as normal.

This piece has been part of Missed Trigger since the infraction was created, which is remarkable longevity. I *believe* that there are no triggers that fall into this category that aren’t detrimental; I’d be fascinated to see what one would look like.

If the duration of the effect generated by the trigger has already expired, or the trigger was missed more than a turn ago, instruct the players to continue playing.

A couple of big things here. We had a lot of debate over how long a trigger should stick around for the opponent to notice, from the same window the controlling player gets to a full turn cycle. Too long and it increases the likelihood you get a weird interaction. Too short and you engender a lot of cheating accusations and feel-bads. A turn is a good compromise. It’s also something that, unlike turn cycle, doesn’t need a bunch of definition – it’s just that point a turn later. Player 1 missed an upkeep trigger during their turn, and now we’re in Player 2’s main phase? A turn has passed. Any time we can do something intuitive, it’s better than having another definition.

Yes, that means turn cycles are gone from the IPG. Missed Trigger was the only place that really needed it, and the other usage – card in the wrong zone – works just as well with the simpler turn concept. The less judges have to learn, the better.

Duration expiry also avoids a few quirky situations. If a trigger was supposed to give +2/+2 to a creature until end of turn and it was missed, the other player doesn’t have the option to wait until their turn and then make it happen because they have Smite the Monstrous. Same with tokens from Geist of Saint Traft or Thatcher Revolt.

Note that this statute of limitations only applies to the remedy. If the players call you over because one of them missed a Vampire Lacerator trigger three turns ago, you’ll still issue the Warning (and only one Warning, no matter how many have been missed), but not take any further action.

Otherwise, the opponent may choose to have the controller play the triggered ability. If they do, insert the forgotten ability at the appropriate place or on the bottom of the stack. No player may make choices involving objects that were not in the zone or zones referenced by the trigger when the ability should have triggered. For example, if the ability instructs a player to sacrifice a creature, that player can’t sacrifice a creature that wasn’t on the battlefield when the ability should have triggered.

… wait, where’s the rest? That’s it? Yep, it’s really that simple. When you’re called over, figure out whether you’re going to issue a Warning. Then, most of the time you’ll turn to the opponent and ask whether or not they would like the trigger to go on the stack now.

This has some nifty consequences when you look at what used to be in the infraction. You may not have noticed that there’s no mention of ‘may’ triggers in the infraction any more.
That’s because they work better here – if there’s a random reason that the opponent wants the trigger to go on the stack (a Restoration Angel with only Phantasmal Image as the target), it does so, and then, most likely, the controller opts to do nothing (may triggers with no implications on the game state – i.e. most of them – can just slide by as normal with both players ignoring them). Symmetric triggers, such as Howling Mine, also don’t need any special handling – your opponent decides both halves, so the incentives are in all the right places.

Pick a favorite painful trigger and game it through the new remedy. I think you’ll find that you like the new result.

The Team

I’d be remiss if I didn’t acknowledge the team who worked on the newest revision. They get all the credit if this is good. I get all the blame if it isn’t. Thanks also to everyone else who helped out and offered feedback and suggestions over the past six months.

Comments welcome, as always.
/** * @brief Decode a imu_calibration_params message into a struct * * @param msg The message to decode * @param imu_calibration_params C-struct to decode the message contents into */ static inline void mavlink_msg_imu_calibration_params_decode(const mavlink_message_t* msg, mavlink_imu_calibration_params_t* imu_calibration_params) { #if MAVLINK_NEED_BYTE_SWAP || !MAVLINK_ALIGNED_FIELDS imu_calibration_params->angle_ajusted = mavlink_msg_imu_calibration_params_get_angle_ajusted(msg); imu_calibration_params->believe_in_gyro = mavlink_msg_imu_calibration_params_get_believe_in_gyro(msg); imu_calibration_params->gyro_offet_x = mavlink_msg_imu_calibration_params_get_gyro_offet_x(msg); imu_calibration_params->gyro_offet_y = mavlink_msg_imu_calibration_params_get_gyro_offet_y(msg); imu_calibration_params->gyro_offet_z = mavlink_msg_imu_calibration_params_get_gyro_offet_z(msg); #else uint8_t len = msg->len < MAVLINK_MSG_ID_IMU_CALIBRATION_PARAMS_LEN? msg->len : MAVLINK_MSG_ID_IMU_CALIBRATION_PARAMS_LEN; memset(imu_calibration_params, 0, MAVLINK_MSG_ID_IMU_CALIBRATION_PARAMS_LEN); memcpy(imu_calibration_params, _MAV_PAYLOAD(msg), len); #endif }
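By way of illustration, the same message could be consumed from Python with pymavlink, assuming the custom dialect that defines IMU_CALIBRATION_PARAMS has been generated for pymavlink; the connection string below is a placeholder:

# Hypothetical consumer of the IMU_CALIBRATION_PARAMS message via pymavlink.
# Assumes the custom dialect defining this message was generated beforehand.
from pymavlink import mavutil

conn = mavutil.mavlink_connection('udpin:0.0.0.0:14550')  # placeholder endpoint
msg = conn.recv_match(type='IMU_CALIBRATION_PARAMS', blocking=True, timeout=5)
if msg is not None:
    # Attribute names mirror the generated C struct fields above,
    # including their original spellings.
    print(msg.angle_ajusted, msg.believe_in_gyro)
    print(msg.gyro_offet_x, msg.gyro_offet_y, msg.gyro_offet_z)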
// GetSlice returns a DataSlice of data from the GeoTIFF file func (g *GeoTIFF) GetSlice(offset uint, size uint) (*DataSlice, error) { buffer := make([]byte, size) _, err := g.source.ReadAt(buffer, int64(offset)) if err != nil { return nil, fmt.Errorf("cannot read data from file due to %v", err) } dataSlice := NewDataSlice(buffer, offset, g.littleEndian, g.bigTiff) return dataSlice, nil }
/**
 * Notify the provider when a temp form was cast to a normal form.
 *
 * @param formId The Id of the form to update.
 * @param want The want carrying the request information.
 * @param callerToken Caller ability token.
 * @return ERR_OK on success.
 */
int MockFormProviderClient::NotifyFormCastTempForm(const int64_t formId, const Want &want,
    const sptr<IRemoteObject> &callerToken)
{
    APP_LOGD("Notify cast temp form");
    return ERR_OK;
}
def activate_ddp(self, rank: int):
    # Wrap the unroll performer in DistributedDataParallel, pinning it to the
    # GPU that corresponds to this process's rank. Store the DDP wrapper
    # itself, not a one-element tuple of it.
    self._unroll_performer = DDP(_UnrollPerformer(self), device_ids=[rank])
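For context, a sketch of how activate_ddp might be driven from a standard torch.multiprocessing launch; make_model is a hypothetical factory for whatever object defines activate_ddp, and the address, port and world size are placeholders:

import os
import torch.distributed as dist
import torch.multiprocessing as mp

def _worker(rank: int, world_size: int) -> None:
    # DDP requires an initialized process group before modules are wrapped.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")  # placeholder
    os.environ.setdefault("MASTER_PORT", "29500")      # placeholder
    dist.init_process_group("nccl", rank=rank, world_size=world_size)
    model = make_model()      # hypothetical factory; built once per process
    model.activate_ddp(rank)  # wraps the unroll performer for this rank
    # ... training / unrolling would happen here ...
    dist.destroy_process_group()

if __name__ == "__main__":
    world_size = 2  # placeholder GPU count
    mp.spawn(_worker, args=(world_size,), nprocs=world_size)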
/** * Compiles user interface profile for a particular user. The profile contains essential information needed to efficiently render * user interface pages for specified user. * * This methods in this component may be quite costly to invoke. Therefore it should NOT be invoked for every request. * The methods are supposed to be invoked once (or several times) during user's session. The result of this method should be * cached in web session (in principal). * * @author Radovan semancik */ @Component public class UserProfileCompiler { private static final Trace LOGGER = TraceManager.getTrace(UserProfileCompiler.class); @Autowired private SecurityHelper securityHelper; @Autowired private SystemObjectCache systemObjectCache; @Autowired private RelationRegistry relationRegistry; @Autowired private PrismContext prismContext; @Autowired private MappingFactory mappingFactory; @Autowired private MappingEvaluator mappingEvaluator; @Autowired private ActivationComputer activationComputer; @Autowired private Clock clock; @Autowired @Qualifier("modelObjectResolver") private ObjectResolver objectResolver; @Autowired @Qualifier("cacheRepositoryService") private RepositoryService repositoryService; public void compileUserProfile(MidPointUserProfilePrincipal principal, PrismObject<SystemConfigurationType> systemConfiguration, AuthorizationTransformer authorizationTransformer, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { principal.setApplicableSecurityPolicy(securityHelper.locateSecurityPolicy(principal.getUser().asPrismObject(), systemConfiguration, task, result)); Collection<Authorization> authorizations = principal.getAuthorities(); List<AdminGuiConfigurationType> adminGuiConfigurations = new ArrayList<>(); collect(adminGuiConfigurations, principal, systemConfiguration, authorizationTransformer, task, result); CompiledUserProfile compiledUserProfile = compileUserProfile(adminGuiConfigurations, systemConfiguration, task, result); principal.setCompiledUserProfile(compiledUserProfile); } private void collect(List<AdminGuiConfigurationType> adminGuiConfigurations, MidPointUserProfilePrincipal principal, PrismObject<SystemConfigurationType> systemConfiguration, AuthorizationTransformer authorizationTransformer, Task task, OperationResult result) throws SchemaException { UserType userType = principal.getUser(); LensContext<UserType> lensContext = createAuthenticationLensContext(userType.asPrismObject(), systemConfiguration); Collection<AssignmentType> forcedAssignments = null; try { forcedAssignments = LensUtil.getForcedAssignments(lensContext.getFocusContext().getLifecycleModel(), userType.getLifecycleState(), objectResolver, prismContext, task, result); } catch (ObjectNotFoundException | CommunicationException | ConfigurationException | SecurityViolationException | ExpressionEvaluationException e1) { LOGGER.error("Forced assignments defined for lifecycle {} won't be evaluated", userType.getLifecycleState(), e1); } if (!userType.getAssignment().isEmpty() || forcedAssignments != null) { AssignmentEvaluator.Builder<UserType> builder = new AssignmentEvaluator.Builder<UserType>() .repository(repositoryService) .focusOdo(new ObjectDeltaObject<>(userType.asPrismObject(), null, userType.asPrismObject())) .channel(null) .objectResolver(objectResolver) .systemObjectCache(systemObjectCache) .relationRegistry(relationRegistry) .prismContext(prismContext) .mappingFactory(mappingFactory) 
.mappingEvaluator(mappingEvaluator) .activationComputer(activationComputer) .now(clock.currentTimeXMLGregorianCalendar()) // We do need only authorizations + gui config. Therefore we not need to evaluate // constructions and the like, so switching it off makes the evaluation run faster. // It also avoids nasty problems with resources being down, // resource schema not available, etc. .loginMode(true) // We do not have real lens context here. But the push methods in ModelExpressionThreadLocalHolder // will need something to push on the stack. So give them context placeholder. .lensContext(lensContext); AssignmentEvaluator<UserType> assignmentEvaluator = builder.build(); evaluateAssignments(userType.getAssignment(), false, assignmentEvaluator, principal, authorizationTransformer, adminGuiConfigurations, task, result); evaluateAssignments(forcedAssignments, true, assignmentEvaluator, principal, authorizationTransformer, adminGuiConfigurations, task, result); } if (userType.getAdminGuiConfiguration() != null) { // config from the user object should go last (to be applied as the last one) adminGuiConfigurations.add(userType.getAdminGuiConfiguration()); } } private void evaluateAssignments(Collection<AssignmentType> assignments, boolean virtual, AssignmentEvaluator<UserType> assignmentEvaluator, MidPointPrincipal principal, AuthorizationTransformer authorizationTransformer, Collection<AdminGuiConfigurationType> adminGuiConfigurations, Task task, OperationResult result) { UserType userType = principal.getUser(); Collection<Authorization> authorizations = principal.getAuthorities(); try { RepositoryCache.enter(); for (AssignmentType assignmentType: assignments) { try { ItemDeltaItem<PrismContainerValue<AssignmentType>,PrismContainerDefinition<AssignmentType>> assignmentIdi = new ItemDeltaItem<>(); assignmentIdi.setItemOld(LensUtil.createAssignmentSingleValueContainerClone(assignmentType)); assignmentIdi.recompute(); EvaluatedAssignment<UserType> assignment = assignmentEvaluator.evaluate(assignmentIdi, PlusMinusZero.ZERO, false, userType, userType.toString(), virtual, task, result); if (assignment.isValid()) { addAuthorizations(authorizations, assignment.getAuthorizations(), authorizationTransformer); adminGuiConfigurations.addAll(assignment.getAdminGuiConfigurations()); } for (EvaluatedAssignmentTarget target : assignment.getRoles().getNonNegativeValues()) { if (target.isValid() && target.getTarget() != null && target.getTarget().asObjectable() instanceof UserType && DeputyUtils.isDelegationPath(target.getAssignmentPath(), relationRegistry)) { List<OtherPrivilegesLimitationType> limitations = DeputyUtils.extractLimitations(target.getAssignmentPath()); principal.addDelegatorWithOtherPrivilegesLimitations(new DelegatorWithOtherPrivilegesLimitations( (UserType) target.getTarget().asObjectable(), limitations)); } } } catch (SchemaException | ObjectNotFoundException | ExpressionEvaluationException | PolicyViolationException | SecurityViolationException | ConfigurationException | CommunicationException e) { LOGGER.error("Error while processing assignment of {}: {}; assignment: {}", userType, e.getMessage(), assignmentType, e); } } } finally { RepositoryCache.exit(); } } private LensContext<UserType> createAuthenticationLensContext(PrismObject<UserType> user, PrismObject<SystemConfigurationType> systemConfiguration) throws SchemaException { LensContext<UserType> lensContext = new LensContextPlaceholder<>(user, prismContext); if (systemConfiguration != null) { ObjectPolicyConfigurationType 
policyConfigurationType = determineObjectPolicyConfiguration(user, systemConfiguration); lensContext.getFocusContext().setObjectPolicyConfigurationType(policyConfigurationType); } return lensContext; } private ObjectPolicyConfigurationType determineObjectPolicyConfiguration(PrismObject<UserType> user, PrismObject<SystemConfigurationType> systemConfiguration) throws SchemaException { ObjectPolicyConfigurationType policyConfigurationType; try { policyConfigurationType = ModelUtils.determineObjectPolicyConfiguration(user, systemConfiguration.asObjectable()); } catch (ConfigurationException e) { throw new SchemaException(e.getMessage(), e); } if (LOGGER.isTraceEnabled()) { LOGGER.trace("Selected policy configuration from subtypes {}:\n{}", FocusTypeUtil.determineSubTypes(user), policyConfigurationType==null?null:policyConfigurationType.asPrismContainerValue().debugDump(1)); } return policyConfigurationType; } private void addAuthorizations(Collection<Authorization> targetCollection, Collection<Authorization> sourceCollection, AuthorizationTransformer authorizationTransformer) { if (sourceCollection == null) { return; } for (Authorization autz: sourceCollection) { if (authorizationTransformer == null) { targetCollection.add(autz); } else { Collection<Authorization> transformedAutzs = authorizationTransformer.transform(autz); if (transformedAutzs != null) { targetCollection.addAll(transformedAutzs); } } } } public CompiledUserProfile compileUserProfile(@NotNull List<AdminGuiConfigurationType> adminGuiConfigurations, PrismObject<SystemConfigurationType> systemConfiguration, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { AdminGuiConfigurationType globalAdminGuiConfig = null; if (systemConfiguration != null) { globalAdminGuiConfig = systemConfiguration.asObjectable().getAdminGuiConfiguration(); } // if there's no admin config at all, return null (to preserve original behavior) if (adminGuiConfigurations.isEmpty() && globalAdminGuiConfig == null) { return null; } CompiledUserProfile composite = new CompiledUserProfile(); if (globalAdminGuiConfig != null) { applyAdminGuiConfiguration(composite, globalAdminGuiConfig, task, result); } for (AdminGuiConfigurationType adminGuiConfiguration: adminGuiConfigurations) { applyAdminGuiConfiguration(composite, adminGuiConfiguration, task, result); } return composite; } private void applyAdminGuiConfiguration(CompiledUserProfile composite, AdminGuiConfigurationType adminGuiConfiguration, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { if (adminGuiConfiguration == null) { return; } adminGuiConfiguration.getAdditionalMenuLink().forEach(additionalMenuLink -> composite.getAdditionalMenuLink().add(additionalMenuLink.clone())); adminGuiConfiguration.getUserDashboardLink().forEach(userDashboardLink -> composite.getUserDashboardLink().add(userDashboardLink.clone())); if (adminGuiConfiguration.getDefaultTimezone() != null) { composite.setDefaultTimezone(adminGuiConfiguration.getDefaultTimezone()); } if (adminGuiConfiguration.getPreferredDataLanguage() != null) { composite.setPreferredDataLanguage(adminGuiConfiguration.getPreferredDataLanguage()); } if (adminGuiConfiguration.isEnableExperimentalFeatures() != null) { composite.setEnableExperimentalFeatures(adminGuiConfiguration.isEnableExperimentalFeatures()); } if 
(adminGuiConfiguration.getDefaultExportSettings() != null) { composite.setDefaultExportSettings(adminGuiConfiguration.getDefaultExportSettings().clone()); } if (adminGuiConfiguration.getDisplayFormats() != null){ composite.setDisplayFormats(adminGuiConfiguration.getDisplayFormats().clone()); } applyViews(composite, adminGuiConfiguration.getObjectLists(), task, result); // Compatibility, deprecated applyViews(composite, adminGuiConfiguration.getObjectCollectionViews(), task, result); if (adminGuiConfiguration.getObjectForms() != null) { if (composite.getObjectForms() == null) { composite.setObjectForms(adminGuiConfiguration.getObjectForms().clone()); } else { for (ObjectFormType objectForm: adminGuiConfiguration.getObjectForms().getObjectForm()) { joinForms(composite.getObjectForms(), objectForm.clone()); } } } if (adminGuiConfiguration.getObjectDetails() != null) { if (composite.getObjectDetails() == null) { composite.setObjectDetails(adminGuiConfiguration.getObjectDetails().clone()); } else { for (GuiObjectDetailsPageType objectDetails: adminGuiConfiguration.getObjectDetails().getObjectDetailsPage()) { joinObjectDetails(composite.getObjectDetails(), objectDetails); } } } if (adminGuiConfiguration.getUserDashboard() != null) { if (composite.getUserDashboard() == null) { composite.setUserDashboard(adminGuiConfiguration.getUserDashboard().clone()); } else { for (DashboardWidgetType widget: adminGuiConfiguration.getUserDashboard().getWidget()) { mergeWidget(composite, widget); } } } for (UserInterfaceFeatureType feature: adminGuiConfiguration.getFeature()) { mergeFeature(composite, feature.clone()); } if (adminGuiConfiguration.getFeedbackMessagesHook() != null) { composite.setFeedbackMessagesHook(adminGuiConfiguration.getFeedbackMessagesHook().clone()); } if (adminGuiConfiguration.getRoleManagement() != null && adminGuiConfiguration.getRoleManagement().getAssignmentApprovalRequestLimit() != null) { if (composite.getRoleManagement() != null && composite.getRoleManagement().getAssignmentApprovalRequestLimit() != null) { // the greater value wins (so it is possible to give an exception to selected users) Integer newValue = Math.max( adminGuiConfiguration.getRoleManagement().getAssignmentApprovalRequestLimit(), composite.getRoleManagement().getAssignmentApprovalRequestLimit()); composite.getRoleManagement().setAssignmentApprovalRequestLimit(newValue); } else { if (composite.getRoleManagement() == null) { composite.setRoleManagement(new AdminGuiConfigurationRoleManagementType()); } composite.getRoleManagement().setAssignmentApprovalRequestLimit( adminGuiConfiguration.getRoleManagement().getAssignmentApprovalRequestLimit()); } } } private void applyViews(CompiledUserProfile composite, GuiObjectListViewsType viewsType, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { if (viewsType == null) { return; } if (viewsType.getDefault() != null) { if (composite.getDefaultObjectCollectionView() == null) { composite.setDefaultObjectCollectionView(new CompiledObjectCollectionView()); } compileView(composite.getDefaultObjectCollectionView(), viewsType.getDefault(), task, result); } for (GuiObjectListViewType objectCollectionView : viewsType.getObjectList()) { // Compatibility, legacy applyView(composite, objectCollectionView, task, result); } for (GuiObjectListViewType objectCollectionView : viewsType.getObjectCollectionView()) { applyView(composite, objectCollectionView, task, result); } } 
private void applyView(CompiledUserProfile composite, GuiObjectListViewType objectListViewType, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { CompiledObjectCollectionView existingView = findOrCreateMatchingView(composite, objectListViewType); compileView(existingView, objectListViewType, task, result); } private CompiledObjectCollectionView findOrCreateMatchingView(CompiledUserProfile composite, GuiObjectListViewType objectListViewType) { QName objectType = objectListViewType.getType(); String viewIdentifier = determineViewIdentifier(objectListViewType); CompiledObjectCollectionView existingView = composite.findObjectCollectionView(objectType, viewIdentifier); if (existingView == null) { existingView = new CompiledObjectCollectionView(objectType, viewIdentifier); composite.getObjectCollectionViews().add(existingView); } return existingView; } private String determineViewIdentifier(GuiObjectListViewType objectListViewType) { String viewIdentifier = objectListViewType.getIdentifier(); if (viewIdentifier != null) { return viewIdentifier; } String viewName = objectListViewType.getName(); if (viewName != null) { // legacy, deprecated return viewName; } CollectionRefSpecificationType collection = objectListViewType.getCollection(); if (collection == null) { return objectListViewType.getType().getLocalPart(); } ObjectReferenceType collectionRef = collection.getCollectionRef(); if (collectionRef == null) { return objectListViewType.getType().getLocalPart(); } return collectionRef.getOid(); } private void compileView(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { compileActions(existingView, objectListViewType); compileAdditionalPanels(existingView, objectListViewType); compileColumns(existingView, objectListViewType); compileDisplay(existingView, objectListViewType); compileDistinct(existingView, objectListViewType); compileSorting(existingView, objectListViewType); compileSearchBox(existingView, objectListViewType); compileCollection(existingView, objectListViewType, task, result); } private void compileActions(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { List<GuiActionType> newActions = objectListViewType.getAction(); for (GuiActionType newAction: newActions) { // TODO: check for action duplication/override existingView.getActions().add(newAction); // No need to clone, CompiledObjectCollectionView is not prism } } private void compileAdditionalPanels(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { GuiObjectListViewAdditionalPanelsType newAdditionalPanels = objectListViewType.getAdditionalPanels(); if (newAdditionalPanels == null) { return; } // TODO: later: merge additional panel definitions existingView.setAdditionalPanels(newAdditionalPanels); } private void compileCollection(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { CollectionRefSpecificationType collectionSpec = objectListViewType.getCollection(); if (collectionSpec == null) { ObjectReferenceType collectionRef = 
objectListViewType.getCollectionRef(); if (collectionRef == null) { return; } // Legacy, deprecated collectionSpec = new CollectionRefSpecificationType(); collectionSpec.setCollectionRef(collectionRef.clone()); } if (existingView.getCollection() != null) { LOGGER.debug("Redefining collection in view {}", existingView.getViewIdentifier()); } existingView.setCollection(collectionSpec); compileCollection(existingView, collectionSpec, task, result); } private void compileCollection(CompiledObjectCollectionView existingView, CollectionRefSpecificationType collectionSpec, Task task, OperationResult result) throws SchemaException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { ObjectReferenceType collectionRef = collectionSpec.getCollectionRef(); QName targetObjectType = existingView.getObjectType(); Class<? extends ObjectType> targetTypeClass = ObjectType.class; if (targetObjectType != null) { targetTypeClass = ObjectTypes.getObjectTypeFromTypeQName(targetObjectType).getClassDefinition(); } QName collectionRefType = collectionRef.getType(); // TODO: support more cases if (QNameUtil.match(ArchetypeType.COMPLEX_TYPE, collectionRefType)) { RefFilter filter = null; filter = (RefFilter) prismContext.queryFor(AssignmentHolderType.class) .item(AssignmentHolderType.F_ARCHETYPE_REF).ref(collectionRef.getOid()) .buildFilter(); filter.setTargetTypeNullAsAny(true); filter.setRelationNullAsAny(true); existingView.setFilter(filter); try { PrismObject<ArchetypeType> archetype = systemObjectCache.getArchetype(collectionRef.getOid(), result); ArchetypePolicyType archetypePolicy = archetype.asObjectable().getArchetypePolicy(); if (archetypePolicy != null) { DisplayType archetypeDisplay = archetypePolicy.getDisplay(); if (archetypeDisplay != null) { DisplayType viewDisplay = existingView.getDisplay(); if (viewDisplay == null) { viewDisplay = new DisplayType(); existingView.setDisplay(viewDisplay); } mergeDisplay(viewDisplay, archetypeDisplay); } } } catch (ObjectNotFoundException e) { // We do not want to throw exception here. This code takes place at login time. // We do not want to stop all logins because of missing archetype. LOGGER.warn("Archetype {} referenced from view {} was not found", collectionRef.getOid(), existingView.getViewIdentifier()); } return; } if (QNameUtil.match(ObjectCollectionType.COMPLEX_TYPE, collectionRefType)) { ObjectCollectionType objectCollectionType; try { // TODO: caching? 
objectCollectionType = objectResolver.resolve(collectionRef, ObjectCollectionType.class, null, "view "+existingView.getViewIdentifier(), task, result); } catch (ObjectNotFoundException e) { throw new ConfigurationException(e.getMessage(), e); } SearchFilterType collectionFilterType = objectCollectionType.getFilter(); ObjectFilter collectionFilter = null; if (collectionFilterType != null) { collectionFilter = prismContext.getQueryConverter().parseFilter(collectionFilterType, targetTypeClass); } CollectionRefSpecificationType baseCollectionSpec = objectCollectionType.getBaseCollection(); if (baseCollectionSpec == null) { existingView.setFilter(collectionFilter); } else { compileCollection(existingView, baseCollectionSpec, task, result); ObjectFilter baseFilter = existingView.getFilter(); ObjectFilter combinedFilter = ObjectQueryUtil.filterAnd(baseFilter, collectionFilter, prismContext); existingView.setFilter(combinedFilter); } return; } // TODO throw new UnsupportedOperationException("Unsupported collection type: " + collectionRefType); } private void mergeDisplay(DisplayType viewDisplay, DisplayType archetypeDisplay) { if (viewDisplay.getLabel() == null) { viewDisplay.setLabel(archetypeDisplay.getLabel()); } if (viewDisplay.getPluralLabel() == null) { viewDisplay.setPluralLabel(archetypeDisplay.getPluralLabel()); } IconType archetypeIcon = archetypeDisplay.getIcon(); if (archetypeIcon != null) { IconType viewIcon = viewDisplay.getIcon(); if (viewIcon == null) { viewIcon = new IconType(); viewDisplay.setIcon(viewIcon); } if (viewIcon.getCssClass() == null) { viewIcon.setCssClass(archetypeIcon.getCssClass()); } if (viewIcon.getColor() == null) { viewIcon.setColor(archetypeIcon.getColor()); } } } private void compileColumns(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { List<GuiObjectColumnType> newColumns = objectListViewType.getColumn(); if (newColumns == null || newColumns.isEmpty()) { return; } // Not very efficient algorithm. But must do for now. 
List<GuiObjectColumnType> existingColumns = existingView.getColumns(); existingColumns.addAll(newColumns); List<GuiObjectColumnType> orderedList = orderCustomColumns(existingColumns); existingColumns.clear(); existingColumns.addAll(orderedList); } private void compileDisplay(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { DisplayType newDisplay = objectListViewType.getDisplay(); if (newDisplay == null) { return; } if (existingView.getDisplay() == null) { existingView.setDisplay(newDisplay); } mergeDisplay(existingView.getDisplay(), newDisplay); } private void compileDistinct(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { DistinctSearchOptionType newDistinct = objectListViewType.getDistinct(); if (newDistinct == null) { return; } existingView.setDistinct(newDistinct); } private void compileSorting(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { Boolean newDisableSorting = objectListViewType.isDisableSorting(); if (newDisableSorting != null) { existingView.setDisableSorting(newDisableSorting); } } private void compileSearchBox(CompiledObjectCollectionView existingView, GuiObjectListViewType objectListViewType) { SearchBoxConfigurationType newSearchBoxConfig = objectListViewType.getSearchBoxConfiguration(); if (newSearchBoxConfig == null) { return; } // TODO: merge existingView.setSearchBoxConfiguration(newSearchBoxConfig); } private void joinForms(ObjectFormsType objectForms, ObjectFormType newForm) { objectForms.getObjectForm().removeIf(currentForm -> isTheSameObjectForm(currentForm, newForm)); objectForms.getObjectForm().add(newForm.clone()); } private void joinObjectDetails(GuiObjectDetailsSetType objectDetailsSet, GuiObjectDetailsPageType newObjectDetails) { objectDetailsSet.getObjectDetailsPage().removeIf(currentDetails -> isTheSameObjectType(currentDetails, newObjectDetails)); objectDetailsSet.getObjectDetailsPage().add(newObjectDetails.clone()); } private boolean isTheSameObjectType(AbstractObjectTypeConfigurationType oldConf, AbstractObjectTypeConfigurationType newConf) { return QNameUtil.match(oldConf.getType(), newConf.getType()); } private boolean isTheSameObjectForm(ObjectFormType oldForm, ObjectFormType newForm){ if (!isTheSameObjectType(oldForm,newForm)) { return false; } if (oldForm.isIncludeDefaultForms() != null && newForm.isIncludeDefaultForms() != null){ return true; } // Both form specifications must be non-null before they are dereferenced below. if (oldForm.getFormSpecification() != null && newForm.getFormSpecification() != null) { String oldFormPanelUri = oldForm.getFormSpecification().getPanelUri(); String newFormPanelUri = newForm.getFormSpecification().getPanelUri(); if (oldFormPanelUri != null && oldFormPanelUri.equals(newFormPanelUri)) { return true; } String oldFormPanelClass = oldForm.getFormSpecification().getPanelClass(); String newFormPanelClass = newForm.getFormSpecification().getPanelClass(); if (oldFormPanelClass != null && oldFormPanelClass.equals(newFormPanelClass)) { return true; } String oldFormRefOid = oldForm.getFormSpecification().getFormRef() == null ? null : oldForm.getFormSpecification().getFormRef().getOid(); String newFormRefOid = newForm.getFormSpecification().getFormRef() == null ? 
null : newForm.getFormSpecification().getFormRef().getOid(); if (oldFormRefOid != null && oldFormRefOid.equals(newFormRefOid)) { return true; } } return false; } private void mergeWidget(CompiledUserProfile composite, DashboardWidgetType newWidget) { String newWidgetIdentifier = newWidget.getIdentifier(); DashboardWidgetType compositeWidget = composite.findUserDashboardWidget(newWidgetIdentifier); if (compositeWidget == null) { composite.getUserDashboard().getWidget().add(newWidget.clone()); } else { mergeWidget(compositeWidget, newWidget); } } private void mergeWidget(DashboardWidgetType compositeWidget, DashboardWidgetType newWidget) { mergeFeature(compositeWidget, newWidget, UserInterfaceElementVisibilityType.VACANT); // merge other widget properties (in the future) } private void mergeFeature(CompiledUserProfile composite, UserInterfaceFeatureType newFeature) { String newIdentifier = newFeature.getIdentifier(); UserInterfaceFeatureType compositeFeature = composite.findFeature(newIdentifier); if (compositeFeature == null) { composite.getFeatures().add(newFeature.clone()); } else { mergeFeature(compositeFeature, newFeature, UserInterfaceElementVisibilityType.AUTOMATIC); } } private <T extends UserInterfaceFeatureType> void mergeFeature(T compositeFeature, T newFeature, UserInterfaceElementVisibilityType defaultVisibility) { UserInterfaceElementVisibilityType newCompositeVisibility = mergeVisibility(compositeFeature.getVisibility(), newFeature.getVisibility(), defaultVisibility); compositeFeature.setVisibility(newCompositeVisibility); } private UserInterfaceElementVisibilityType mergeVisibility( UserInterfaceElementVisibilityType compositeVisibility, UserInterfaceElementVisibilityType newVisibility, UserInterfaceElementVisibilityType defaultVisibility) { if (compositeVisibility == null) { compositeVisibility = defaultVisibility; } if (newVisibility == null) { newVisibility = defaultVisibility; } if (compositeVisibility == UserInterfaceElementVisibilityType.HIDDEN || newVisibility == UserInterfaceElementVisibilityType.HIDDEN) { return UserInterfaceElementVisibilityType.HIDDEN; } if (compositeVisibility == UserInterfaceElementVisibilityType.VISIBLE || newVisibility == UserInterfaceElementVisibilityType.VISIBLE) { return UserInterfaceElementVisibilityType.VISIBLE; } if (compositeVisibility == UserInterfaceElementVisibilityType.AUTOMATIC || newVisibility == UserInterfaceElementVisibilityType.AUTOMATIC) { return UserInterfaceElementVisibilityType.AUTOMATIC; } return UserInterfaceElementVisibilityType.VACANT; } /* the ordering algorithm is: the first level is occupied by the column which previousColumn == null || "" || notExistingColumnNameValue. Each next level contains columns which previousColumn == columnNameFromPreviousLevel */ private List<GuiObjectColumnType> orderCustomColumns(List<GuiObjectColumnType> customColumns){ if (customColumns == null || customColumns.size() == 0){ return new ArrayList<>(); } List<GuiObjectColumnType> customColumnsList = new ArrayList<>(customColumns); List<String> previousColumnValues = new ArrayList<>(); previousColumnValues.add(null); previousColumnValues.add(""); Map<String, String> columnRefsMap = new HashMap<>(); for (GuiObjectColumnType column : customColumns){ columnRefsMap.put(column.getName(), column.getPreviousColumn() == null ? 
"" : column.getPreviousColumn()); } List<String> temp = new ArrayList<> (); int index = 0; while (index < customColumns.size()){ int sortFrom = index; for (int i = index; i < customColumnsList.size(); i++){ GuiObjectColumnType column = customColumnsList.get(i); if (previousColumnValues.contains(column.getPreviousColumn()) || !columnRefsMap.containsKey(column.getPreviousColumn())){ Collections.swap(customColumnsList, index, i); index++; temp.add(column.getName()); } } if (temp.size() == 0){ temp.add(customColumnsList.get(index).getName()); index++; } if (index - sortFrom > 1){ customColumnsList.subList(sortFrom, index - 1) .sort((o1, o2) -> String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName())); } previousColumnValues.clear(); previousColumnValues.addAll(temp); temp.clear(); } return customColumnsList; } public CompiledUserProfile getGlobalCompiledUserProfile(Task task, OperationResult parentResult) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { PrismObject<SystemConfigurationType> systemConfiguration = systemObjectCache.getSystemConfiguration(parentResult); if (systemConfiguration == null) { return null; } List<AdminGuiConfigurationType> adminGuiConfigurations = new ArrayList<>(); CompiledUserProfile compiledUserProfile = compileUserProfile(adminGuiConfigurations, systemConfiguration, task, parentResult); // TODO: cache compiled profile return compiledUserProfile; } }
#! /bin/env vtkpython

import sys, os
from vtk import *

dset = sys.argv[1]
num = int(sys.argv[2])

base = dset.rsplit('.', 1)[0]
ext = dset.rsplit('.', 1)[1]

if ext == 'nc':
    r = vtkMPASReader()
    r.SetFileName(dset)
    r.SetLayerThickness(1)
    r.SetCenterLon(0)
    r.SetProjectLatLon(True)
    r.SetShowMultilayerView(True)
    r.SetVerticalLevel(0)
    r.UpdateInformation()
    for i in range(r.GetNumberOfCellArrays()):
        var = r.GetCellArrayName(i)
        if var in sys.argv[3:]:
            r.SetCellArrayStatus(var, True)
        else:
            r.SetCellArrayStatus(var, False)
    for i in range(r.GetNumberOfPointArrays()):
        var = r.GetPointArrayName(i)
        if var in sys.argv[3:]:
            r.SetPointArrayStatus(var, True)
        else:
            r.SetPointArrayStatus(var, False)
elif ext == 'vtu':
    r = vtkXMLUnstructuredGridReader()
    r.SetFileName(dset)
else:
    print 'unknown file type: ', ext
    sys.exit(1)

print 'loading data...'
r.Update()

print 'data loaded, building Kd Tree ...'
kd = vtkKdTree()
kd.SetDataSet(r.GetOutput())
kd.SetNumberOfRegionsOrMore(num)
kd.OmitZPartitioning()

print '... building Locator'
kd.BuildLocator()

print '... building cell lists'
kd.IncludeRegionBoundaryCellsOn()
kd.CreateCellLists()
kd.Update()
print 'Kd Tree built'

b = open(base + '.rgn', 'w')
for i in range(kd.GetNumberOfRegions()):
    d = [0] * 6
    kd.GetRegionBounds(i, d)
    b.write('%f %f %f %f %f %f\n' % tuple(d))
b.close()

ec = vtkExtractCells()
ec.SetInputConnection(r.GetOutputPort())

pa = vtkPassArrays()
pa.SetInputConnection(ec.GetOutputPort())
pa.AddCellDataArray('vtkOriginalCellIds')
pa.RemoveArraysOn()

to_tets = vtkDataSetTriangleFilter()
to_tets.SetInputConnection(pa.GetOutputPort())

aa = vtkAssignAttribute()
aa.SetInputConnection(to_tets.GetOutputPort())
aa.Assign('salinity', 'SCALARS', 'POINT_DATA')

trim = vtkThreshold()
trim.SetInputConnection(aa.GetOutputPort())
trim.ThresholdByUpper(-1e30)
trim.AllScalarsOn()

w = vtkXMLUnstructuredGridWriter()
w.SetInputConnection(trim.GetOutputPort())

for i in range(kd.GetNumberOfRegions()):
    ofile = '%s-%d.vtu' % (dset.rsplit('.', 1)[0], i)
    if os.path.isfile(ofile):
        print 'skipping ', ofile
        continue
    i0 = kd.GetCellList(i)
    i1 = kd.GetBoundaryCellList(i)
    pids = vtkIdList()
    # size once for interior + boundary cells (a second, smaller SetNumberOfIds
    # call here was redundant and has been dropped)
    pids.SetNumberOfIds(i0.GetNumberOfIds() + i1.GetNumberOfIds())
    for j in range(i0.GetNumberOfIds()):
        pids.InsertId(j, i0.GetId(j))
    for j in range(i1.GetNumberOfIds()):
        pids.InsertId(j + i0.GetNumberOfIds(), i1.GetId(j))
    ec.SetCellList(pids)
    w.SetFileName('%s-%d.vtu' % (dset.rsplit('.', 1)[0], i))
    w.Update()
    print 'wrote part %d of %d' % (i, kd.GetNumberOfRegions())
package day2

import (
    "bufio"
    "fmt"
    "io"
    "os"
    "strconv"
    "strings"
)

type Entry struct {
    policy Policy
    pass   string
}

type Policy struct {
    char byte
    lo   int
    hi   int
}

func Day2() {
    input, err := parseInput()
    if err != nil {
        panic(err)
    }
    fmt.Printf("Day 2 part 1 answer is %d\n", CountValidPasswords1(input))
    fmt.Printf("Day 2 part 2 answer is %d\n", CountValidPasswords2(input))
}

func CountValidPasswords1(input []*Entry) int {
    out := 0
    for _, entry := range input {
        if isValid1(entry) {
            out++
        }
    }
    return out
}

func CountValidPasswords2(input []*Entry) int {
    out := 0
    for _, entry := range input {
        if isValid2(entry) {
            out++
        }
    }
    return out
}

func isValid1(entry *Entry) bool {
    count := 0
    for _, r := range entry.pass {
        if byte(r) == entry.policy.char {
            count++
        }
        if count > entry.policy.hi {
            return false
        }
    }
    return count >= entry.policy.lo
}

// isValid2 requires that exactly one of the (1-based) positions lo and hi holds
// the policy character, while guarding against positions beyond the password end.
func isValid2(entry *Entry) bool {
    return len(entry.pass) >= entry.policy.lo &&
        ((entry.pass[entry.policy.lo-1] == entry.policy.char &&
            (len(entry.pass) >= entry.policy.hi && entry.pass[entry.policy.hi-1] != entry.policy.char ||
                len(entry.pass) < entry.policy.hi)) ||
            (entry.pass[entry.policy.lo-1] != entry.policy.char &&
                len(entry.pass) >= entry.policy.hi && entry.pass[entry.policy.hi-1] == entry.policy.char))
}

func parseInput() ([]*Entry, error) {
    f, err := os.Open("./input.txt")
    if err != nil {
        return nil, err
    }
    return readInput(f)
}

func readInput(r io.Reader) ([]*Entry, error) {
    scanner := bufio.NewScanner(r)
    scanner.Split(bufio.ScanLines)
    var result []*Entry
    for scanner.Scan() {
        // 1-10 a: abcd -> ["1-10 a", "abcd"]
        x := strings.SplitN(scanner.Text(), ": ", 2)
        // 1-10 a -> ["1-10", "a"]
        p := strings.SplitN(x[0], " ", 2)
        // 1-10 -> ["1", "10"]
        r := strings.SplitN(p[0], "-", 2)
        lo, err := strconv.Atoi(r[0])
        if err != nil {
            return result, err
        }
        hi, err := strconv.Atoi(r[1])
        if err != nil {
            return result, err
        }
        result = append(result, &Entry{
            pass: x[1],
            policy: Policy{
                char: p[1][0],
                lo:   lo,
                hi:   hi,
            },
        })
    }
    return result, scanner.Err()
}
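The boolean expression in isValid2 above is hard to scan; it encodes "exactly one of the two 1-based positions holds the policy character". A minimal Python restatement of that exclusive-or rule (names hypothetical; it ignores the extra out-of-range guards the Go version carries, which never fire on well-formed puzzle input):

def is_valid2(password, char, lo, hi):
    at = lambda i: i <= len(password) and password[i - 1] == char
    return at(lo) != at(hi)   # != on booleans is exclusive-or

assert is_valid2("abcde", "a", 1, 3) is True       # only position 1 matches
assert is_valid2("ccccccccc", "c", 2, 9) is False  # both positions match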
package service;

/**
 * @Author: lsg
 * @Date: 2019/12/27 10:45
 * @Description:
 */
public class Test {

    public static void main(String[] args) {
        ThreadService threadService = new ThreadService();
        for (int i = 0; i < 10; i++) {
            threadService.schedule(new CallbacksHandler() {
                @Override
                public void onSuccess(int count) throws Exception {
                    System.out.println("processing succeeded");
                }

                @Override
                public void onFail(int count) throws Exception {
                    System.out.println("processing failed");
                }

                @Override
                public void onUnknown(int count) throws Exception {
                    System.out.println("processing unknown");
                }
            });
        }
    }
}
def interpretLossAngles(coat):
    if 'lossBhighn' in coat and 'lossShighn' in coat:
        if 'lossBhighn_slope' in coat:
            def lossBhighn(f):
                return coat.lossBhighn * (f / 100)**coat.lossBhighn_slope
        else:
            def lossBhighn(f):
                return coat.lossBhighn
        if 'lossShighn_slope' in coat:
            def lossShighn(f):
                return coat.lossShighn * (f / 100)**coat.lossShighn_slope
        else:
            def lossShighn(f):
                return coat.lossShighn
    else:
        if 'Phihighn_slope' in coat:
            def Phihighn(f):
                return coat.Phihighn * (f / 100)**coat.Phihighn_slope
            lossBhighn = lossShighn = Phihighn
        else:
            lossBhighn = lossShighn = lambda f: coat.Phihighn

    if 'lossBlown' in coat and 'lossSlown' in coat:
        if 'lossBlown_slope' in coat:
            def lossBlown(f):
                return coat.lossBlown * (f / 100)**coat.lossBlown_slope
        else:
            def lossBlown(f):
                return coat.lossBlown
        if 'lossSlown_slope' in coat:
            def lossSlown(f):
                return coat.lossSlown * (f / 100)**coat.lossSlown_slope
        else:
            def lossSlown(f):
                return coat.lossSlown
    else:
        if 'Philown_slope' in coat:
            def Philown(f):
                return coat.Philown * (f / 100)**coat.Philown_slope
            lossBlown = lossSlown = Philown
        else:
            lossBlown = lossSlown = lambda f: coat.Philown

    return lossBhighn, lossShighn, lossBlown, lossSlown
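A small usage sketch for the function above. It is entirely hypothetical: it assumes coat is a struct-like object that supports both 'in' membership tests and attribute access, which is what the code requires; the Coat stand-in class below is invented for illustration.

from types import SimpleNamespace

class Coat(SimpleNamespace):
    # hypothetical stand-in: attribute access plus `in` membership
    def __contains__(self, key):
        return hasattr(self, key)

coat = Coat(Phihighn=3e-4, Phihighn_slope=0.1, Philown=1e-4)
lossBhighn, lossShighn, lossBlown, lossSlown = interpretLossAngles(coat)
print(lossBhighn(100.0))  # 3e-4: the power law is anchored at the 100 Hz reference
print(lossBlown(123.0))   # 1e-4: no slope given, so frequency-independent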
def move_coords(self, mask_coords, direction, stride=1):
    batch_size = len(mask_coords)
    new_coords = np.copy(mask_coords)
    for b in range(batch_size):
        new_coords[b] = self.move_coords_single(mask_coords[b], direction, stride)
    return new_coords
n = int(input())
s = input()
count = {0: 0, 1: 0}
r = []
for c in s:
    if c == '(':
        binn = min(count.items(), key=lambda p: p[1])
        count[binn[0]] += 1
        r.append(binn[0])
    else:
        binn = max(count.items(), key=lambda p: p[1])
        count[binn[0]] -= 1
        r.append(binn[0])
print(''.join([str(c) for c in r]))
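The greedy above pushes each '(' to the currently less-loaded colour and each ')' to the more-loaded one. Assuming the task is to split a balanced bracket sequence into two balanced subsequences while keeping nesting shallow, an equivalent and perhaps clearer formulation colours each bracket by the parity of its nesting depth; it can emit a different, but equally valid, answer than the greedy:

def colour_by_depth(s):
    # an opening bracket at depth d and its matching closer get colour d % 2
    depth, out = 0, []
    for c in s:
        if c == '(':
            out.append(str(depth % 2))
            depth += 1
        else:
            depth -= 1
            out.append(str(depth % 2))
    return ''.join(out)

print(colour_by_depth("(())"))  # 0110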
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <string>
#include <algorithm>
using namespace std;

int main() {
    /*
     * Original Python prototype:
     *
     * n = int(raw_input())
     * g_sum = 0
     * a_sum = 0
     * rez = ""
     * for i in range(n):
     *     a,g = map(int,raw_input().split(" "))
     *     if g_sum == a_sum:
     *         if a < g:
     *             rez += "A"
     *             a_sum += a
     *         else:
     *             rez += "G"
     *             g_sum += g
     *     else:
     *         if a_sum > g_sum:
     *             if a_sum-g_sum + a > abs(a_sum-g_sum - g):
     *                 rez += "G"
     *                 g_sum += g
     *             else:
     *                 rez += "A"
     *                 a_sum += a
     *         else:
     *             if g_sum - a_sum + g > abs(g_sum - a_sum -a):
     *                 rez += "A"
     *                 a_sum += a
     *             else:
     *                 rez += "G"
     *                 g_sum += g
     * print rez
     */
    long long n, g_sum, a_sum, a, g;
    cin >> n;
    g_sum = 0;
    a_sum = 0;
    string rez = "";
    for (int i = 0; i < n; i++) {
        cin >> a;
        cin >> g;
        if (g_sum == a_sum) {
            if (a < g) {
                rez.push_back('A');
                a_sum += a;
            } else {
                rez.push_back('G');
                g_sum += g;
            }
        } else {
            if (a_sum > g_sum) {
                if (a_sum - g_sum + a > abs(a_sum - g_sum - g)) {
                    rez.push_back('G');
                    g_sum += g;
                } else {
                    rez.push_back('A');
                    a_sum += a;
                }
            } else {
                if (g_sum - a_sum + g > abs(g_sum - a_sum - a)) {
                    rez.push_back('A');
                    a_sum += a;
                } else {
                    rez.push_back('G');
                    g_sum += g;
                }
            }
        }
    }
    cout << rez;
    return 0;
}
#include "graphic/renderer/renderer_manager.h" #include "graphic/renderer/dx12/dx12_renderer.h" namespace BriskEngine { #ifdef GRAPHIC_API_TYPE_DX12 DX12Renderer g_Renderer; #elif defined GRAPHIC_API_TYPE_NA NARenderer g_Renderer; #endif BaseRenderer* g_pRenderer = &g_Renderer; }
def check_version(self, version: str) -> bool:
    current = AwesomeVersion(self.software_version)
    required = AwesomeVersion(version)
    return current >= required
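For readers without the surrounding class, a self-contained sketch of the same comparison, assuming AwesomeVersion comes from the awesomeversion package (which supports rich comparisons across version schemes):

from awesomeversion import AwesomeVersion  # assumed source of the import

def check_version(software_version: str, required: str) -> bool:
    # True when the installed version satisfies the minimum requirement.
    return AwesomeVersion(software_version) >= AwesomeVersion(required)

print(check_version("2021.12.1", "2021.6.0"))  # True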
By Michael Chow for USA TODAY: Louis Hudgin of Gilbert, Ariz., squeezes as much mpg as he can get from his 2000 Honda Insight.

By Michael Chow for USA TODAY: Louis Hudgin says he's averaged 94.2 miles per gallon on his Honda Insight the last two years. He fills the tank about once a month.

GILBERT, Ariz. — After a 29-mile jaunt from his Phoenix office to his home here, Louis Hudgin proclaimed his gas mileage "pitiful." He averaged just 88.3 miles per gallon.

MAXIMIZING MPG: What experts think of hypermiler techniques
TELL US: How do you squeeze the most miles out of every gallon?
ACROSS THE USA: Drivers slow down as costs accelerate

Most drivers would take a victory lap if they managed to squeeze that kind of mileage out of increasingly precious gasoline. Even on this, a bad day, Hudgin coaxed 28 mpg more out of his 2000 Honda Insight hybrid than its federal highway mpg rating.

Hudgin's disappointment — he usually averages about 100 mpg this time of year — stems from his pride in being no ordinary driver. He's a hypermiler, part of a loose-knit legion of commuters who've made racking up seemingly unattainable mpg an art. And a sport.

Hypermilers practice such unorthodox techniques as coasting for blocks with their car's engine turned off, driving far below speed limits on the freeway, pumping up tire pressure far beyond car and tire makers' recommendations and carefully manipulating the gas pedal to avoid fuel-burning excess.

They endure not only occasional honks from other motorists angry at their slow-poke ways, but intentional discomforts, as well. Like keeping the air conditioning off and windows barely cracked on a more than 90-degree day. Or parking in the boondocks at shopping centers so they can motor head-first toward the exit rather than backing out of a space. Just about anything for an extra one, two, maybe even four mpg.

With their odd fixation and log-book scribbles obsessively tracking their mileage, the hypermiler community might in other times be typecast as tightwad eccentrics. But in an era of $4-plus-a-gallon gas, they're garnering increasing attention as driving superstars, even saviors of the planet.

"More power to them if that's important to them, and they are accomplishing a goal that also benefits society," says Ron Cogan, publisher of Green Car Journal.

Automakers are taking notice. Honda will install an instrument in a new hybrid it will roll out next year that cues drivers for gas-saving actions, such as when to ease off the accelerator, says spokesman Sage Marie.

Hypermilers share their triumphs and secrets on a handful of websites. They also gather in some cities as a subset of clubs for hybrid-car owners, which many now are.

Driving safety advocates laud some of their habits — but heap scorn on others.

"Probably the most beneficial aspect of hypermiling is its emphasis on a less aggressive approach," says Geoff Sundstrom, spokesman for AAA, formerly the American Automobile Association. "The downside of hypermiling is some of the techniques can be extremely dangerous."

Turning the engine off while coasting can, in some cars, leave the driver without power steering or brakes and allow the possibility that the steering wheel will lock up. Drivers can endanger themselves and others if they go too slow for the pace of traffic.

How about that urge to "draft" trucks — follow close behind for less wind resistance — on the interstate? "There's another term for that. We call it tailgating," Sundstrom deadpans.

Safety first

Hudgin, a 56-year-old professional pilot, says serious hypermilers always put safety before mileage. Sure, he doesn't mind using a truck as a windbreak — but only, he says, if he can stay at least three seconds back.

During the afternoon drive with a reporter, he observes all traffic laws in a mix of city and highway driving. But having to make a stop at Phoenix Sky Harbor International Airport, grabbing a quick taco at a Mexican restaurant and getting stuck in rush-hour traffic on Interstate 10 take their toll on his usual mpg.

Normally, Hudgin says, he can roll up 100 mpg in warm weather, about 90 in the winter. But on this day, the kiss of mileage death came when his hybrid's battery started recharging, which causes the hybrid's gasoline engine to work harder and burn more gas. Those few minutes on the freeway slashed his mileage temporarily to a paltry 48 mpg, according to the car's computer. That's 12 below the two-seat, three-cylinder Insight's EPA mpg rating for highway driving.

Hudgin is not new to the mileage game. When Hudgin was young, an uncle drove a 1955 Morris Minor, a small English car, to Canada and bragged about 50 mpg. "I thought, 'That's phenomenal.' And it made an impression on me."

Hudgin has owned a series of compact vehicles known for their little engines and big mpg, such as a Subaru Justy and Chevy Sprint. He had a long commute, he says, and needed dependable and cheap cars.

Some mileage tricks he found himself. "I started going the back way, slowed down from 55 to 45 miles per hour and saw an increase of 16 mpg," says Hudgin, who flies executive planes for the state of Arizona. Slow going on the byways tacked 19 minutes onto his commute, but he says he didn't mind.

In 2001, he bought his then-slightly-used Insight, an odd-looking car that was the first hybrid sold in the USA, though in limited numbers. It got the highest EPA mileage rating of any model until it was discontinued. He got 59.6 mpg from his first tank of gas. Not bad, he says, but around 2005, he discovered the world of hypermiling, including Internet sites such as CleanMPG.com, Greenhybrid.com and several others where hypermilers share mileage tactics.

Finding miles online

Hudgin says tips culled from the sites helped him improve until in 2006, he averaged 112 mpg on a tankful while practicing to be part of a six-man team for the Insight Marathon, a mileage contest. All the claims are unverified, but Hudgin and other hypermilers say the proof is the car's own mileage computer.

Others claim similar triumphs. Randall Burkholder of Blackwell, Okla., whose Insight was used in the marathon, says the team got 164 mpg. He says 100 mpg is pretty routine on his 82-mile round-trip commute to work as a machine programmer. "We know what the cars are capable of," he says.

Another hypermiler, Sean Welch of Coon Rapids, Minn., says he often gets more than a thousand miles out of the 10.6-gallon gas tank on his Insight. He says he uses the same techniques in his non-hybrid 2002 Hyundai Elantra and also gets astounding mileage.

"Hypermiling is a whole suite of tools. It's half science and the other half is art, knowing when to apply them," says Welch, 31.

Some of the science comes from a gauge hypermilers put into cars that don't have one built in that offers a continuous reading of their gas mileage at any moment. Without such gauges, it's hard to figure out what works or what doesn't.

"There's no magic pill," Hudgin says. "You really need to change your driving style. If you're talking on your cellphone (behind the wheel), it isn't going to happen. You have to concentrate on your driving."

Hypermilers have their own lexicon for their tricks. There's "pulse and glide," in which the driver speeds up then shifts into neutral or turns off the engine to coast. There is "ridge riding," which is driving the car off center in a lane to keep its tires out of the tracks worn into pavement from years of traffic.

A natural result, of course, of all this intense study is competition for bragging rights as top hypermiler. Many will gather in Madison, Wis., on July 19-20 for Hybridfest, which will pit hybrid owners from Florida to California in a contest to see who can get the best gas mileage over a scenic, winding, 30-mile loop. The entry classes cover a wide range of hybrids, from tiny Insights to one for the guy bringing a new Chevy Tahoe full-size SUV hybrid that is government rated at 21 mpg. "He's pretty sure he can get 35," says Eric Powers, who is organizing the event as part of the Dane County Fair.

Adapting to road, weather

The strategy behind winning such rivalries is knowing which techniques to use at different times, based on weather and road conditions. Not all work on any given day. "If you leave some on the table, that's fine," says Wayne Gerdes, who runs hypermiling site CleanMPG.com and holds seminars. Gerdes says he gets more than 40 mpg out of his 2003 Ford Ranger pickup.

Drivers don't have to be as extreme as the top hypermilers, however, to get better mileage. Slowing down and laying off the brakes has helped California Highway Patrol Officer Heather Hoglund get about 10 mpg more out of her private car, a Toyota Prius hybrid. But she says she's cautious about using hypermiling techniques: "Don't get so hooked on your hypermiling you don't put your foot on the brake when you need to."

Using a little gas for comfort

Hudgin's wife, Laural, says she happily uses her air conditioning and has stopped turning her engine off during stoplight waits "because it dragged down the battery." Yet, by driving smoothly in her diesel-powered 2004 Volkswagen Golf and carefully trying to time stop lights to avoid waits, she averages nearly 50 mpg, she says, about 10 mpg better than the car's EPA rating.

"She's a hypermiler in her own right," Louis Hudgin says. But like any spouse, he admits, "I have to bite my tongue while she's driving."

TELL US: How do you make every mile per gallon last?
from .solverwrapper import SolverWrapper
import logging
import numpy as np
from ..constraint import ConstraintType

logger = logging.getLogger(__name__)

try:
    import cvxpy
    FOUND_CVXPY = True
except ImportError:
    logger.info("CVXPY installation not found.")
    FOUND_CVXPY = False

try:
    import mosek
    FOUND_MOSEK = True
except ImportError:
    logger.info("Mosek installation not found!")
    FOUND_MOSEK = False


class cvxpyWrapper(SolverWrapper):
    """A solver wrapper using `cvxpy`.

    Parameters
    ----------
    constraint_list: list of :class:`.Constraint`
        The constraints the robot is subjected to.
    path: :class:`.Interpolator`
        The geometric path.
    path_discretization: array
        The discretized path positions.
    """

    def __init__(self, constraint_list, path, path_discretization):
        super(cvxpyWrapper, self).__init__(constraint_list, path, path_discretization)
        valid_types = [ConstraintType.CanonicalLinear, ConstraintType.CanonicalConic]
        # Currently only support Canonical Linear Constraint
        for constraint in constraint_list:
            if constraint.get_constraint_type() not in valid_types:
                raise NotImplementedError

    def solve_stagewise_optim(self, i, H, g, x_min, x_max, x_next_min, x_next_max):
        assert i <= self.N and 0 <= i

        ux = cvxpy.Variable(2)
        u = ux[0]
        x = ux[1]
        cvxpy_constraints = []

        if x_min is not None:
            cvxpy_constraints.append(x_min <= x)
        if x_max is not None:
            cvxpy_constraints.append(x <= x_max)

        if i < self.N:
            delta = self.get_deltas()[i]
            if x_next_min is not None:
                cvxpy_constraints.append(x_next_min <= x + 2 * delta * u)
            if x_next_max is not None:
                cvxpy_constraints.append(x + 2 * delta * u <= x_next_max)

        for k, constraint in enumerate(self.constraints):
            if constraint.get_constraint_type() == ConstraintType.CanonicalLinear:
                a, b, c, F, h, ubound, xbound = self.params[k]
                if a is not None:
                    v = a[i] * u + b[i] * x + c[i]
                    cvxpy_constraints.append(F[i] * v <= h[i])
                if ubound is not None:
                    cvxpy_constraints.append(ubound[i, 0] <= u)
                    cvxpy_constraints.append(u <= ubound[i, 1])
                if xbound is not None:
                    cvxpy_constraints.append(xbound[i, 0] <= x)
                    cvxpy_constraints.append(x <= xbound[i, 1])
            elif constraint.get_constraint_type() == ConstraintType.CanonicalConic:
                a, b, c, P = self.params[k]
                d = a.shape[1]
                for j in range(d):
                    cvxpy_constraints.append(
                        a[i, j] * u + b[i, j] * x + c[i, j]
                        + cvxpy.norm(P[i, j].T[:, :2] * ux + P[i, j].T[:, 2]) <= 0)

        if H is None:
            H = np.zeros((self.get_no_vars(), self.get_no_vars()))

        objective = cvxpy.Minimize(0.5 * cvxpy.quad_form(ux, H) + g * ux)
        problem = cvxpy.Problem(objective, constraints=cvxpy_constraints)
        # if FOUND_MOSEK:
        #     problem.solve(solver='MOSEK')
        # else:
        #     problem.solve()
        if logger.getEffectiveLevel() == logging.DEBUG:
            verbose = True
        else:
            verbose = False
        problem.solve(verbose=verbose)

        if problem.status == cvxpy.OPTIMAL or problem.status == cvxpy.OPTIMAL_INACCURATE:
            return np.array(ux.value).flatten()
        else:
            res = np.empty(self.get_no_vars())
            res[:] = np.nan
            return res
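The stagewise problem above is just a two-variable QP. A minimal, free-standing cvxpy sketch of the same pattern (hypothetical numbers, no TOPP-RA types) may help readers unfamiliar with the library:

import numpy as np
import cvxpy

# One stage: variables (u, x), objective 0.5*ux^T H ux + g^T ux, box bounds on x.
ux = cvxpy.Variable(2)                       # (u, x), as in the wrapper above
H = np.eye(2)                                # PSD matrix, required by quad_form
g = np.array([1.0, -0.5])
constraints = [0.0 <= ux[1], ux[1] <= 1.0]   # x_min <= x <= x_max
objective = cvxpy.Minimize(0.5 * cvxpy.quad_form(ux, H) + g @ ux)
problem = cvxpy.Problem(objective, constraints)
problem.solve()
print(problem.status, ux.value)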
#ifndef __AKA_MATROSKA_IO_CALLBACK_H
#define __AKA_MATROSKA_IO_CALLBACK_H

#include <cstdio>
#include <cstdint>

namespace AkaMatroska {
namespace Core {

struct IOCallback {
    virtual unsigned Read(void* buf, unsigned size) { return 0; }
    virtual unsigned Write(const void* buf, unsigned size) = 0;
    virtual int64_t GetPosition() = 0; // same as ftell
    virtual int64_t SetPosition(int64_t offset) = 0; // same as lseek (-1 on failure)
    virtual int64_t GetSize() { return 0; }

protected:
    virtual ~IOCallback() {}
};

}}
#endif //__AKA_MATROSKA_IO_CALLBACK_H
#include <Scheduler.h>
#include <Process.h>
#include <liballoc.h>
#include <asmlib.h>
#include <stdbool.h>
#include <Mutex.h>
#include <trylock.h>

#define NULL 0

typedef struct ProcessNode ProcessNode;
typedef struct SleepNode SleepNode;

struct ProcessNode {
    Process * p;
    uint8_t * descr;
    uint8_t skip;
    SleepNode * sn;
    ProcessNode * next;
    ProcessNode * prev;
};

struct SleepNode {
    uint64_t ticks;
    ProcessNode * pn;
    SleepNode * next;
    SleepNode * prev;
};

extern void togglelock();
extern void linitlock();
extern void fifoinitlock();
extern void changeContextFromRsp(uint64_t rsp);

void schedule();
void sleep(uint64_t ticks);
static ProcessNode * deleteProcessNode(ProcessNode * p);
static void deleteSleepNode(SleepNode * sn);
static SleepNode * decrementTicksR(SleepNode * sn);

ProcessNode * current = NULL;
Process * foreground = NULL;
static int process_count = 0;
static bool schelduler_interrupts = true;
SleepNode * sleeping = NULL;
TryLock * tl = NULL;

uint64_t insertProcess(void * entry_point, uint64_t rax, uint64_t rdi, uint64_t rsi, uint8_t * descr, uint8_t fg) {
    Process * p;
    if (current == NULL) {
        p = newProcess(entry_point, rax, rdi, rsi, 0, fg);
    } else {
        p = newProcess(entry_point, rax, rdi, rsi, current->p->pid, fg);
    }
    if (fg) {
        if (foreground == NULL) {
            foreground = p;
        } else {
            foreground->fg = 0;
            foreground = p;
        }
    }
    ProcessNode * pnode = la_malloc(sizeof(ProcessNode));
    pnode->p = p;
    pnode->descr = descr;
    pnode->skip = 0;
    pnode->sn = NULL;
    if (current == NULL) {
        current = pnode;
        pnode->next = pnode;
        pnode->prev = pnode;
    } else {
        ProcessNode * tmp = current->next;
        current->next = pnode;
        pnode->next = tmp;
        pnode->prev = current;
        tmp->prev = pnode;
    }
    process_count++;
    return p->pid;
}

void deleteProcessScheduler(uint64_t pid) {
    if (pid == 0 || pid == 1) {
        return;
    }
    if (current->p->pid == pid) {
        exit();
    } else {
        ProcessNode * curr = current;
        for (int i = 0; i < process_count; i++, curr = curr->next) {
            if (curr->p->pid == pid) {
                ProcessNode * cn = curr;
                if (cn->p->fg == 1) {
                    giveFg(cn->p->ppid);
                }
                deleteProcessNode(cn);
                deleteProcess(cn->p);
                la_free(cn);
                process_count--;
                break;
            }
        }
    }
}

void schedule() {
    current = current->next;
    for (int i = 0; i < process_count; i++, current = current->next) {
        if (!current->skip) {
            break;
        }
    }
    return;
}

void * switchStackPointer(void * rsp) {
    current->p->stack_pointer = rsp;
    if (trylock(tl)) {
        schedule();
        tryunlock(tl);
    }
    return current->p->stack_pointer;
}

uint64_t fkexec(void * entry_point, uint8_t * descr, Args * args) {
    return insertProcess(entry_point, 0, args->argc, args->argv, descr, args->fg);
}

void begin() {
    tl = tryinit();
    linitlock();
    fifoinitlock();
    togglelock();
    _sti();
    ((void (*)(void))(current->p->entry_point))();
}

void exit() {
    ProcessNode * cn = current;
    if (cn->p->fg == 1) {
        giveFg(cn->p->ppid);
    }
    deleteProcessNode(cn);
    deleteProcess(cn->p);
    la_free(cn);
    process_count--;
    schedule();
    changeContextFromRsp(current->p->stack_pointer);
}

void * ps() {
    ProcessInfo * ans = la_malloc(sizeof(ProcessInfo));
    ans->process_count = process_count;
    ans->PIDs = la_malloc(sizeof(uint64_t) * process_count);
    ans->descrs = la_malloc(sizeof(uint8_t *) * process_count);
    ans->status = la_malloc(sizeof(uint8_t *) * process_count);
    ProcessNode * curr = current;
    for (int i = 0; i < process_count; i++, curr = curr->next) {
        (ans->PIDs)[i] = curr->p->pid;
        (ans->descrs)[i] = curr->descr;
        if (curr == current) {
            /* running */
            (ans->status)[i] = 0;
        } else if (curr->skip) {
            /* sleeping */
            (ans->status)[i] = 1;
        } else {
            /* ready */
            (ans->status)[i] = 2;
        }
    }
    return (void *)ans;
}

void sys_sleep(uint64_t ticks) {
    ProcessNode * cn = current;
    cn->skip = 1;
    SleepNode * sn = cn->sn;
    if (sn != NULL) { /* No process should enter here */
        sn->ticks = ticks;
        yield();
        return;
    }
    sn = la_malloc(sizeof(SleepNode));
    sn->ticks = ticks;
    sn->pn = cn;
    cn->sn = sn;
    if (sleeping == NULL) {
        sleeping = sn;
        sn->next = NULL;
        sn->prev = NULL;
    } else {
        sn->next = sleeping;
        sleeping->prev = sn;
        sleeping = sn;
    }
    yield();
}

void decrementTicks() {
    sleeping = decrementTicksR(sleeping);
}

static SleepNode * decrementTicksR(SleepNode * sn) {
    if (sn == NULL) {
        return NULL;
    }
    if (sn->ticks == 0) {
        SleepNode * t = sn->next;
        sn->pn->skip = 0;
        sn->pn->sn = NULL;
        la_free(sn);
        return decrementTicksR(t);
    }
    sn->ticks--;
    sn->next = decrementTicksR(sn->next);
    return sn;
}

static ProcessNode * deleteProcessNode(ProcessNode * n) {
    ProcessNode * cn = n;
    ProcessNode * prev = cn->prev;
    ProcessNode * next = cn->next;
    prev->next = next;
    next->prev = prev;
    SleepNode * sn = n->sn;
    if (sn != NULL) {
        deleteSleepNode(sn);
    }
    return next;
}

void wake(uint64_t pid) {
    ProcessNode * pn = current;
    for (int i = 0; i < process_count; i++, pn = pn->next) {
        if (pn->p->pid == pid) {
            pn->skip = 0;
            /* was current->sn / current->sn = NULL, which cleared the caller's
             * sleep node instead of the woken process's one */
            SleepNode * sn = pn->sn;
            if (sn != NULL) {
                deleteSleepNode(sn);
                pn->sn = NULL;
            }
            break;
        }
    }
}

static void deleteSleepNode(SleepNode * sn) {
    if (sn->prev == NULL && sn->next == NULL) {
        sleeping = NULL; /* was 'sleeping == NULL;', a no-op comparison */
    } else if (sn->prev == NULL) {
        sleeping = sn->next;
    } else if (sn->next == NULL) {
        sn->prev->next = NULL;
    } else {
        sn->prev->next = sn->next;
        sn->next->prev = sn->prev;
    }
    la_free(sn);
}

void mkwait(uint64_t pid) {
    ProcessNode * pn = current;
    if (pn->p->pid == pid && pid != 0 && pid != 1) {
        wait();
        return;
    }
    for (int i = 0; i < process_count; i++, pn = pn->next) {
        if (pn->p->pid == pid && pid != 0 && pid != 1) {
            SleepNode * sn = pn->sn;
            if (sn != NULL) {
                deleteSleepNode(sn);
                pn->sn = NULL;
            }
            pn->skip = 1;
            break;
        }
    }
}

void wait() {
    ProcessNode * cn = current;
    cn->skip = 1;
    SleepNode * sn = cn->sn;
    if (sn != NULL) {
        deleteSleepNode(sn);
        cn->sn = NULL;
    }
    yield();
    return;
}

uint8_t isFg() {
    return current->p->fg;
}

uint64_t currPid() {
    return current->p->pid;
}

uint64_t currPpid() {
    return current->p->ppid;
}

/*
 * This assumes that the current process
 * is the foreground process
 */
void giveFg(uint64_t pid) {
    foreground->fg = 0;
    foreground = NULL;
    Process * shellpr = NULL;
    ProcessNode * shellpn = NULL;
    ProcessNode * pn = current;
    uint8_t found = 0;
    for (int i = 0; i < process_count; i++, pn = pn->next) {
        if (pn->p->pid == 1) {
            shellpr = pn->p;
            shellpn = pn;
        }
        if (pn->p->pid == pid) {
            pn->p->fg = 1;
            foreground = pn->p;
            found = 1;
            if (pn->skip) {
                wake(pn->p->pid);
            }
            break;
        }
    }
    if (!found) {
        shellpr->fg = 1;
        foreground = shellpr;
        if (shellpn->skip) {
            wake(shellpr->pid);
        }
    }
}

uint64_t disableScheduler() {
    if (tl->lock == 1) {
        return 1;
    }
    while (!trylock(tl)) {
        yield();
    }
    togglelock();
    return 0;
}

void enableScheduler(uint64_t v) {
    if (v == 1 || tl->lock == 0)
        return;
    togglelock();
    tryunlock(tl);
}

void release_lock_and_sleep(mutex * m) {
    uint64_t v = disableScheduler();
    mutex_unlock(m);
    current->skip = true;
    enableScheduler(v);
    yield();
}
import itertools


def angle_names(zma):
    return tuple(itertools.chain(central_angle_names(zma), dihedral_angle_names(zma)))
// Copyright 2017 <NAME>. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.

#include "http_connection.h"

namespace vbench {

HttpConnection::HttpConnection(CryptoEngine &crypto, const ServerSpec &s)
    : _server(s),
      _socket(crypto, s.host, s.port),
      _lastUsed(-1000.0)
{
}

bool
HttpConnection::mayReuse(double now) const
{
    return (((now - _lastUsed) < 1.0)
            && !_socket.eof()
            && !_socket.tainted());
}

} // namespace vbench
package org.paasta.servicebroker.apigateway.service.impl;

import lombok.extern.slf4j.Slf4j;
import org.openpaas.servicebroker.exception.ServiceBrokerException;
import org.openpaas.servicebroker.model.CreateServiceInstanceBindingRequest;
import org.openpaas.servicebroker.model.DeleteServiceInstanceBindingRequest;
import org.openpaas.servicebroker.model.ServiceInstanceBinding;
import org.openpaas.servicebroker.service.ServiceInstanceBindingService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * The type Api gateway service instance binding service.
 */
@Slf4j
@Service
public class ApiGatewayServiceInstanceBindingService implements ServiceInstanceBindingService {

    /**
     * Instantiates a new Api gateway service instance binding service.
     */
    @Autowired
    public ApiGatewayServiceInstanceBindingService() {
    }

    @Override
    public ServiceInstanceBinding createServiceInstanceBinding(CreateServiceInstanceBindingRequest request)
            throws ServiceBrokerException {
        log.debug("ApiGatewayServiceInstanceBindingService : Bind ServiceInstance :: Not Supported");
        throw new ServiceBrokerException("Not Supported");
    }

    @Override
    public ServiceInstanceBinding deleteServiceInstanceBinding(DeleteServiceInstanceBindingRequest request)
            throws ServiceBrokerException {
        log.debug("ApiGatewayServiceInstanceBindingService : Unbind ServiceInstance :: Not Supported");
        throw new ServiceBrokerException("Not Supported");
    }
}
// InsertOne inserts a document into the database
func InsertOne(ts *TokenStore, collectionName string, doc *bson.D) (err error) {
    _, verr := ts.client.Database(ts.dbName).Collection(collectionName).InsertOne(context.Background(), doc)
    if verr != nil {
        // log.Fatal here would terminate the process and make the error return
        // unreachable; log the error and return it instead.
        log.Println(verr)
        err = verr
        return
    }
    return
}
#ifndef INTERSECTION_H
#define INTERSECTION_H

#include "math/real.h"
#include "math/vec.h"

class Intersection {
public:
    real distance;
    vec hitpoint;
    vec normal;

    Intersection() : distance(0) {}
};

#endif
package dane

import (
    "bufio"
    "crypto/tls"
    "fmt"
    "os"
    "strconv"
    "strings"
)

const bufsize = 2048

//
// DoXMPP connects to an XMPP server, issues a STARTTLS command, negotiates
// TLS and returns a TLS connection. See RFC 6120, Section 5.4.2 for details.
//
func DoXMPP(tlsconfig *tls.Config, daneconfig *Config) (*tls.Conn, error) {
    var servicename, rolename string
    var line, transcript string
    buf := make([]byte, bufsize)

    server := daneconfig.Server
    conn, err := getTCPconn(server.Ipaddr, server.Port, daneconfig.TimeoutTCP)
    if err != nil {
        return nil, err
    }
    reader := bufio.NewReader(conn)
    writer := bufio.NewWriter(conn)

    if daneconfig.Servicename != "" {
        servicename = daneconfig.Servicename
    } else {
        servicename = server.Name
    }

    switch daneconfig.Appname {
    case "xmpp-client":
        rolename = "client"
    case "xmpp-server":
        rolename = "server"
    }

    // send initial stream header
    outstring := fmt.Sprintf(
        "<?xml version='1.0'?><stream:stream to='%s' "+
            "version='1.0' xml:lang='en' xmlns='jabber:%s' "+
            "xmlns:stream='http://etherx.jabber.org/streams'>",
        servicename, rolename)
    transcript += fmt.Sprintf("send: %s\n", outstring)
    writer.WriteString(outstring)
    writer.Flush()

    // read response stream header; look for STARTTLS feature support
    _, err = reader.Read(buf)
    if err != nil {
        return nil, err
    }
    line = string(buf)
    transcript += fmt.Sprintf("recv: %s\n", line)
    gotSTARTTLS := false
    if strings.Contains(line, "<starttls") && strings.Contains(line, "urn:ietf:params:xml:ns:xmpp-tls") {
        gotSTARTTLS = true
    }
    if !gotSTARTTLS {
        return nil, fmt.Errorf("XMPP STARTTLS unavailable")
    }

    // issue STARTTLS command
    outstring = "<starttls xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>"
    transcript += fmt.Sprintf("send: %s\n", outstring)
    writer.WriteString(outstring + "\r\n")
    writer.Flush()

    // read response and look for proceed element
    _, err = reader.Read(buf)
    if err != nil {
        return nil, err
    }
    line = string(buf)
    transcript += fmt.Sprintf("recv: %s\n", line)
    if !strings.Contains(line, "<proceed") {
        return nil, fmt.Errorf("XMPP STARTTLS command failed")
    }

    daneconfig.Transcript = transcript
    return TLShandshake(conn, tlsconfig)
}

//
// DoPOP3 connects to a POP3 server, sends the STLS command, negotiates TLS,
// and returns a TLS connection.
//
func DoPOP3(tlsconfig *tls.Config, daneconfig *Config) (*tls.Conn, error) {
    var line, transcript string

    server := daneconfig.Server
    conn, err := getTCPconn(server.Ipaddr, server.Port, daneconfig.TimeoutTCP)
    if err != nil {
        return nil, err
    }
    reader := bufio.NewReader(conn)
    writer := bufio.NewWriter(conn)

    // Read POP3 greeting
    line, err = reader.ReadString('\n')
    if err != nil {
        return nil, err
    }
    line = strings.TrimRight(line, "\r\n")
    transcript += fmt.Sprintf("recv: %s\n", line)

    // Send STLS command
    transcript += "send: STLS\n"
    writer.WriteString("STLS\r\n")
    writer.Flush()

    // Read STLS response, look for +OK
    line, err = reader.ReadString('\n')
    if err != nil {
        return nil, err
    }
    line = strings.TrimRight(line, "\r\n")
    transcript += fmt.Sprintf("recv: %s\n", line)
    if !strings.HasPrefix(line, "+OK") {
        return nil, fmt.Errorf("POP3 STARTTLS unavailable")
    }

    daneconfig.Transcript = transcript
    return TLShandshake(conn, tlsconfig)
}

//
// DoIMAP connects to an IMAP server, issues a STARTTLS command, negotiates
// TLS, and returns a TLS connection.
//
func DoIMAP(tlsconfig *tls.Config, daneconfig *Config) (*tls.Conn, error) {
    var gotSTARTTLS bool
    var line, transcript string

    server := daneconfig.Server
    conn, err := getTCPconn(server.Ipaddr, server.Port, daneconfig.TimeoutTCP)
    if err != nil {
        return nil, err
    }
    reader := bufio.NewReader(conn)
    writer := bufio.NewWriter(conn)

    // Read IMAP greeting
    line, err = reader.ReadString('\n')
    if err != nil {
        return nil, err
    }
    line = strings.TrimRight(line, "\r\n")
    transcript += fmt.Sprintf("recv: %s\n", line)

    // Send Capability command, read response, looking for STARTTLS
    transcript += "send: . CAPABILITY\n"
    writer.WriteString(". CAPABILITY\r\n")
    writer.Flush()
    for {
        line, err = reader.ReadString('\n')
        if err != nil {
            return nil, err
        }
        line = strings.TrimRight(line, "\r\n")
        transcript += fmt.Sprintf("recv: %s\n", line)
        if strings.HasPrefix(line, "* CAPABILITY") && strings.Contains(line, "STARTTLS") {
            gotSTARTTLS = true
        }
        if strings.HasPrefix(line, ". OK") {
            break
        }
    }
    if !gotSTARTTLS {
        return nil, fmt.Errorf("IMAP STARTTLS capability unavailable")
    }

    // Send STARTTLS
    transcript += "send: . STARTTLS\n"
    writer.WriteString(". STARTTLS\r\n")
    writer.Flush()

    // Look for OK response
    line, err = reader.ReadString('\n')
    if err != nil {
        return nil, err
    }
    line = strings.TrimRight(line, "\r\n")
    transcript += fmt.Sprintf("recv: %s\n", line)
    if !strings.HasPrefix(line, ". OK") {
        return nil, fmt.Errorf("STARTTLS failed to negotiate")
    }

    daneconfig.Transcript = transcript
    return TLShandshake(conn, tlsconfig)
}

//
// parseSMTPline parses an SMTP protocol line and returns the reply code, the
// remainder of the line, whether the (possibly multi-line) response is done,
// and an error (on failure).
//
func parseSMTPline(line string) (int, string, bool, error) {
    var responseDone = false
    replycode, err := strconv.Atoi(line[:3])
    if err != nil {
        return 0, "", responseDone, fmt.Errorf("invalid reply code: %s", line)
    }
    if line[3] != '-' {
        responseDone = true
    }
    rest := line[4:]
    return replycode, rest, responseDone, err
}

//
// DoSMTP connects to an SMTP server, checks for STARTTLS support, negotiates
// TLS, and returns a TLS connection.
//
func DoSMTP(tlsconfig *tls.Config, daneconfig *Config) (*tls.Conn, error) {
    var replycode int
    var line, rest, transcript string
    var responseDone, gotSTARTTLS bool

    server := daneconfig.Server
    conn, err := getTCPconn(server.Ipaddr, server.Port, daneconfig.TimeoutTCP)
    if err != nil {
        return nil, err
    }
    reader := bufio.NewReader(conn)
    writer := bufio.NewWriter(conn)

    // Read possibly multi-line SMTP greeting
    for {
        line, err = reader.ReadString('\n')
        if err != nil {
            return nil, err
        }
        line = strings.TrimRight(line, "\r\n")
        transcript += fmt.Sprintf("recv: %s\n", line)
        replycode, _, responseDone, err = parseSMTPline(line)
        if err != nil {
            return nil, err
        }
        if responseDone {
            break
        }
    }
    if replycode != 220 {
        return nil, fmt.Errorf("invalid reply code (%d) in SMTP greeting", replycode)
    }

    // Send EHLO, read possibly multi-line response, look for STARTTLS
    hostname, err := os.Hostname()
    if err != nil {
        return nil, err
    }
    ehloCommand := fmt.Sprintf("EHLO %s", hostname)
    transcript += fmt.Sprintf("send: %s\n", ehloCommand)
    writer.WriteString(fmt.Sprintf("%s\r\n", ehloCommand))
    writer.Flush()
    for {
        line, err = reader.ReadString('\n')
        if err != nil {
            return nil, err
        }
        line = strings.TrimRight(line, "\r\n")
        transcript += fmt.Sprintf("recv: %s\n", line)
        replycode, rest, responseDone, err = parseSMTPline(line)
        if err != nil {
            return nil, err
        }
        if replycode != 250 {
            return nil, fmt.Errorf("invalid reply code in EHLO response")
        }
        if strings.Contains(rest, "STARTTLS") {
            gotSTARTTLS = true
        }
        if responseDone {
            break
        }
    }
    if !gotSTARTTLS {
        return nil, fmt.Errorf("SMTP STARTTLS support not detected")
    }

    // Send STARTTLS command and read success reply code
    transcript += "send: STARTTLS\n"
    writer.WriteString("STARTTLS\r\n")
    writer.Flush()
    line, err = reader.ReadString('\n')
    if err != nil {
        return nil, err
    }
    line = strings.TrimRight(line, "\r\n")
    transcript += fmt.Sprintf("recv: %s\n", line)
    replycode, _, _, err = parseSMTPline(line)
    if err != nil {
        return nil, err
    }
    if replycode != 220 {
        return nil, fmt.Errorf("invalid reply code to STARTTLS command")
    }

    daneconfig.Transcript = transcript
    return TLShandshake(conn, tlsconfig)
}

//
// StartTLS dispatches to the protocol-specific STARTTLS routine for the
// configured application.
//
func StartTLS(tlsconfig *tls.Config, daneconfig *Config) (*tls.Conn, error) {
    switch daneconfig.Appname {
    case "smtp":
        return DoSMTP(tlsconfig, daneconfig)
    case "imap":
        return DoIMAP(tlsconfig, daneconfig)
    case "pop3":
        return DoPOP3(tlsconfig, daneconfig)
    case "xmpp-client", "xmpp-server":
        return DoXMPP(tlsconfig, daneconfig)
    default:
        return nil, fmt.Errorf("unknown STARTTLS application: %s", daneconfig.Appname)
    }
}
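Seen from a client that delegates the protocol details to a standard library, the SMTP upgrade sequence above collapses to a few calls. A short Python sketch using smtplib, shown only to illustrate the same EHLO/STARTTLS/EHLO dance (it is unrelated to this Go package, and the hostname is hypothetical):

import smtplib
import ssl

# EHLO -> check the STARTTLS capability -> STARTTLS -> EHLO again over TLS.
context = ssl.create_default_context()
with smtplib.SMTP("mail.example.com", 587, timeout=10) as s:
    s.ehlo()
    if not s.has_extn("starttls"):
        raise RuntimeError("SMTP STARTTLS support not detected")
    s.starttls(context=context)
    s.ehlo()  # capabilities must be re-read on the encrypted channel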
For a long time I have been saying (along with a lot of other people) that Hugo Chavez was running his country into the ground. He diverted investment funds from PDVSA, Venezuela's state-run oil company, into social programs. As long as the price of oil kept rising, he could do that. Unfortunately, Venezuela's sour, heavy crude is particularly hard to get at and refine, and requires a high rate of investment in order to keep production up. As a result, the number of barrels per day (bpd) that Venezuela produces has declined pretty sharply since he took office in 1999.

As a consequence, the money that Chavez used to paper over the cracks in his socialist paradise has vanished, and the cracks are deepening:

    President Hugo Chávez has been facing a public outcry in recent weeks over power failures that, after six nationwide blackouts in the last two years, are cutting electricity for hours each day in rural areas and in industrial cities like Valencia and Ciudad Guayana. Now, water rationing has been introduced here in the capital.

    The deterioration of services is perplexing to many here, especially because the country had grown used to cheap, plentiful electricity and water in recent decades. But even as the oil boom was enriching his government and Mr. Chávez asserted greater control over utilities and other industries in this decade, public services seemed only to decay, adding to residents' frustrations.

    With oil revenues declining and the economy slowing, the shortages may have no quick fixes in sight. The government announced some emergency measures this week, including limits on imports of air-conditioning systems, rate increases for consumers of large amounts of power and the building of new gas-fired power plants, which would not be completed until the middle of the next decade.

This comes on top of the sporadic food shortages that result from price controls combined with high inflation.
from typing import Dict


def _translate_type(key: str) -> type:
    type_map: Dict[str, type] = {"str": str}
    return type_map[key]
Uniformly Best Biased Estimators in Non-Bayesian Parameter Estimation In this paper, a new structured approach for obtaining uniformly best non-Bayesian biased estimators, which attain minimum-mean-square-error performance at any point in the parameter space, is established. We show that if a uniformly best biased (UBB) estimator exists, then it is unique, and it can be directly obtained from any locally best biased (LBB) estimator. A necessary and sufficient condition for the existence of a UBB estimator is derived. It is shown that if there exists an optimal bias, such that this condition is satisfied, then it is unique, and its closed-form expression is obtained. The proposed approach is exemplified in two nonlinear estimation problems, where uniformly minimum-variance-unbiased estimators do not exist. In the considered examples, we show that the UBB estimators outperform the corresponding maximum-likelihood estimators in the MSE sense.
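For orientation, the trade-off the abstract appeals to is the standard bias-variance decomposition of the mean-square error (a background identity, not a result of the paper). For an estimator \hat{\theta} of \theta with bias b(\theta) \triangleq \mathbb{E}_{\theta}[\hat{\theta}] - \theta,

\mathrm{MSE}_{\theta}(\hat{\theta}) = \mathbb{E}_{\theta}\!\left[(\hat{\theta} - \theta)^{2}\right] = \operatorname{Var}_{\theta}(\hat{\theta}) + b^{2}(\theta),

so a biased estimator can dominate every unbiased one at a point whenever its variance reduction exceeds b^{2}(\theta).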
import java.util.*;

public class Main {

    // inner classes
    static class Icicle implements Comparable<Icicle> {
        int t, i;

        Icicle(int t, int i) {
            this.t = t;
            this.i = i;
        }

        public int compareTo(Icicle ic) {
            return this.t - ic.t;
        }
    }

    // main
    public static final void main(String[] args) throws Exception {
        Scanner sc = new Scanner(System.in);
        int n = sc.nextInt();
        int l = sc.nextInt();
        int[] tis = new int[n];
        for (int i = 0; i < n; i++) tis[i] = sc.nextInt();

        PriorityQueue<Icicle> q = new PriorityQueue<Icicle>();
        boolean[] broken = new boolean[n];
        Arrays.fill(broken, false);
        for (int i = 0; i < n; i++)
            if ((i == 0 || tis[i] > tis[i - 1]) && (i == n - 1 || tis[i] > tis[i + 1])) {
                q.add(new Icicle(l - tis[i], i));
                broken[i] = true;
            }

        int time = 0;
        while (! q.isEmpty()) {
            Icicle ic0 = q.poll();
            //System.out.println("t=" + ic0.t + ",i=" + ic0.i);
            time = ic0.t;
            tis[ic0.i] = 0;
            if (ic0.i > 0) {
                int i1 = ic0.i - 1;
                if (! broken[i1] && (i1 == 0 || tis[i1] > tis[i1 - 1])) {
                    q.add(new Icicle(time + l - tis[i1], i1));
                    broken[i1] = true;
                }
            }
            if (ic0.i < n - 1) {
                int i2 = ic0.i + 1;
                if (! broken[i2] && (i2 == n - 1 || tis[i2] > tis[i2 + 1])) {
                    q.add(new Icicle(time + l - tis[i2], i2));
                    broken[i2] = true;
                }
            }
        }
        System.out.println(time);
    }
}
print(sum([100 if s == 'o' else 0 for s in input().strip()]) + 700)
const unsigned char font5x7[] PROGMEM = { // Font Matrix: 7 row bytes per glyph, then a width byte
    B00000000, B00000000, B00000000, B00000000, B00000000, B00000000, B00000000, 3, // Space (Char 0x20)
    B01000000, B01000000, B01000000, B01000000, B01000000, B00000000, B01000000, 2, // !
    B10100000, B10100000, B10100000, B00000000, B00000000, B00000000, B00000000, 4, // "
    B01010000, B01010000, B11111000, B01010000, B11111000, B01010000, B01010000, 6, // #
    B00100000, B01111000, B10100000, B01110000, B00101000, B11110000, B00100000, 6, // $
    B11000000, B11001000, B00010000, B00100000, B01000000, B10011000, B00011000, 6, // %
    B01100000, B10010000, B10100000, B01000000, B10101000, B10010000, B01101000, 6, // &
    B11000000, B01000000, B10000000, B00000000, B00000000, B00000000, B00000000, 3, // '
    B00100000, B01000000, B10000000, B10000000, B10000000, B01000000, B00100000, 4, // (
    B10000000, B01000000, B00100000, B00100000, B00100000, B01000000, B10000000, 4, // )
    B00000000, B00100000, B10101000, B01110000, B10101000, B00100000, B00000000, 6, // *
    B00000000, B00100000, B00100000, B11111000, B00100000, B00100000, B00000000, 6, // +
    B00000000, B00000000, B00000000, B00000000, B11000000, B01000000, B10000000, 3, // ,
    B00000000, B00000000, B11111000, B00000000, B00000000, B00000000, B00000000, 6, // -
    B00000000, B00000000, B00000000, B00000000, B00000000, B11000000, B11000000, 3, // .
    B00000000, B00001000, B00010000, B00100000, B01000000, B10000000, B00000000, 6, // /
    B01110000, B10001000, B10011000, B10101000, B11001000, B10001000, B01110000, 6, // 0
    B01000000, B11000000, B01000000, B01000000, B01000000, B01000000, B11100000, 4, // 1
    B01110000, B10001000, B00001000, B00010000, B00100000, B01000000, B11111000, 6, // 2
    B11111000, B00010000, B00100000, B00010000, B00001000, B10001000, B01110000, 6, // 3
    B00010000, B00110000, B01010000, B10010000, B11111000, B00010000, B00010000, 6, // 4
    B11111000, B10000000, B11110000, B00001000, B00001000, B10001000, B01110000, 6, // 5
    B00110000, B01000000, B10000000, B11110000, B10001000, B10001000, B01110000, 6, // 6
    B11111000, B10001000, B00001000, B00010000, B00100000, B00100000, B00100000, 6, // 7
    B01110000, B10001000, B10001000, B01110000, B10001000, B10001000, B01110000, 6, // 8
    B01110000, B10001000, B10001000, B01111000, B00001000, B00010000, B01100000, 6, // 9
    B00000000, B11000000, B11000000, B00000000, B11000000, B11000000, B00000000, 3, // :
    B00000000, B11000000, B11000000, B00000000, B11000000, B01000000, B10000000, 3, // ;
    B00010000, B00100000, B01000000, B10000000, B01000000, B00100000, B00010000, 5, // <
    B00000000, B00000000, B11111000, B00000000, B11111000, B00000000, B00000000, 6, // =
    B10000000, B01000000, B00100000, B00010000, B00100000, B01000000, B10000000, 5, // >
    B01110000, B10001000, B00001000, B00010000, B00100000, B00000000, B00100000, 6, // ?
    B01110000, B10001000, B00001000, B01101000, B10101000, B10101000, B01110000, 6, // @
    B01110000, B10001000, B10001000, B10001000, B11111000, B10001000, B10001000, 6, // A
    B11110000, B10001000, B10001000, B11110000, B10001000, B10001000, B11110000, 6, // B
    B01110000, B10001000, B10000000, B10000000, B10000000, B10001000, B01110000, 6, // C
    B11100000, B10010000, B10001000, B10001000, B10001000, B10010000, B11100000, 6, // D
    B11111000, B10000000, B10000000, B11110000, B10000000, B10000000, B11111000, 6, // E
    B11111000, B10000000, B10000000, B11110000, B10000000, B10000000, B10000000, 6, // F
    B01110000, B10001000, B10000000, B10111000, B10001000, B10001000, B01111000, 6, // G
    B10001000, B10001000, B10001000, B11111000, B10001000, B10001000, B10001000, 6, // H
    B11100000, B01000000, B01000000, B01000000, B01000000, B01000000, B11100000, 4, // I
    B00111000, B00010000, B00010000, B00010000, B00010000, B10010000, B01100000, 6, // J
    B10001000, B10010000, B10100000, B11000000, B10100000, B10010000, B10001000, 6, // K
    B10000000, B10000000, B10000000, B10000000, B10000000, B10000000, B11111000, 6, // L
    B10001000, B11011000, B10101000, B10101000, B10001000, B10001000, B10001000, 6, // M
    B10001000, B10001000, B11001000, B10101000, B10011000, B10001000, B10001000, 6, // N
    B01110000, B10001000, B10001000, B10001000, B10001000, B10001000, B01110000, 6, // O
    B11110000, B10001000, B10001000, B11110000, B10000000, B10000000, B10000000, 6, // P
    B01110000, B10001000, B10001000, B10001000, B10101000, B10010000, B01101000, 6, // Q
    B11110000, B10001000, B10001000, B11110000, B10100000, B10010000, B10001000, 6, // R
    B01111000, B10000000, B10000000, B01110000, B00001000, B00001000, B11110000, 6, // S
    B11111000, B00100000, B00100000, B00100000, B00100000, B00100000, B00100000, 6, // T
    B10001000, B10001000, B10001000, B10001000, B10001000, B10001000, B01110000, 6, // U
    B10001000, B10001000, B10001000, B10001000, B10001000, B01010000, B00100000, 6, // V
    B10001000, B10001000, B10001000, B10101000, B10101000, B10101000, B01010000, 6, // W
    B10001000, B10001000, B01010000, B00100000, B01010000, B10001000, B10001000, 6, // X
    B10001000, B10001000, B10001000, B01010000, B00100000, B00100000, B00100000, 6, // Y
    B11111000, B00001000, B00010000, B00100000, B01000000, B10000000, B11111000, 6, // Z
    B11100000, B10000000, B10000000, B10000000, B10000000, B10000000, B11100000, 4, // [
    B00000000, B10000000, B01000000, B00100000, B00010000, B00001000, B00000000, 6, // (Backward Slash)
    B11100000, B00100000, B00100000, B00100000, B00100000, B00100000, B11100000, 4, // ]
    B00100000, B01010000, B10001000, B00000000, B00000000, B00000000, B00000000, 6, // ^
    B00000000, B00000000, B00000000, B00000000, B00000000, B00000000, B11111000, 6, // _
    B10000000, B01000000, B00100000, B00000000, B00000000, B00000000, B00000000, 4, // `
    B00000000, B00000000, B01110000, B00001000, B01111000, B10001000, B01111000, 6, // a
    B10000000, B10000000, B10110000, B11001000, B10001000, B10001000, B11110000, 6, // b
    B00000000, B00000000, B01110000, B10001000, B10000000, B10001000, B01110000, 6, // c
    B00001000, B00001000, B01101000, B10011000, B10001000, B10001000, B01111000, 6, // d
    B00000000, B00000000, B01110000, B10001000, B11111000, B10000000, B01110000, 6, // e
    B00110000, B01001000, B01000000, B11100000, B01000000, B01000000, B01000000, 6, // f
    B00000000, B01111000, B10001000, B10001000, B01111000, B00001000, B01110000, 6, // g
    B10000000, B10000000, B10110000, B11001000, B10001000, B10001000, B10001000, 6, // h
    B01000000, B00000000, B11000000, B01000000, B01000000, B01000000, B11100000, 4, // i
    B00010000, B00000000, B00110000, B00010000, B00010000, B10010000, B01100000, 5, // j
    B10000000, B10000000, B10010000, B10100000, B11000000, B10100000, B10010000, 5, // k
    B11000000, B01000000, B01000000, B01000000, B01000000, B01000000, B11100000, 4, // l
    B00000000, B00000000, B11010000, B10101000, B10101000, B10001000, B10001000, 6, // m
    B00000000, B00000000, B10110000, B11001000, B10001000, B10001000, B10001000, 6, // n
    B00000000, B00000000, B01110000, B10001000, B10001000, B10001000, B01110000, 6, // o
    B00000000, B00000000, B11110000, B10001000, B11110000, B10000000, B10000000, 6, // p
    B00000000, B00000000, B01101000, B10011000, B01111000, B00001000, B00001000, 6, // q
    B00000000, B00000000, B10110000, B11001000, B10000000, B10000000, B10000000, 6, // r
    B00000000, B00000000, B01110000, B10000000, B01110000, B00001000, B11110000, 6, // s
    B01000000, B01000000, B11100000, B01000000, B01000000, B01001000, B00110000, 6, // t
    B00000000, B00000000, B10001000, B10001000, B10001000, B10011000, B01101000, 6, // u
    B00000000, B00000000, B10001000, B10001000, B10001000, B01010000, B00100000, 6, // v
    B00000000, B00000000, B10001000, B10101000, B10101000, B10101000, B01010000, 6, // w
    B00000000, B00000000, B10001000, B01010000, B00100000, B01010000, B10001000, 6, // x
    B00000000, B00000000, B10001000, B10001000, B01111000, B00001000, B01110000, 6, // y
    B00000000, B00000000, B11111000, B00010000, B00100000, B01000000, B11111000, 6, // z
    B00100000, B01000000, B01000000, B10000000, B01000000, B01000000, B00100000, 4, // {
    B10000000, B10000000, B10000000, B10000000, B10000000, B10000000, B10000000, 2, // |
    B10000000, B01000000, B01000000, B00100000, B01000000, B01000000, B10000000, 4, // }
    B00000000, B00000000, B00000000, B01101000, B10010000, B00000000, B00000000, 6, // ~
    B01100000, B10010000, B10010000, B01100000, B00000000, B00000000, B00000000, 5, // (Char 0x7F)
    B00000000, B01100000, B01100110, B00000000, B10000001, B01100110, B00011000, 5  // smiley
};
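Each entry above is seven row bytes (bit 7 is the leftmost pixel column) followed by a width byte. A small Python sketch, assuming exactly that layout, that renders one glyph as ASCII art:

def render_glyph(rows, width):
    # rows: the 7 row bytes of one glyph; bit 7 of each byte = leftmost column
    for row in rows:
        print(''.join('#' if row & (0x80 >> col) else '.' for col in range(width)))

exclam = [0b01000000] * 5 + [0b00000000, 0b01000000]  # the '!' entry above, width 2
render_glyph(exclam, 2)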
p551 Fullo Unsigned article on pp551‑553 of FULLO (κναφεύς, γναφεύς), also NACCA (Festus, s.v.; Apul. Met. IX p206, Bipont), a fuller, a washer or scourer of cloth and linen. The fullones not only received the cloth as it came from the loom in order to scour and smooth it, but also washed and cleansed garments which had been already worn. As the Romans generally wore woollen dresses, which were often of a light colour, they frequently needed, in the hot climate of Italy, a thorough purification. The way in which this was done has been described by Pliny and other writers, but is most clearly explained by some paintings which have been found on the walls of a fullonica at Pompeii.a Two of these paintings are given by Gell (Pompeiana, vol. II pl. 51, 52), and the whole of them in the Museo Borbonico (vol. IV pl. 49, 50); from the p552 latter of which works the following cuts have been taken. The clothes were first washed, which was done in tubs or vats, where they were trodden upon and stamped by the feet of the fullones, whence Seneca (Ep. 15) speaks of saltus fullonicus. The following woodcut represents four persons thus employed, of whom three are boys, probably under the superintendence of the man. Their dress is tucked up, leaving the legs bare; the boys seem to have done their work, and to be wringing the articles on which they had been employed. Photo © Miko Flohr 2009, by kind permission. The ancients were not acquainted with soap,b but they used in its stead different kinds of alkali, by which the dirt was more easily separated from the clothes. Of these, by far the most common was the urine of men and animals, which was mixed with the water in which the clothes were washed (Plin. H. N. XXVIII 18. 26; Athen. XI p484 ).c To procure a sufficient supply of it, the fullones were accustomed to place at the corners of the streets vessels, which they carried away after they had been filled by the passengers (Martial, VI.93; Macrob. Saturn. II.12). We are told by Suetonius (Vesp. 23) that Vespasian imposed a urinae vectigal, which is supposed by Casaubon and others to have been a tax paid by the fullones. Nitrum, of which Pliny (H. N. XXXI.46) gives an account, was also mixed with water by the scourers. Fullers' earth (creta fullonia, Plin. H. N. XVII.4),º of which there were many kinds, was employed for the same purpose. We do not know the exact nature of this earth, but it appears to have acted in the same way as our fullers' earth, namely, partly in scouring and partly in absorbing the greasy dirt. Pliny (H. N. XXXV.57) says that the clothes should be washed with the Sardinian earth. After the clothes had been washed, they were hung out to dry, and were allowed to be placed in the street before the doors of the fullonica (Dig. 43 tit. 10 s.1 §4). When dry, the wool was brushed and carded to raise the nap, sometimes with the skin of a hedgehog, and sometimes with some plants of the thistle kind. The clothes were then hung on a vessel of basket-work (viminea cavea), under which sulphur was placed in order to whiten the cloth; for the ancient fullers appear to have known that many colours were destroyed by the volatile steam of sulphur (Apul. Met. IX. p208, Bipont; Plin. H. N. XXXV.50, 57; Pollux, VII.41). A fine white earth, called Cimolian by Pliny, was often rubbed into the cloth to increase its whiteness (Theophr. Char. 10 ; Plaut. Aulul. IV.9.6; Plin. H. N. XXXV.57). The preceding account is well illustrated by the following woodcut. Photo © Miko Flohr 2009, by kind permission. 
On the left we see a fullo brushing or carding a white tunic, suspended over a rope, with a card or brush, which bears considerable resemblance to a modern horse-brush. On the right, another man carries a frame of wicker-work, which was without doubt intended for the purpose described above; he has also a pot in his hand, perhaps intended for holding the sulphur. On his head he wears a kind of garland, which is supposed to be an olive garland, and above him an owl is sitting. It is thought that the olive garland and the owl indicate that the establishment was under the patronage of Minerva, the tutelary goddess of the loom. Sir W. Gell imagines that the owl is probably the picture of a bird which really existed in the family. On the left, a well-dressed female is sitting, examining a piece of work which a younger girl brings to her. A reticulum [see p329A] upon her head, a necklace, and bracelets denote a person of higher rank than one of the ordinary work-people of the establishment. In the following woodcut we see a young man in a green tunic giving a piece of cloth, which appears to be finished, to a woman, who wears a green under-tunic, and over it a yellow tunic with red stripes. On the right is another female in a white tunic, who appears to be engaged in cleaning one of the cards or brushes. Among these paintings there was a press, worked by two upright screws, in which the cloth was placed to be smoothened. A drawing of this press is given on p300. Photo © Miko Flohr 2009, by kind permission. The establishment or workshop of the fullers was called Fullonica (Dig. 39 tit. 3 s3), Fullonicum p553 (Dig. 7 tit. 1 s13 § 8), or Fullonium (Amm. Marc. XIV.11, p44, Bipont.). Of such establishments there were great numbers in Rome, for the Romans do not appear to have washed at home even their linen clothes (Martial, XIV.51). The trade of the fullers was considered so important that the censors, C. Flaminius and L. Aemilius, B.C. 220, prescribed the mode in which the dresses were to be washed (Plin. H. N. XXXV.57). Like the other principal trades in Rome, the Fullones formed a collegium (Fabretti, Inscr. p278). To large farms a fullonica was sometimes attached, in which the work was performed by the slaves who belonged to the familia rustica (Varro, R. R. I.16). The fullo was answerable for the property while it was in his possession; and if he returned by mistake a different garment from the one he had received, he was liable to an action ex locato; to which action he was also subject if the garment was injured (Dig. 19 tit. 2 s13 § 6; s60, § 2; 12 tit. 7 s2). Woollen garments, which had been once washed, were considered to be less valuable than they were previously (Petron. 30; Lamprid. Heliogab. 26); hence Martial (X.1) speaks of a toga lota terque quaterque as a poor present. The Greeks were also accustomed to send their garments to fullers to be washed and scoured, who appear to have adopted a similar method to that which has been described above (Theophr. Char. 10 ; Athen. XI p582D ; Pollux, VII.39, 40, 41). The word πλύνειν denoted the washing of linen, and κναφεύειν or γναφεύειν the washing of woollen, clothes (Eustath. ad Od. XXIV.148 p1956, 41). (Schöttgen Antiquitates Triturae et Fulloniae, Traj. ad Rhen. 127; Beckmann, Hist. of Inventions and Discoveries, vol. III p266, &c., transl.: Becker, Gallus, vol. II p100, &c., Charikles, vol. II p408). Thayer's Notes: a More precisely, the woodcuts are reproductions of frescoes in the fullonica of Veranius Hypsaeus. 
By 21st-century standards the reproductions are inferior; through the kind courtesy of Dr. Miko Flohr, an authority on Roman fulleries (The World of the Fullo: Work, Economy and Society in Roman Italy, Oxford Studies on the Roman Economy, Oxford: OUP, 2013), each of them is now joined by a photograph.

[b] This is not altogether true. The Roman encyclopedist Pliny writes of soap quite clearly (H. N. 28.191), summarizing how it is made and attributing it to the inhabitants of Gaul. He uses a word for it that appears to be taken from Celtic: sapo, a cognate of the English word. Later Latin writers to use the word or its derivatives include Serenus Sammonicus (de Medicina, 11) and Priscian; and from the Latin, the modern Romance languages take their own words for it: sapone, savon, jabón, etc.

In other articles, by different contributing authors, Smith's Dictionary contradicts itself: in the article Unguenta, on perfumes and ointments, soaps are routinely included in the arsenal of personal-care products used by the Romans; and in the article Balneae, on the Roman bath, it is mentioned in passing that another authority would have had the Romans using soap to wash themselves.

What is puzzling is why, knowing of soap, the Romans do not in fact appear to have produced or used it. It is one of the great inventions, yet it is easy and cheap to make; the manufacturing process is simple, involving just two common raw materials, animal fat and wood ash, at least one of which is otherwise a waste product.

[c] Also Isidore, Orig. XI.1.138.

The careful student can learn a great deal about how fullers worked by spending some time on the Fulleries section of Internet Group Ostia's site, which gives detailed layouts of seven fulleries at Ostia, preceded by an introduction and reconstruction drawings of what they might have looked like.
import * as React from 'react';
import { useState } from 'react';
import { View } from 'react-native';
import {
    UILabel,
    UISwitcher,
    UISwitcherVariant,
} from '@tonlabs/uikit.hydrogen';

import { ExampleSection } from '../components/ExampleSection';
import { ExampleScreen } from '../components/ExampleScreen';

// All switcher variants shown on this screen, paired with their row labels.
const variants: [string, UISwitcherVariant][] = [
    ['Radio', UISwitcherVariant.Radio],
    ['Check', UISwitcherVariant.Check],
    ['Select', UISwitcherVariant.Select],
    ['Toggle', UISwitcherVariant.Toggle],
];

export const Checkbox = () => {
    // One shared flag drives every switcher, so all rows toggle in unison.
    const [switcherSelected, setSwitcherSelected] = useState(false);

    // Renders one labelled row per variant; the optional `disabled` flag
    // covers the second section without duplicating the row markup.
    const renderRows = (disabled: boolean = false) =>
        variants.map(([label, variant]) => (
            <View
                key={label}
                style={{
                    flexDirection: 'row',
                    justifyContent: 'space-between',
                    alignItems: 'center',
                }}
            >
                <UILabel>{label}:</UILabel>
                <UISwitcher
                    variant={variant}
                    active={switcherSelected}
                    disabled={disabled}
                    onPress={() => {
                        console.log('onPress', switcherSelected);
                        setSwitcherSelected((prev) => !prev);
                    }}
                />
            </View>
        ));

    return (
        <ExampleScreen>
            <ExampleSection title="UISwitcher">
                <View
                    style={{
                        width: 100,
                        paddingVertical: 20,
                        alignItems: 'stretch',
                    }}
                >
                    {renderRows()}
                </View>
            </ExampleSection>
            <ExampleSection title="UISwitcher disabled">
                <View
                    style={{
                        width: 100,
                        paddingVertical: 20,
                        alignItems: 'stretch',
                    }}
                >
                    {renderRows(true)}
                </View>
            </ExampleSection>
        </ExampleScreen>
    );
};
def add_member(self, potential_member):
    """Add potential_member to the chat if they are friends with at
    least one current member; return True on success, False otherwise."""
    # any() avoids appending to chat_members while iterating over it.
    if any(member.are_friends(potential_member) for member in self.chat_members):
        self.chat_members.append(potential_member)
        return True
    return False
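# A minimal usage sketch for add_member above. The surrounding class and the
# are_friends contract are not shown in the original, so the Chat and Member
# stubs below (including the befriend helper) are illustrative assumptions,
# not part of the source.

class Member:
    """Illustrative stub: a member with a symmetric friendship relation."""

    def __init__(self, name):
        self.name = name
        self.friends = set()

    def befriend(self, other):
        # Hypothetical helper: record the friendship on both sides.
        self.friends.add(other)
        other.friends.add(self)

    def are_friends(self, other):
        return other in self.friends


class Chat:
    """Illustrative stub hosting add_member as defined above."""

    def __init__(self, founder):
        # A chat is seeded with a founding member: with an empty member
        # list, add_member could never admit anyone.
        self.chat_members = [founder]

    def add_member(self, potential_member):
        if any(m.are_friends(potential_member) for m in self.chat_members):
            self.chat_members.append(potential_member)
            return True
        return False


alice, bob, carol = Member("alice"), Member("bob"), Member("carol")
alice.befriend(bob)

chat = Chat(alice)
print(chat.add_member(bob))    # True: bob is friends with alice
print(chat.add_member(carol))  # False: carol knows nobody in the chat

# Design note: because admission requires friendship with an existing member,
# the membership graph grows outward from the founder, which is why the stub
# seeds chat_members rather than starting from an empty list.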